1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "vec.h"
38 #include "input.h"
39 #include "function.h"
40 #include "insn-config.h"
41 #include "insn-attr.h"
42 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "expr.h"
44 #include "insn-codes.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "recog.h"
48 #include "reload.h"
49 #include "typeclass.h"
50 #include "toplev.h"
51 #include "langhooks.h"
52 #include "intl.h"
53 #include "tm_p.h"
54 #include "tree-iterator.h"
55 #include "predict.h"
56 #include "dominance.h"
57 #include "cfg.h"
58 #include "basic-block.h"
59 #include "tree-ssa-alias.h"
60 #include "internal-fn.h"
61 #include "gimple-expr.h"
62 #include "is-a.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "hash-map.h"
66 #include "plugin-api.h"
67 #include "ipa-ref.h"
68 #include "cgraph.h"
69 #include "tree-ssanames.h"
70 #include "target.h"
71 #include "common/common-target.h"
72 #include "timevar.h"
73 #include "df.h"
74 #include "diagnostic.h"
75 #include "tree-ssa-live.h"
76 #include "tree-outof-ssa.h"
77 #include "target-globals.h"
78 #include "params.h"
79 #include "tree-ssa-address.h"
80 #include "cfgexpand.h"
81 #include "builtins.h"
82 #include "tree-chkp.h"
83 #include "rtl-chkp.h"
84
85 #ifndef STACK_PUSH_CODE
86 #ifdef STACK_GROWS_DOWNWARD
87 #define STACK_PUSH_CODE PRE_DEC
88 #else
89 #define STACK_PUSH_CODE PRE_INC
90 #endif
91 #endif
92
93
94 /* If this is nonzero, we do not bother generating VOLATILE
95 around volatile memory references, and we are willing to
96 output indirect addresses. If cse is to follow, we reject
97 indirect addresses so a useful potential cse is generated;
98 if it is used only once, instruction combination will produce
99 the same indirect address eventually. */
100 int cse_not_expected;
101
102 /* This structure is used by move_by_pieces to describe the move to
103 be performed. */
104 struct move_by_pieces_d
105 {
106 rtx to;
107 rtx to_addr;
108 int autinc_to;
109 int explicit_inc_to;
110 rtx from;
111 rtx from_addr;
112 int autinc_from;
113 int explicit_inc_from;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 int reverse;
117 };
118
119 /* This structure is used by store_by_pieces to describe the clear to
120 be performed. */
121
122 struct store_by_pieces_d
123 {
124 rtx to;
125 rtx to_addr;
126 int autinc_to;
127 int explicit_inc_to;
128 unsigned HOST_WIDE_INT len;
129 HOST_WIDE_INT offset;
130 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
131 void *constfundata;
132 int reverse;
133 };
134
135 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
136 struct move_by_pieces_d *);
137 static bool block_move_libcall_safe_for_call_parm (void);
138 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
139 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
140 unsigned HOST_WIDE_INT);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
146 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
147 struct store_by_pieces_d *);
148 static tree clear_storage_libcall_fn (int);
149 static rtx_insn *compress_float_constant (rtx, rtx);
150 static rtx get_subtarget (rtx);
151 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
152 HOST_WIDE_INT, machine_mode,
153 tree, int, alias_set_type);
154 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
155 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
156 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
157 machine_mode, tree, alias_set_type, bool);
158
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
160
161 static int is_aligning_offset (const_tree, const_tree);
162 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
163 enum expand_modifier);
164 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
165 static rtx do_store_flag (sepops, rtx, machine_mode);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
170 static rtx const_vector_from_tree (tree);
171
172 \f
173 /* This is run to set up which modes can be used
174 directly in memory and to initialize the block move optab. It is run
175 at the beginning of compilation and when the target is reinitialized. */
176
177 void
178 init_expr_target (void)
179 {
180 rtx insn, pat;
181 machine_mode mode;
182 int num_clobbers;
183 rtx mem, mem1;
184 rtx reg;
185
186 /* Try indexing by frame ptr and try by stack ptr.
187 It is known that on the Convex the stack ptr isn't a valid index.
188 With luck, one or the other is valid on any machine. */
189 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
190 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
191
192 /* A scratch register we can modify in-place below to avoid
193 useless RTL allocations. */
194 reg = gen_rtx_REG (VOIDmode, -1);
195
196 insn = rtx_alloc (INSN);
197 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
198 PATTERN (insn) = pat;
199
200 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
201 mode = (machine_mode) ((int) mode + 1))
202 {
203 int regno;
204
205 direct_load[(int) mode] = direct_store[(int) mode] = 0;
206 PUT_MODE (mem, mode);
207 PUT_MODE (mem1, mode);
208 PUT_MODE (reg, mode);
209
210 /* See if there is some register that can be used in this mode and
211 directly loaded or stored from memory. */
212
213 if (mode != VOIDmode && mode != BLKmode)
214 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
215 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
216 regno++)
217 {
218 if (! HARD_REGNO_MODE_OK (regno, mode))
219 continue;
220
221 SET_REGNO (reg, regno);
222
223 SET_SRC (pat) = mem;
224 SET_DEST (pat) = reg;
225 if (recog (pat, insn, &num_clobbers) >= 0)
226 direct_load[(int) mode] = 1;
227
228 SET_SRC (pat) = mem1;
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
232
233 SET_SRC (pat) = reg;
234 SET_DEST (pat) = mem;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_store[(int) mode] = 1;
237
238 SET_SRC (pat) = reg;
239 SET_DEST (pat) = mem1;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
242 }
243 }
244
245 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
246
247 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
248 mode = GET_MODE_WIDER_MODE (mode))
249 {
250 machine_mode srcmode;
251 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
252 srcmode = GET_MODE_WIDER_MODE (srcmode))
253 {
254 enum insn_code ic;
255
256 ic = can_extend_p (mode, srcmode, 0);
257 if (ic == CODE_FOR_nothing)
258 continue;
259
260 PUT_MODE (mem, srcmode);
261
262 if (insn_operand_matches (ic, 1, mem))
263 float_extend_from_mem[mode][srcmode] = true;
264 }
265 }
266 }
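/* As a concrete illustration: on a target where recog accepts
   (set (reg:SI Rn) (mem:SI (reg:SI sp))), the loop above sets
   direct_load[(int) SImode] to 1, which later lets convert_move and
   convert_modes use gen_lowpart on an SImode MEM directly instead of
   first forcing the value into a register.  */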
267
268 /* This is run at the start of compiling a function. */
269
270 void
271 init_expr (void)
272 {
273 memset (&crtl->expr, 0, sizeof (crtl->expr));
274 }
275 \f
276 /* Copy data from FROM to TO, where the machine modes are not the same.
277 Both modes may be integer, or both may be floating, or both may be
278 fixed-point.
279 UNSIGNEDP should be nonzero if FROM is an unsigned type.
280 This causes zero-extension instead of sign-extension. */
281
282 void
283 convert_move (rtx to, rtx from, int unsignedp)
284 {
285 machine_mode to_mode = GET_MODE (to);
286 machine_mode from_mode = GET_MODE (from);
287 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
288 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
289 enum insn_code code;
290 rtx libcall;
291
292 /* rtx code for making an equivalent value. */
293 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
294 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
295
296
297 gcc_assert (to_real == from_real);
298 gcc_assert (to_mode != BLKmode);
299 gcc_assert (from_mode != BLKmode);
300
301 /* If the source and destination are already the same, then there's
302 nothing to do. */
303 if (to == from)
304 return;
305
306 /* If FROM is a SUBREG that indicates that we have already done at least
307 the required extension, strip it. We don't handle such SUBREGs as
308 TO here. */
309
310 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
311 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
312 >= GET_MODE_PRECISION (to_mode))
313 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
314 from = gen_lowpart (to_mode, from), from_mode = to_mode;
315
316 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
317
318 if (to_mode == from_mode
319 || (from_mode == VOIDmode && CONSTANT_P (from)))
320 {
321 emit_move_insn (to, from);
322 return;
323 }
324
325 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
326 {
327 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
328
329 if (VECTOR_MODE_P (to_mode))
330 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
331 else
332 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
333
334 emit_move_insn (to, from);
335 return;
336 }
337
338 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
339 {
340 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
341 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
342 return;
343 }
344
345 if (to_real)
346 {
347 rtx value;
348 rtx_insn *insns;
349 convert_optab tab;
350
351 gcc_assert ((GET_MODE_PRECISION (from_mode)
352 != GET_MODE_PRECISION (to_mode))
353 || (DECIMAL_FLOAT_MODE_P (from_mode)
354 != DECIMAL_FLOAT_MODE_P (to_mode)));
355
356 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
357 /* Conversion between decimal float and binary float, same size. */
358 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
359 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
360 tab = sext_optab;
361 else
362 tab = trunc_optab;
363
364 /* Try converting directly if the insn is supported. */
365
366 code = convert_optab_handler (tab, to_mode, from_mode);
367 if (code != CODE_FOR_nothing)
368 {
369 emit_unop_insn (code, to, from,
370 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
371 return;
372 }
373
374 /* Otherwise use a libcall. */
375 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
376
377 /* Is this conversion implemented yet? */
378 gcc_assert (libcall);
379
380 start_sequence ();
381 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
382 1, from, from_mode);
383 insns = get_insns ();
384 end_sequence ();
385 emit_libcall_block (insns, to, value,
386 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
387 from)
388 : gen_rtx_FLOAT_EXTEND (to_mode, from));
389 return;
390 }
391
392 /* Handle pointer conversion. */ /* SPEE 900220. */
393 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
394 {
395 convert_optab ctab;
396
397 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
398 ctab = trunc_optab;
399 else if (unsignedp)
400 ctab = zext_optab;
401 else
402 ctab = sext_optab;
403
404 if (convert_optab_handler (ctab, to_mode, from_mode)
405 != CODE_FOR_nothing)
406 {
407 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
408 to, from, UNKNOWN);
409 return;
410 }
411 }
412
413 /* Targets are expected to provide conversion insns between PxImode and
414 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
415 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
416 {
417 machine_mode full_mode
418 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
419
420 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
421 != CODE_FOR_nothing);
422
423 if (full_mode != from_mode)
424 from = convert_to_mode (full_mode, from, unsignedp);
425 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
426 to, from, UNKNOWN);
427 return;
428 }
429 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
430 {
431 rtx new_from;
432 machine_mode full_mode
433 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
434 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
435 enum insn_code icode;
436
437 icode = convert_optab_handler (ctab, full_mode, from_mode);
438 gcc_assert (icode != CODE_FOR_nothing);
439
440 if (to_mode == full_mode)
441 {
442 emit_unop_insn (icode, to, from, UNKNOWN);
443 return;
444 }
445
446 new_from = gen_reg_rtx (full_mode);
447 emit_unop_insn (icode, new_from, from, UNKNOWN);
448
449 /* else proceed to integer conversions below. */
450 from_mode = full_mode;
451 from = new_from;
452 }
453
454 /* Make sure both are fixed-point modes or both are not. */
455 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
456 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
457 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
458 {
459 /* If we widen from_mode to to_mode and they are in the same class,
460 we won't saturate the result.
461 Otherwise, always saturate the result to play safe. */
462 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
463 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
464 expand_fixed_convert (to, from, 0, 0);
465 else
466 expand_fixed_convert (to, from, 0, 1);
467 return;
468 }
469
470 /* Now both modes are integers. */
471
472 /* Handle expanding beyond a word. */
473 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
474 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
475 {
476 rtx_insn *insns;
477 rtx lowpart;
478 rtx fill_value;
479 rtx lowfrom;
480 int i;
481 machine_mode lowpart_mode;
482 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
483
484 /* Try converting directly if the insn is supported. */
485 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
486 != CODE_FOR_nothing)
487 {
488 /* If FROM is a SUBREG, put it into a register. Do this
489 so that we always generate the same set of insns for
490 better cse'ing; if an intermediate assignment occurred,
491 we won't be doing the operation directly on the SUBREG. */
492 if (optimize > 0 && GET_CODE (from) == SUBREG)
493 from = force_reg (from_mode, from);
494 emit_unop_insn (code, to, from, equiv_code);
495 return;
496 }
497 /* Next, try converting via full word. */
498 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
499 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
500 != CODE_FOR_nothing))
501 {
502 rtx word_to = gen_reg_rtx (word_mode);
503 if (REG_P (to))
504 {
505 if (reg_overlap_mentioned_p (to, from))
506 from = force_reg (from_mode, from);
507 emit_clobber (to);
508 }
509 convert_move (word_to, from, unsignedp);
510 emit_unop_insn (code, to, word_to, equiv_code);
511 return;
512 }
513
514 /* No special multiword conversion insn; do it by hand. */
515 start_sequence ();
516
 517 /* Since we will turn this into a no conflict block, we must ensure that
 518 the source does not overlap the target, so force it into an isolated
 519 register if need be. Likewise for any MEM input, since the
 520 conversion sequence might require several references to it and we
 521 must ensure we're getting the same value every time. */
522
523 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
524 from = force_reg (from_mode, from);
525
526 /* Get a copy of FROM widened to a word, if necessary. */
527 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
528 lowpart_mode = word_mode;
529 else
530 lowpart_mode = from_mode;
531
532 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
533
534 lowpart = gen_lowpart (lowpart_mode, to);
535 emit_move_insn (lowpart, lowfrom);
536
537 /* Compute the value to put in each remaining word. */
538 if (unsignedp)
539 fill_value = const0_rtx;
540 else
541 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
542 LT, lowfrom, const0_rtx,
543 lowpart_mode, 0, -1);
544
545 /* Fill the remaining words. */
546 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
547 {
548 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
549 rtx subword = operand_subword (to, index, 1, to_mode);
550
551 gcc_assert (subword);
552
553 if (fill_value != subword)
554 emit_move_insn (subword, fill_value);
555 }
556
557 insns = get_insns ();
558 end_sequence ();
559
560 emit_insn (insns);
561 return;
562 }
563
564 /* Truncating multi-word to a word or less. */
565 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
566 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
567 {
568 if (!((MEM_P (from)
569 && ! MEM_VOLATILE_P (from)
570 && direct_load[(int) to_mode]
571 && ! mode_dependent_address_p (XEXP (from, 0),
572 MEM_ADDR_SPACE (from)))
573 || REG_P (from)
574 || GET_CODE (from) == SUBREG))
575 from = force_reg (from_mode, from);
576 convert_move (to, gen_lowpart (word_mode, from), 0);
577 return;
578 }
579
580 /* Now follow all the conversions between integers
581 no more than a word long. */
582
583 /* For truncation, usually we can just refer to FROM in a narrower mode. */
584 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
585 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
586 {
587 if (!((MEM_P (from)
588 && ! MEM_VOLATILE_P (from)
589 && direct_load[(int) to_mode]
590 && ! mode_dependent_address_p (XEXP (from, 0),
591 MEM_ADDR_SPACE (from)))
592 || REG_P (from)
593 || GET_CODE (from) == SUBREG))
594 from = force_reg (from_mode, from);
595 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
596 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
597 from = copy_to_reg (from);
598 emit_move_insn (to, gen_lowpart (to_mode, from));
599 return;
600 }
601
602 /* Handle extension. */
603 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
604 {
605 /* Convert directly if that works. */
606 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
607 != CODE_FOR_nothing)
608 {
609 emit_unop_insn (code, to, from, equiv_code);
610 return;
611 }
612 else
613 {
614 machine_mode intermediate;
615 rtx tmp;
616 int shift_amount;
617
618 /* Search for a mode to convert via. */
619 for (intermediate = from_mode; intermediate != VOIDmode;
620 intermediate = GET_MODE_WIDER_MODE (intermediate))
621 if (((can_extend_p (to_mode, intermediate, unsignedp)
622 != CODE_FOR_nothing)
623 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
624 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
625 && (can_extend_p (intermediate, from_mode, unsignedp)
626 != CODE_FOR_nothing))
627 {
628 convert_move (to, convert_to_mode (intermediate, from,
629 unsignedp), unsignedp);
630 return;
631 }
632
633 /* No suitable intermediate mode.
634 Generate what we need with shifts. */
635 shift_amount = (GET_MODE_PRECISION (to_mode)
636 - GET_MODE_PRECISION (from_mode));
637 from = gen_lowpart (to_mode, force_reg (from_mode, from));
638 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
639 to, unsignedp);
640 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
641 to, unsignedp);
642 if (tmp != to)
643 emit_move_insn (to, tmp);
644 return;
645 }
646 }
647
648 /* Support special truncate insns for certain modes. */
649 if (convert_optab_handler (trunc_optab, to_mode,
650 from_mode) != CODE_FOR_nothing)
651 {
652 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
653 to, from, UNKNOWN);
654 return;
655 }
656
657 /* Handle truncation of volatile memrefs, and so on;
658 the things that couldn't be truncated directly,
659 and for which there was no special instruction.
660
661 ??? Code above formerly short-circuited this, for most integer
662 mode pairs, with a force_reg in from_mode followed by a recursive
663 call to this routine. Appears always to have been wrong. */
664 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
665 {
666 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
667 emit_move_insn (to, temp);
668 return;
669 }
670
671 /* Mode combination is not recognized. */
672 gcc_unreachable ();
673 }
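/* A minimal usage sketch (DST and SRC are placeholder names):

       rtx dst = gen_reg_rtx (SImode);
       convert_move (dst, src, 1);

   If SRC is, say, a QImode value, this zero-extends it into DST because
   UNSIGNEDP is nonzero; passing 0 would request sign extension instead.  */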
674
675 /* Return an rtx for a value that would result
676 from converting X to mode MODE.
677 Both X and MODE may be floating, or both integer.
678 UNSIGNEDP is nonzero if X is an unsigned value.
679 This can be done by referring to a part of X in place
680 or by copying to a new temporary with conversion. */
681
682 rtx
683 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
684 {
685 return convert_modes (mode, VOIDmode, x, unsignedp);
686 }
687
688 /* Return an rtx for a value that would result
689 from converting X from mode OLDMODE to mode MODE.
690 Both modes may be floating, or both integer.
691 UNSIGNEDP is nonzero if X is an unsigned value.
692
693 This can be done by referring to a part of X in place
694 or by copying to a new temporary with conversion.
695
696 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
697
698 rtx
699 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
700 {
701 rtx temp;
702
703 /* If FROM is a SUBREG that indicates that we have already done at least
704 the required extension, strip it. */
705
706 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
707 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
708 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
709 x = gen_lowpart (mode, SUBREG_REG (x));
710
711 if (GET_MODE (x) != VOIDmode)
712 oldmode = GET_MODE (x);
713
714 if (mode == oldmode)
715 return x;
716
717 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
718 {
719 /* If the caller did not tell us the old mode, then there is not
720 much to do with respect to canonicalization. We have to
721 assume that all the bits are significant. */
722 if (GET_MODE_CLASS (oldmode) != MODE_INT)
723 oldmode = MAX_MODE_INT;
724 wide_int w = wide_int::from (std::make_pair (x, oldmode),
725 GET_MODE_PRECISION (mode),
726 unsignedp ? UNSIGNED : SIGNED);
727 return immed_wide_int_const (w, mode);
728 }
729
730 /* We can do this with a gen_lowpart if both desired and current modes
731 are integer, and this is either a constant integer, a register, or a
732 non-volatile MEM. */
733 if (GET_MODE_CLASS (mode) == MODE_INT
734 && GET_MODE_CLASS (oldmode) == MODE_INT
735 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
736 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
737 || (REG_P (x)
738 && (!HARD_REGISTER_P (x)
739 || HARD_REGNO_MODE_OK (REGNO (x), mode))
740 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
741
742 return gen_lowpart (mode, x);
743
 744 /* Converting an integer constant into a vector mode is always equivalent
 745 to a subreg operation. */
746 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
747 {
748 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
749 return simplify_gen_subreg (mode, x, oldmode, 0);
750 }
751
752 temp = gen_reg_rtx (mode);
753 convert_move (temp, x, unsignedp);
754 return temp;
755 }
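/* Unlike convert_move, which stores into a caller-supplied target,
   convert_modes returns an rtx for the converted value (possibly X
   itself, a constant, or a fresh pseudo), so it is the more convenient
   entry point inside expanders that merely need the value in MODE.  */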
756 \f
757 /* Return the largest alignment we can use for doing a move (or store)
758 of MAX_PIECES. ALIGN is the largest alignment we could use. */
759
760 static unsigned int
761 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
762 {
763 machine_mode tmode;
764
765 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
766 if (align >= GET_MODE_ALIGNMENT (tmode))
767 align = GET_MODE_ALIGNMENT (tmode);
768 else
769 {
770 machine_mode tmode, xmode;
771
772 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
773 tmode != VOIDmode;
774 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
775 if (GET_MODE_SIZE (tmode) > max_pieces
776 || SLOW_UNALIGNED_ACCESS (tmode, align))
777 break;
778
779 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
780 }
781
782 return align;
783 }
784
 785 /* Return the widest integer mode strictly narrower than SIZE. If no
 786 such mode can be found, return VOIDmode. */
787
788 static machine_mode
789 widest_int_mode_for_size (unsigned int size)
790 {
791 machine_mode tmode, mode = VOIDmode;
792
793 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
794 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
795 if (GET_MODE_SIZE (tmode) < size)
796 mode = tmode;
797
798 return mode;
799 }
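/* For example, with SIZE == 4 on a typical target whose SImode is exactly
   4 bytes, SImode fails the strict GET_MODE_SIZE (tmode) < 4 test and
   HImode is returned; callers therefore pass one more than the widest
   piece they are prepared to move (e.g. MOVE_MAX_PIECES + 1).  */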
800
801 /* Determine whether the LEN bytes can be moved by using several move
802 instructions. Return nonzero if a call to move_by_pieces should
803 succeed. */
804
805 int
806 can_move_by_pieces (unsigned HOST_WIDE_INT len,
807 unsigned int align)
808 {
809 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
810 optimize_insn_for_speed_p ());
811 }
812
813 /* Generate several move instructions to copy LEN bytes from block FROM to
814 block TO. (These are MEM rtx's with BLKmode).
815
816 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
817 used to push FROM to the stack.
818
819 ALIGN is maximum stack alignment we can assume.
820
 821 If ENDP is 0 return TO; if ENDP is 1 return memory at the end a la
 822 mempcpy; and if ENDP is 2 return memory at the end minus one byte a la
 823 stpcpy. */
824
825 rtx
826 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
827 unsigned int align, int endp)
828 {
829 struct move_by_pieces_d data;
830 machine_mode to_addr_mode;
831 machine_mode from_addr_mode = get_address_mode (from);
832 rtx to_addr, from_addr = XEXP (from, 0);
833 unsigned int max_size = MOVE_MAX_PIECES + 1;
834 enum insn_code icode;
835
836 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
837
838 data.offset = 0;
839 data.from_addr = from_addr;
840 if (to)
841 {
842 to_addr_mode = get_address_mode (to);
843 to_addr = XEXP (to, 0);
844 data.to = to;
845 data.autinc_to
846 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
847 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
848 data.reverse
849 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
850 }
851 else
852 {
853 to_addr_mode = VOIDmode;
854 to_addr = NULL_RTX;
855 data.to = NULL_RTX;
856 data.autinc_to = 1;
857 #ifdef STACK_GROWS_DOWNWARD
858 data.reverse = 1;
859 #else
860 data.reverse = 0;
861 #endif
862 }
863 data.to_addr = to_addr;
864 data.from = from;
865 data.autinc_from
866 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
867 || GET_CODE (from_addr) == POST_INC
868 || GET_CODE (from_addr) == POST_DEC);
869
870 data.explicit_inc_from = 0;
871 data.explicit_inc_to = 0;
872 if (data.reverse) data.offset = len;
873 data.len = len;
874
875 /* If copying requires more than two move insns,
876 copy addresses to registers (to make displacements shorter)
877 and use post-increment if available. */
878 if (!(data.autinc_from && data.autinc_to)
879 && move_by_pieces_ninsns (len, align, max_size) > 2)
880 {
881 /* Find the mode of the largest move...
882 MODE might not be used depending on the definitions of the
883 USE_* macros below. */
884 machine_mode mode ATTRIBUTE_UNUSED
885 = widest_int_mode_for_size (max_size);
886
887 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
888 {
889 data.from_addr = copy_to_mode_reg (from_addr_mode,
890 plus_constant (from_addr_mode,
891 from_addr, len));
892 data.autinc_from = 1;
893 data.explicit_inc_from = -1;
894 }
895 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
896 {
897 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
898 data.autinc_from = 1;
899 data.explicit_inc_from = 1;
900 }
901 if (!data.autinc_from && CONSTANT_P (from_addr))
902 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
903 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
904 {
905 data.to_addr = copy_to_mode_reg (to_addr_mode,
906 plus_constant (to_addr_mode,
907 to_addr, len));
908 data.autinc_to = 1;
909 data.explicit_inc_to = -1;
910 }
911 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
912 {
913 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
914 data.autinc_to = 1;
915 data.explicit_inc_to = 1;
916 }
917 if (!data.autinc_to && CONSTANT_P (to_addr))
918 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
919 }
920
921 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
922
923 /* First move what we can in the largest integer mode, then go to
924 successively smaller modes. */
925
926 while (max_size > 1 && data.len > 0)
927 {
928 machine_mode mode = widest_int_mode_for_size (max_size);
929
930 if (mode == VOIDmode)
931 break;
932
933 icode = optab_handler (mov_optab, mode);
934 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
935 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
936
937 max_size = GET_MODE_SIZE (mode);
938 }
939
940 /* The code above should have handled everything. */
941 gcc_assert (!data.len);
942
943 if (endp)
944 {
945 rtx to1;
946
947 gcc_assert (!data.reverse);
948 if (data.autinc_to)
949 {
950 if (endp == 2)
951 {
952 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
953 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
954 else
955 data.to_addr = copy_to_mode_reg (to_addr_mode,
956 plus_constant (to_addr_mode,
957 data.to_addr,
958 -1));
959 }
960 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
961 data.offset);
962 }
963 else
964 {
965 if (endp == 2)
966 --data.offset;
967 to1 = adjust_address (data.to, QImode, data.offset);
968 }
969 return to1;
970 }
971 else
972 return data.to;
973 }
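/* For instance, an expander implementing a mempcpy-style built-in might
   call move_by_pieces (dst_mem, src_mem, 16, align, 1) and use the
   returned QImode MEM, which addresses the first byte past the copied
   block (DST_MEM and SRC_MEM being placeholder names).  */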
974
975 /* Return number of insns required to move L bytes by pieces.
976 ALIGN (in bits) is maximum alignment we can assume. */
977
978 unsigned HOST_WIDE_INT
979 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
980 unsigned int max_size)
981 {
982 unsigned HOST_WIDE_INT n_insns = 0;
983
984 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
985
986 while (max_size > 1 && l > 0)
987 {
988 machine_mode mode;
989 enum insn_code icode;
990
991 mode = widest_int_mode_for_size (max_size);
992
993 if (mode == VOIDmode)
994 break;
995
996 icode = optab_handler (mov_optab, mode);
997 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
998 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
999
1000 max_size = GET_MODE_SIZE (mode);
1001 }
1002
1003 gcc_assert (!l);
1004 return n_insns;
1005 }
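/* E.g. for L == 7 sufficiently aligned bytes on a target whose widest
   by-pieces mode is 4 bytes wide, the cost is one SImode, one HImode
   and one QImode move, so the function returns 3.  */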
1006
1007 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1008 with move instructions for mode MODE. GENFUN is the gen_... function
1009 to make a move insn for that mode. DATA has all the other info. */
1010
1011 static void
1012 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1013 struct move_by_pieces_d *data)
1014 {
1015 unsigned int size = GET_MODE_SIZE (mode);
1016 rtx to1 = NULL_RTX, from1;
1017
1018 while (data->len >= size)
1019 {
1020 if (data->reverse)
1021 data->offset -= size;
1022
1023 if (data->to)
1024 {
1025 if (data->autinc_to)
1026 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1027 data->offset);
1028 else
1029 to1 = adjust_address (data->to, mode, data->offset);
1030 }
1031
1032 if (data->autinc_from)
1033 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1034 data->offset);
1035 else
1036 from1 = adjust_address (data->from, mode, data->offset);
1037
1038 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1039 emit_insn (gen_add2_insn (data->to_addr,
1040 gen_int_mode (-(HOST_WIDE_INT) size,
1041 GET_MODE (data->to_addr))));
1042 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1043 emit_insn (gen_add2_insn (data->from_addr,
1044 gen_int_mode (-(HOST_WIDE_INT) size,
1045 GET_MODE (data->from_addr))));
1046
1047 if (data->to)
1048 emit_insn ((*genfun) (to1, from1));
1049 else
1050 {
1051 #ifdef PUSH_ROUNDING
1052 emit_single_push_insn (mode, from1, NULL);
1053 #else
1054 gcc_unreachable ();
1055 #endif
1056 }
1057
1058 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1059 emit_insn (gen_add2_insn (data->to_addr,
1060 gen_int_mode (size,
1061 GET_MODE (data->to_addr))));
1062 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1063 emit_insn (gen_add2_insn (data->from_addr,
1064 gen_int_mode (size,
1065 GET_MODE (data->from_addr))));
1066
1067 if (! data->reverse)
1068 data->offset += size;
1069
1070 data->len -= size;
1071 }
1072 }
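/* Note the ordering above: an explicit pre-decrement of an address
   register is emitted before the move that uses it and an explicit
   post-increment after it, while DATA->offset is adjusted so that it
   always names the byte offset of the piece currently being moved.  */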
1073 \f
1074 /* Emit code to move a block Y to a block X. This may be done with
1075 string-move instructions, with multiple scalar move instructions,
1076 or with a library call.
1077
1078 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1079 SIZE is an rtx that says how long they are.
1080 ALIGN is the maximum alignment we can assume they have.
1081 METHOD describes what kind of copy this is, and what mechanisms may be used.
 1082 MIN_SIZE is the minimal size of the block to move.
 1083 MAX_SIZE is the maximal size of the block to move; if it cannot be
 1084 represented in unsigned HOST_WIDE_INT, it is a mask of all ones.
1085
1086 Return the address of the new block, if memcpy is called and returns it,
1087 0 otherwise. */
1088
1089 rtx
1090 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1091 unsigned int expected_align, HOST_WIDE_INT expected_size,
1092 unsigned HOST_WIDE_INT min_size,
1093 unsigned HOST_WIDE_INT max_size,
1094 unsigned HOST_WIDE_INT probable_max_size)
1095 {
1096 bool may_use_call;
1097 rtx retval = 0;
1098 unsigned int align;
1099
1100 gcc_assert (size);
1101 if (CONST_INT_P (size)
1102 && INTVAL (size) == 0)
1103 return 0;
1104
1105 switch (method)
1106 {
1107 case BLOCK_OP_NORMAL:
1108 case BLOCK_OP_TAILCALL:
1109 may_use_call = true;
1110 break;
1111
1112 case BLOCK_OP_CALL_PARM:
1113 may_use_call = block_move_libcall_safe_for_call_parm ();
1114
1115 /* Make inhibit_defer_pop nonzero around the library call
1116 to force it to pop the arguments right away. */
1117 NO_DEFER_POP;
1118 break;
1119
1120 case BLOCK_OP_NO_LIBCALL:
1121 may_use_call = false;
1122 break;
1123
1124 default:
1125 gcc_unreachable ();
1126 }
1127
1128 gcc_assert (MEM_P (x) && MEM_P (y));
1129 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1130 gcc_assert (align >= BITS_PER_UNIT);
1131
1132 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1133 block copy is more efficient for other large modes, e.g. DCmode. */
1134 x = adjust_address (x, BLKmode, 0);
1135 y = adjust_address (y, BLKmode, 0);
1136
1137 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1138 can be incorrect is coming from __builtin_memcpy. */
1139 if (CONST_INT_P (size))
1140 {
1141 x = shallow_copy_rtx (x);
1142 y = shallow_copy_rtx (y);
1143 set_mem_size (x, INTVAL (size));
1144 set_mem_size (y, INTVAL (size));
1145 }
1146
1147 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1148 move_by_pieces (x, y, INTVAL (size), align, 0);
1149 else if (emit_block_move_via_movmem (x, y, size, align,
1150 expected_align, expected_size,
1151 min_size, max_size, probable_max_size))
1152 ;
1153 else if (may_use_call
1154 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1155 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1156 {
1157 /* Since x and y are passed to a libcall, mark the corresponding
1158 tree EXPR as addressable. */
1159 tree y_expr = MEM_EXPR (y);
1160 tree x_expr = MEM_EXPR (x);
1161 if (y_expr)
1162 mark_addressable (y_expr);
1163 if (x_expr)
1164 mark_addressable (x_expr);
1165 retval = emit_block_move_via_libcall (x, y, size,
1166 method == BLOCK_OP_TAILCALL);
1167 }
1168
1169 else
1170 emit_block_move_via_loop (x, y, size, align);
1171
1172 if (method == BLOCK_OP_CALL_PARM)
1173 OK_DEFER_POP;
1174
1175 return retval;
1176 }
1177
1178 rtx
1179 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1180 {
1181 unsigned HOST_WIDE_INT max, min = 0;
1182 if (GET_CODE (size) == CONST_INT)
1183 min = max = UINTVAL (size);
1184 else
1185 max = GET_MODE_MASK (GET_MODE (size));
1186 return emit_block_move_hints (x, y, size, method, 0, -1,
1187 min, max, max);
1188 }
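/* Callers without profile-derived hints use this wrapper; e.g.
   emit_block_move (x, y, GEN_INT (32), BLOCK_OP_NORMAL) forwards to
   emit_block_move_hints with MIN_SIZE == MAX_SIZE == 32 and no expected
   alignment or size.  */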
1189
1190 /* A subroutine of emit_block_move. Returns true if calling the
1191 block move libcall will not clobber any parameters which may have
1192 already been placed on the stack. */
1193
1194 static bool
1195 block_move_libcall_safe_for_call_parm (void)
1196 {
1197 #if defined (REG_PARM_STACK_SPACE)
1198 tree fn;
1199 #endif
1200
1201 /* If arguments are pushed on the stack, then they're safe. */
1202 if (PUSH_ARGS)
1203 return true;
1204
1205 /* If registers go on the stack anyway, any argument is sure to clobber
1206 an outgoing argument. */
1207 #if defined (REG_PARM_STACK_SPACE)
1208 fn = emit_block_move_libcall_fn (false);
1209 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1210 depend on its argument. */
1211 (void) fn;
1212 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1213 && REG_PARM_STACK_SPACE (fn) != 0)
1214 return false;
1215 #endif
1216
1217 /* If any argument goes in memory, then it might clobber an outgoing
1218 argument. */
1219 {
1220 CUMULATIVE_ARGS args_so_far_v;
1221 cumulative_args_t args_so_far;
1222 tree fn, arg;
1223
1224 fn = emit_block_move_libcall_fn (false);
1225 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1226 args_so_far = pack_cumulative_args (&args_so_far_v);
1227
1228 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1229 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1230 {
1231 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1232 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1233 NULL_TREE, true);
1234 if (!tmp || !REG_P (tmp))
1235 return false;
1236 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1237 return false;
1238 targetm.calls.function_arg_advance (args_so_far, mode,
1239 NULL_TREE, true);
1240 }
1241 }
1242 return true;
1243 }
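/* In short: if the target simply pushes arguments, nothing can be
   clobbered; otherwise the memcpy libcall is treated as safe only when
   all three of its arguments are passed entirely in registers, since an
   argument that lands even partially on the stack could overwrite
   outgoing arguments the caller has already stored.  */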
1244
1245 /* A subroutine of emit_block_move. Expand a movmem pattern;
1246 return true if successful. */
1247
1248 static bool
1249 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1250 unsigned int expected_align, HOST_WIDE_INT expected_size,
1251 unsigned HOST_WIDE_INT min_size,
1252 unsigned HOST_WIDE_INT max_size,
1253 unsigned HOST_WIDE_INT probable_max_size)
1254 {
1255 int save_volatile_ok = volatile_ok;
1256 machine_mode mode;
1257
1258 if (expected_align < align)
1259 expected_align = align;
1260 if (expected_size != -1)
1261 {
1262 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1263 expected_size = probable_max_size;
1264 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1265 expected_size = min_size;
1266 }
1267
1268 /* Since this is a move insn, we don't care about volatility. */
1269 volatile_ok = 1;
1270
1271 /* Try the most limited insn first, because there's no point
1272 including more than one in the machine description unless
1273 the more limited one has some advantage. */
1274
1275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1276 mode = GET_MODE_WIDER_MODE (mode))
1277 {
1278 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1279
1280 if (code != CODE_FOR_nothing
1281 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1282 here because if SIZE is less than the mode mask, as it is
1283 returned by the macro, it will definitely be less than the
1284 actual mode mask. Since SIZE is within the Pmode address
1285 space, we limit MODE to Pmode. */
1286 && ((CONST_INT_P (size)
1287 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1288 <= (GET_MODE_MASK (mode) >> 1)))
1289 || max_size <= (GET_MODE_MASK (mode) >> 1)
1290 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1291 {
1292 struct expand_operand ops[9];
1293 unsigned int nops;
1294
1295 /* ??? When called via emit_block_move_for_call, it'd be
1296 nice if there were some way to inform the backend, so
1297 that it doesn't fail the expansion because it thinks
1298 emitting the libcall would be more efficient. */
1299 nops = insn_data[(int) code].n_generator_args;
1300 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1301
1302 create_fixed_operand (&ops[0], x);
1303 create_fixed_operand (&ops[1], y);
1304 /* The check above guarantees that this size conversion is valid. */
1305 create_convert_operand_to (&ops[2], size, mode, true);
1306 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1307 if (nops >= 6)
1308 {
1309 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1310 create_integer_operand (&ops[5], expected_size);
1311 }
1312 if (nops >= 8)
1313 {
1314 create_integer_operand (&ops[6], min_size);
 1315 /* If we cannot represent the maximal size,
 1316 pass NULL for the parameter. */
1317 if ((HOST_WIDE_INT) max_size != -1)
1318 create_integer_operand (&ops[7], max_size);
1319 else
1320 create_fixed_operand (&ops[7], NULL);
1321 }
1322 if (nops == 9)
1323 {
 1324 /* If we cannot represent the maximal size,
 1325 pass NULL for the parameter. */
1326 if ((HOST_WIDE_INT) probable_max_size != -1)
1327 create_integer_operand (&ops[8], probable_max_size);
1328 else
1329 create_fixed_operand (&ops[8], NULL);
1330 }
1331 if (maybe_expand_insn (code, nops, ops))
1332 {
1333 volatile_ok = save_volatile_ok;
1334 return true;
1335 }
1336 }
1337 }
1338
1339 volatile_ok = save_volatile_ok;
1340 return false;
1341 }
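/* The 4-, 6-, 8- and 9-operand movmem forms accepted above take,
   respectively: destination, source, size and alignment; plus the two
   expected_* hints; plus the min/max size bounds; plus the probable
   maximal size.  */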
1342
1343 /* A subroutine of emit_block_move. Expand a call to memcpy.
1344 Return the return value from memcpy, 0 otherwise. */
1345
1346 rtx
1347 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1348 {
1349 rtx dst_addr, src_addr;
1350 tree call_expr, fn, src_tree, dst_tree, size_tree;
1351 machine_mode size_mode;
1352 rtx retval;
1353
1354 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1355 pseudos. We can then place those new pseudos into a VAR_DECL and
1356 use them later. */
1357
1358 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1359 src_addr = copy_addr_to_reg (XEXP (src, 0));
1360
1361 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1362 src_addr = convert_memory_address (ptr_mode, src_addr);
1363
1364 dst_tree = make_tree (ptr_type_node, dst_addr);
1365 src_tree = make_tree (ptr_type_node, src_addr);
1366
1367 size_mode = TYPE_MODE (sizetype);
1368
1369 size = convert_to_mode (size_mode, size, 1);
1370 size = copy_to_mode_reg (size_mode, size);
1371
1372 /* It is incorrect to use the libcall calling conventions to call
1373 memcpy in this context. This could be a user call to memcpy and
1374 the user may wish to examine the return value from memcpy. For
1375 targets where libcalls and normal calls have different conventions
1376 for returning pointers, we could end up generating incorrect code. */
1377
1378 size_tree = make_tree (sizetype, size);
1379
1380 fn = emit_block_move_libcall_fn (true);
1381 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1382 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1383
1384 retval = expand_normal (call_expr);
1385
1386 return retval;
1387 }
1388
1389 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1390 for the function we use for block copies. */
1391
1392 static GTY(()) tree block_move_fn;
1393
1394 void
1395 init_block_move_fn (const char *asmspec)
1396 {
1397 if (!block_move_fn)
1398 {
1399 tree args, fn, attrs, attr_args;
1400
1401 fn = get_identifier ("memcpy");
1402 args = build_function_type_list (ptr_type_node, ptr_type_node,
1403 const_ptr_type_node, sizetype,
1404 NULL_TREE);
1405
1406 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1407 DECL_EXTERNAL (fn) = 1;
1408 TREE_PUBLIC (fn) = 1;
1409 DECL_ARTIFICIAL (fn) = 1;
1410 TREE_NOTHROW (fn) = 1;
1411 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1412 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1413
1414 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1415 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1416
1417 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1418
1419 block_move_fn = fn;
1420 }
1421
1422 if (asmspec)
1423 set_user_assembler_name (block_move_fn, asmspec);
1424 }
1425
1426 static tree
1427 emit_block_move_libcall_fn (int for_call)
1428 {
1429 static bool emitted_extern;
1430
1431 if (!block_move_fn)
1432 init_block_move_fn (NULL);
1433
1434 if (for_call && !emitted_extern)
1435 {
1436 emitted_extern = true;
1437 make_decl_rtl (block_move_fn);
1438 }
1439
1440 return block_move_fn;
1441 }
1442
1443 /* A subroutine of emit_block_move. Copy the data via an explicit
1444 loop. This is used only when libcalls are forbidden. */
1445 /* ??? It'd be nice to copy in hunks larger than QImode. */
1446
1447 static void
1448 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1449 unsigned int align ATTRIBUTE_UNUSED)
1450 {
1451 rtx_code_label *cmp_label, *top_label;
1452 rtx iter, x_addr, y_addr, tmp;
1453 machine_mode x_addr_mode = get_address_mode (x);
1454 machine_mode y_addr_mode = get_address_mode (y);
1455 machine_mode iter_mode;
1456
1457 iter_mode = GET_MODE (size);
1458 if (iter_mode == VOIDmode)
1459 iter_mode = word_mode;
1460
1461 top_label = gen_label_rtx ();
1462 cmp_label = gen_label_rtx ();
1463 iter = gen_reg_rtx (iter_mode);
1464
1465 emit_move_insn (iter, const0_rtx);
1466
1467 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1468 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1469 do_pending_stack_adjust ();
1470
1471 emit_jump (cmp_label);
1472 emit_label (top_label);
1473
1474 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1475 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1476
1477 if (x_addr_mode != y_addr_mode)
1478 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1479 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1480
1481 x = change_address (x, QImode, x_addr);
1482 y = change_address (y, QImode, y_addr);
1483
1484 emit_move_insn (x, y);
1485
1486 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1487 true, OPTAB_LIB_WIDEN);
1488 if (tmp != iter)
1489 emit_move_insn (iter, tmp);
1490
1491 emit_label (cmp_label);
1492
1493 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1494 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1495 }
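/* The emitted RTL has roughly this shape (a byte-at-a-time forward copy
   with the test at the bottom, predicted taken 90% of the time):

       iter = 0;  goto cmp;
     top:  *(x + iter) = *(y + iter);  iter = iter + 1;
     cmp:  if (iter < size) goto top;  */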
1496 \f
1497 /* Copy all or part of a value X into registers starting at REGNO.
1498 The number of registers to be filled is NREGS. */
1499
1500 void
1501 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1502 {
1503 int i;
1504 #ifdef HAVE_load_multiple
1505 rtx pat;
1506 rtx_insn *last;
1507 #endif
1508
1509 if (nregs == 0)
1510 return;
1511
1512 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1513 x = validize_mem (force_const_mem (mode, x));
1514
1515 /* See if the machine can do this with a load multiple insn. */
1516 #ifdef HAVE_load_multiple
1517 if (HAVE_load_multiple)
1518 {
1519 last = get_last_insn ();
1520 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1521 GEN_INT (nregs));
1522 if (pat)
1523 {
1524 emit_insn (pat);
1525 return;
1526 }
1527 else
1528 delete_insns_since (last);
1529 }
1530 #endif
1531
1532 for (i = 0; i < nregs; i++)
1533 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1534 operand_subword_force (x, i, mode));
1535 }
1536
1537 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1538 The number of registers to be filled is NREGS. */
1539
1540 void
1541 move_block_from_reg (int regno, rtx x, int nregs)
1542 {
1543 int i;
1544
1545 if (nregs == 0)
1546 return;
1547
1548 /* See if the machine can do this with a store multiple insn. */
1549 #ifdef HAVE_store_multiple
1550 if (HAVE_store_multiple)
1551 {
1552 rtx_insn *last = get_last_insn ();
1553 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1554 GEN_INT (nregs));
1555 if (pat)
1556 {
1557 emit_insn (pat);
1558 return;
1559 }
1560 else
1561 delete_insns_since (last);
1562 }
1563 #endif
1564
1565 for (i = 0; i < nregs; i++)
1566 {
1567 rtx tem = operand_subword (x, i, 1, BLKmode);
1568
1569 gcc_assert (tem);
1570
1571 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1572 }
1573 }
1574
1575 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1576 ORIG, where ORIG is a non-consecutive group of registers represented by
1577 a PARALLEL. The clone is identical to the original except in that the
1578 original set of registers is replaced by a new set of pseudo registers.
1579 The new set has the same modes as the original set. */
1580
1581 rtx
1582 gen_group_rtx (rtx orig)
1583 {
1584 int i, length;
1585 rtx *tmps;
1586
1587 gcc_assert (GET_CODE (orig) == PARALLEL);
1588
1589 length = XVECLEN (orig, 0);
1590 tmps = XALLOCAVEC (rtx, length);
1591
1592 /* Skip a NULL entry in first slot. */
1593 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1594
1595 if (i)
1596 tmps[0] = 0;
1597
1598 for (; i < length; i++)
1599 {
1600 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1601 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1602
1603 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1604 }
1605
1606 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1607 }
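/* For example, a DImode value living in two SImode hard registers might
   be described as

       (parallel [(expr_list (reg:SI 0) (const_int 0))
                  (expr_list (reg:SI 1) (const_int 4))])

   and gen_group_rtx returns the same shape with the two hard registers
   replaced by fresh SImode pseudos, keeping the byte offsets 0 and 4.  */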
1608
1609 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1610 except that values are placed in TMPS[i], and must later be moved
1611 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1612
1613 static void
1614 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1615 {
1616 rtx src;
1617 int start, i;
1618 machine_mode m = GET_MODE (orig_src);
1619
1620 gcc_assert (GET_CODE (dst) == PARALLEL);
1621
1622 if (m != VOIDmode
1623 && !SCALAR_INT_MODE_P (m)
1624 && !MEM_P (orig_src)
1625 && GET_CODE (orig_src) != CONCAT)
1626 {
1627 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1628 if (imode == BLKmode)
1629 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1630 else
1631 src = gen_reg_rtx (imode);
1632 if (imode != BLKmode)
1633 src = gen_lowpart (GET_MODE (orig_src), src);
1634 emit_move_insn (src, orig_src);
1635 /* ...and back again. */
1636 if (imode != BLKmode)
1637 src = gen_lowpart (imode, src);
1638 emit_group_load_1 (tmps, dst, src, type, ssize);
1639 return;
1640 }
1641
1642 /* Check for a NULL entry, used to indicate that the parameter goes
1643 both on the stack and in registers. */
1644 if (XEXP (XVECEXP (dst, 0, 0), 0))
1645 start = 0;
1646 else
1647 start = 1;
1648
1649 /* Process the pieces. */
1650 for (i = start; i < XVECLEN (dst, 0); i++)
1651 {
1652 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1653 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1654 unsigned int bytelen = GET_MODE_SIZE (mode);
1655 int shift = 0;
1656
1657 /* Handle trailing fragments that run over the size of the struct. */
1658 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1659 {
1660 /* Arrange to shift the fragment to where it belongs.
1661 extract_bit_field loads to the lsb of the reg. */
1662 if (
1663 #ifdef BLOCK_REG_PADDING
1664 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1665 == (BYTES_BIG_ENDIAN ? upward : downward)
1666 #else
1667 BYTES_BIG_ENDIAN
1668 #endif
1669 )
1670 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1671 bytelen = ssize - bytepos;
1672 gcc_assert (bytelen > 0);
1673 }
1674
1675 /* If we won't be loading directly from memory, protect the real source
1676 from strange tricks we might play; but make sure that the source can
1677 be loaded directly into the destination. */
1678 src = orig_src;
1679 if (!MEM_P (orig_src)
1680 && (!CONSTANT_P (orig_src)
1681 || (GET_MODE (orig_src) != mode
1682 && GET_MODE (orig_src) != VOIDmode)))
1683 {
1684 if (GET_MODE (orig_src) == VOIDmode)
1685 src = gen_reg_rtx (mode);
1686 else
1687 src = gen_reg_rtx (GET_MODE (orig_src));
1688
1689 emit_move_insn (src, orig_src);
1690 }
1691
1692 /* Optimize the access just a bit. */
1693 if (MEM_P (src)
1694 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1695 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1696 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1697 && bytelen == GET_MODE_SIZE (mode))
1698 {
1699 tmps[i] = gen_reg_rtx (mode);
1700 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1701 }
1702 else if (COMPLEX_MODE_P (mode)
1703 && GET_MODE (src) == mode
1704 && bytelen == GET_MODE_SIZE (mode))
1705 /* Let emit_move_complex do the bulk of the work. */
1706 tmps[i] = src;
1707 else if (GET_CODE (src) == CONCAT)
1708 {
1709 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1710 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1711
1712 if ((bytepos == 0 && bytelen == slen0)
1713 || (bytepos != 0 && bytepos + bytelen <= slen))
1714 {
1715 /* The following assumes that the concatenated objects all
1716 have the same size. In this case, a simple calculation
1717 can be used to determine the object and the bit field
1718 to be extracted. */
1719 tmps[i] = XEXP (src, bytepos / slen0);
1720 if (! CONSTANT_P (tmps[i])
1721 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1722 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1723 (bytepos % slen0) * BITS_PER_UNIT,
1724 1, NULL_RTX, mode, mode);
1725 }
1726 else
1727 {
1728 rtx mem;
1729
1730 gcc_assert (!bytepos);
1731 mem = assign_stack_temp (GET_MODE (src), slen);
1732 emit_move_insn (mem, src);
1733 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1734 0, 1, NULL_RTX, mode, mode);
1735 }
1736 }
 1737 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1738 SIMD register, which is currently broken. Until we get GCC
 1739 to emit proper RTL for these cases, let's dump to memory. */
1740 else if (VECTOR_MODE_P (GET_MODE (dst))
1741 && REG_P (src))
1742 {
1743 int slen = GET_MODE_SIZE (GET_MODE (src));
1744 rtx mem;
1745
1746 mem = assign_stack_temp (GET_MODE (src), slen);
1747 emit_move_insn (mem, src);
1748 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1749 }
1750 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1751 && XVECLEN (dst, 0) > 1)
1752 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1753 else if (CONSTANT_P (src))
1754 {
1755 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1756
1757 if (len == ssize)
1758 tmps[i] = src;
1759 else
1760 {
1761 rtx first, second;
1762
1763 /* TODO: const_wide_int can have sizes other than this... */
1764 gcc_assert (2 * len == ssize);
1765 split_double (src, &first, &second);
1766 if (i)
1767 tmps[i] = second;
1768 else
1769 tmps[i] = first;
1770 }
1771 }
1772 else if (REG_P (src) && GET_MODE (src) == mode)
1773 tmps[i] = src;
1774 else
1775 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1776 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1777 mode, mode);
1778
1779 if (shift)
1780 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1781 shift, tmps[i], 0);
1782 }
1783 }
1784
1785 /* Emit code to move a block SRC of type TYPE to a block DST,
1786 where DST is non-consecutive registers represented by a PARALLEL.
1787 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1788 if not known. */
1789
1790 void
1791 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1792 {
1793 rtx *tmps;
1794 int i;
1795
1796 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1797 emit_group_load_1 (tmps, dst, src, type, ssize);
1798
1799 /* Copy the extracted pieces into the proper (probable) hard regs. */
1800 for (i = 0; i < XVECLEN (dst, 0); i++)
1801 {
1802 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1803 if (d == NULL)
1804 continue;
1805 emit_move_insn (d, tmps[i]);
1806 }
1807 }
1808
1809 /* Similar, but load SRC into new pseudos in a format that looks like
1810 PARALLEL. This can later be fed to emit_group_move to get things
1811 in the right place. */
1812
1813 rtx
1814 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1815 {
1816 rtvec vec;
1817 int i;
1818
1819 vec = rtvec_alloc (XVECLEN (parallel, 0));
1820 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1821
1822 /* Convert the vector to look just like the original PARALLEL, except
1823 with the computed values. */
1824 for (i = 0; i < XVECLEN (parallel, 0); i++)
1825 {
1826 rtx e = XVECEXP (parallel, 0, i);
1827 rtx d = XEXP (e, 0);
1828
1829 if (d)
1830 {
1831 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1832 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1833 }
1834 RTVEC_ELT (vec, i) = e;
1835 }
1836
1837 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1838 }
1839
1840 /* Emit code to move a block SRC to block DST, where SRC and DST are
1841 non-consecutive groups of registers, each represented by a PARALLEL. */
1842
1843 void
1844 emit_group_move (rtx dst, rtx src)
1845 {
1846 int i;
1847
1848 gcc_assert (GET_CODE (src) == PARALLEL
1849 && GET_CODE (dst) == PARALLEL
1850 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1851
1852 /* Skip first entry if NULL. */
1853 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1854 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1855 XEXP (XVECEXP (src, 0, i), 0));
1856 }
1857
1858 /* Move a group of registers represented by a PARALLEL into pseudos. */
1859
1860 rtx
1861 emit_group_move_into_temps (rtx src)
1862 {
1863 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1864 int i;
1865
1866 for (i = 0; i < XVECLEN (src, 0); i++)
1867 {
1868 rtx e = XVECEXP (src, 0, i);
1869 rtx d = XEXP (e, 0);
1870
1871 if (d)
1872 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1873 RTVEC_ELT (vec, i) = e;
1874 }
1875
1876 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1877 }
1878
1879 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1880 where SRC is non-consecutive registers represented by a PARALLEL.
1881 SSIZE represents the total size of block ORIG_DST, or -1 if not
1882 known. */
1883
1884 void
1885 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1886 {
1887 rtx *tmps, dst;
1888 int start, finish, i;
1889 machine_mode m = GET_MODE (orig_dst);
1890
1891 gcc_assert (GET_CODE (src) == PARALLEL);
1892
1893 if (!SCALAR_INT_MODE_P (m)
1894 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1895 {
1896 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1897 if (imode == BLKmode)
1898 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1899 else
1900 dst = gen_reg_rtx (imode);
1901 emit_group_store (dst, src, type, ssize);
1902 if (imode != BLKmode)
1903 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1904 emit_move_insn (orig_dst, dst);
1905 return;
1906 }
1907
1908 /* Check for a NULL entry, used to indicate that the parameter goes
1909 both on the stack and in registers. */
1910 if (XEXP (XVECEXP (src, 0, 0), 0))
1911 start = 0;
1912 else
1913 start = 1;
1914 finish = XVECLEN (src, 0);
1915
1916 tmps = XALLOCAVEC (rtx, finish);
1917
1918 /* Copy the (probable) hard regs into pseudos. */
1919 for (i = start; i < finish; i++)
1920 {
1921 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1922 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1923 {
1924 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1925 emit_move_insn (tmps[i], reg);
1926 }
1927 else
1928 tmps[i] = reg;
1929 }
1930
1931 /* If we won't be storing directly into memory, protect the real destination
1932 from strange tricks we might play. */
1933 dst = orig_dst;
1934 if (GET_CODE (dst) == PARALLEL)
1935 {
1936 rtx temp;
1937
1938 /* We can get a PARALLEL dst if there is a conditional expression in
1939 a return statement. In that case, the dst and src are the same,
1940 so no action is necessary. */
1941 if (rtx_equal_p (dst, src))
1942 return;
1943
1944 /* It is unclear if we can ever reach here, but we may as well handle
1945 it. Allocate a temporary, and split this into a store/load to/from
1946 the temporary. */
1947 temp = assign_stack_temp (GET_MODE (dst), ssize);
1948 emit_group_store (temp, src, type, ssize);
1949 emit_group_load (dst, temp, type, ssize);
1950 return;
1951 }
1952 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1953 {
1954 machine_mode outer = GET_MODE (dst);
1955 machine_mode inner;
1956 HOST_WIDE_INT bytepos;
1957 bool done = false;
1958 rtx temp;
1959
1960 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1961 dst = gen_reg_rtx (outer);
1962
1963 /* Make life a bit easier for combine. */
1964 /* If the first element of the vector is the low part
1965 of the destination mode, use a paradoxical subreg to
1966 initialize the destination. */
1967 if (start < finish)
1968 {
1969 inner = GET_MODE (tmps[start]);
1970 bytepos = subreg_lowpart_offset (inner, outer);
1971 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1972 {
1973 temp = simplify_gen_subreg (outer, tmps[start],
1974 inner, 0);
1975 if (temp)
1976 {
1977 emit_move_insn (dst, temp);
1978 done = true;
1979 start++;
1980 }
1981 }
1982 }
1983
1984 /* If the first element wasn't the low part, try the last. */
1985 if (!done
1986 && start < finish - 1)
1987 {
1988 inner = GET_MODE (tmps[finish - 1]);
1989 bytepos = subreg_lowpart_offset (inner, outer);
1990 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1991 {
1992 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1993 inner, 0);
1994 if (temp)
1995 {
1996 emit_move_insn (dst, temp);
1997 done = true;
1998 finish--;
1999 }
2000 }
2001 }
2002
2003 /* Otherwise, simply initialize the result to zero. */
2004 if (!done)
2005 emit_move_insn (dst, CONST0_RTX (outer));
2006 }
2007
2008 /* Process the pieces. */
2009 for (i = start; i < finish; i++)
2010 {
2011 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2012 machine_mode mode = GET_MODE (tmps[i]);
2013 unsigned int bytelen = GET_MODE_SIZE (mode);
2014 unsigned int adj_bytelen;
2015 rtx dest = dst;
2016
2017 /* Handle trailing fragments that run over the size of the struct. */
2018 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2019 adj_bytelen = ssize - bytepos;
2020 else
2021 adj_bytelen = bytelen;
2022
2023 if (GET_CODE (dst) == CONCAT)
2024 {
2025 if (bytepos + adj_bytelen
2026 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2027 dest = XEXP (dst, 0);
2028 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2029 {
2030 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2031 dest = XEXP (dst, 1);
2032 }
2033 else
2034 {
2035 machine_mode dest_mode = GET_MODE (dest);
2036 machine_mode tmp_mode = GET_MODE (tmps[i]);
2037
2038 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2039
2040 if (GET_MODE_ALIGNMENT (dest_mode)
2041 >= GET_MODE_ALIGNMENT (tmp_mode))
2042 {
2043 dest = assign_stack_temp (dest_mode,
2044 GET_MODE_SIZE (dest_mode));
2045 emit_move_insn (adjust_address (dest,
2046 tmp_mode,
2047 bytepos),
2048 tmps[i]);
2049 dst = dest;
2050 }
2051 else
2052 {
2053 dest = assign_stack_temp (tmp_mode,
2054 GET_MODE_SIZE (tmp_mode));
2055 emit_move_insn (dest, tmps[i]);
2056 dst = adjust_address (dest, dest_mode, bytepos);
2057 }
2058 break;
2059 }
2060 }
2061
2062 /* Handle trailing fragments that run over the size of the struct. */
2063 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2064 {
2065 /* store_bit_field always takes its value from the lsb.
2066 Move the fragment to the lsb if it's not already there. */
2067 if (
2068 #ifdef BLOCK_REG_PADDING
2069 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2070 == (BYTES_BIG_ENDIAN ? upward : downward)
2071 #else
2072 BYTES_BIG_ENDIAN
2073 #endif
2074 )
2075 {
2076 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2077 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2078 shift, tmps[i], 0);
2079 }
2080
2081 /* Make sure not to write past the end of the struct. */
2082 store_bit_field (dest,
2083 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2084 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2085 VOIDmode, tmps[i]);
2086 }
2087
2088 /* Optimize the access just a bit. */
2089 else if (MEM_P (dest)
2090 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2091 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2092 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2093 && bytelen == GET_MODE_SIZE (mode))
2094 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2095
2096 else
2097 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2098 0, 0, mode, tmps[i]);
2099 }
2100
2101 /* Copy from the pseudo into the (probable) hard reg. */
2102 if (orig_dst != dst)
2103 emit_move_insn (orig_dst, dst);
2104 }
2105
2106 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2107 of the value stored in X. */
2108
2109 rtx
2110 maybe_emit_group_store (rtx x, tree type)
2111 {
2112 machine_mode mode = TYPE_MODE (type);
2113 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2114 if (GET_CODE (x) == PARALLEL)
2115 {
2116 rtx result = gen_reg_rtx (mode);
2117 emit_group_store (result, x, type, int_size_in_bytes (type));
2118 return result;
2119 }
2120 return x;
2121 }
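
/* Usage sketch: after expanding a call whose result may come back as a
   PARALLEL, a caller can flatten it into an ordinary register first.
   CALL_EXPR here is a hypothetical CALL_EXPR tree.

     rtx val = expand_normal (call_expr);
     val = maybe_emit_group_store (val, TREE_TYPE (call_expr));

   Afterwards VAL is guaranteed not to be a PARALLEL.  */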
2122
2123 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2124
2125 This is used on targets that return BLKmode values in registers. */
2126
2127 void
2128 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2129 {
2130 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2131 rtx src = NULL, dst = NULL;
2132 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2133 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2134 machine_mode mode = GET_MODE (srcreg);
2135 machine_mode tmode = GET_MODE (target);
2136 machine_mode copy_mode;
2137
2138 /* BLKmode registers created in the back-end shouldn't have survived. */
2139 gcc_assert (mode != BLKmode);
2140
2141 /* If the structure doesn't take up a whole number of words, see whether
2142 SRCREG is padded on the left or on the right. If it's on the left,
2143 set PADDING_CORRECTION to the number of bits to skip.
2144
2145 In most ABIs, the structure will be returned at the least significant end of
2146 the register, which translates to right padding on little-endian
2147 targets and left padding on big-endian targets. The opposite
2148 holds if the structure is returned at the most significant
2149 end of the register. */
2150 if (bytes % UNITS_PER_WORD != 0
2151 && (targetm.calls.return_in_msb (type)
2152 ? !BYTES_BIG_ENDIAN
2153 : BYTES_BIG_ENDIAN))
2154 padding_correction
2155 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2156
2157 /* We can use a single move if we have an exact mode for the size. */
2158 else if (MEM_P (target)
2159 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2160 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2161 && bytes == GET_MODE_SIZE (mode))
2162 {
2163 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2164 return;
2165 }
2166
2167 /* And if we additionally have the same mode for a register. */
2168 else if (REG_P (target)
2169 && GET_MODE (target) == mode
2170 && bytes == GET_MODE_SIZE (mode))
2171 {
2172 emit_move_insn (target, srcreg);
2173 return;
2174 }
2175
2176 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2177 into a new pseudo which is a full word. */
2178 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2179 {
2180 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2181 mode = word_mode;
2182 }
2183
2184 /* Copy the structure BITSIZE bits at a time. If the target lives in
2185 memory, take care of not reading/writing past its end by selecting
2186 a copy mode suited to BITSIZE. This should always be possible given
2187 how it is computed.
2188
2189 If the target lives in a register, make sure not to select a copy mode
2190 larger than the mode of the register.
2191
2192 We could probably emit more efficient code for machines which do not use
2193 strict alignment, but it doesn't seem worth the effort at the current
2194 time. */
2195
2196 copy_mode = word_mode;
2197 if (MEM_P (target))
2198 {
2199 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2200 if (mem_mode != BLKmode)
2201 copy_mode = mem_mode;
2202 }
2203 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2204 copy_mode = tmode;
2205
2206 for (bitpos = 0, xbitpos = padding_correction;
2207 bitpos < bytes * BITS_PER_UNIT;
2208 bitpos += bitsize, xbitpos += bitsize)
2209 {
2210 /* We need a new source operand each time xbitpos is on a
2211 word boundary and when xbitpos == padding_correction
2212 (the first time through). */
2213 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2214 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2215
2216 /* We need a new destination operand each time bitpos is on
2217 a word boundary. */
2218 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2219 dst = target;
2220 else if (bitpos % BITS_PER_WORD == 0)
2221 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2222
2223 /* Use xbitpos for the source extraction (right justified) and
2224 bitpos for the destination store (left justified). */
2225 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2226 extract_bit_field (src, bitsize,
2227 xbitpos % BITS_PER_WORD, 1,
2228 NULL_RTX, copy_mode, copy_mode));
2229 }
2230 }
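
/* Usage sketch: on a target that returns a small BLKmode aggregate in a
   hard register, the caller side can spill the value into its destination
   block with copy_blkmode_from_reg.  The register number and TARGET_MEM
   (a BLKmode MEM large enough for TYPE) are assumptions of this sketch.

     rtx retreg = gen_rtx_REG (word_mode, 0);
     copy_blkmode_from_reg (target_mem, retreg, type);
*/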
2231
2232 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2233 register if it contains any data, otherwise return null.
2234
2235 This is used on targets that return BLKmode values in registers. */
2236
2237 rtx
2238 copy_blkmode_to_reg (machine_mode mode, tree src)
2239 {
2240 int i, n_regs;
2241 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2242 unsigned int bitsize;
2243 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2244 machine_mode dst_mode;
2245
2246 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2247
2248 x = expand_normal (src);
2249
2250 bytes = int_size_in_bytes (TREE_TYPE (src));
2251 if (bytes == 0)
2252 return NULL_RTX;
2253
2254 /* If the structure doesn't take up a whole number of words, see
2255 whether the register value should be padded on the left or on
2256 the right. Set PADDING_CORRECTION to the number of padding
2257 bits needed on the left side.
2258
2259 In most ABIs, the structure will be returned at the least significant end of
2260 the register, which translates to right padding on little-endian
2261 targets and left padding on big-endian targets. The opposite
2262 holds if the structure is returned at the most significant
2263 end of the register. */
2264 if (bytes % UNITS_PER_WORD != 0
2265 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2266 ? !BYTES_BIG_ENDIAN
2267 : BYTES_BIG_ENDIAN))
2268 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2269 * BITS_PER_UNIT));
2270
2271 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2272 dst_words = XALLOCAVEC (rtx, n_regs);
2273 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2274
2275 /* Copy the structure BITSIZE bits at a time. */
2276 for (bitpos = 0, xbitpos = padding_correction;
2277 bitpos < bytes * BITS_PER_UNIT;
2278 bitpos += bitsize, xbitpos += bitsize)
2279 {
2280 /* We need a new destination pseudo each time xbitpos is
2281 on a word boundary and when xbitpos == padding_correction
2282 (the first time through). */
2283 if (xbitpos % BITS_PER_WORD == 0
2284 || xbitpos == padding_correction)
2285 {
2286 /* Generate an appropriate register. */
2287 dst_word = gen_reg_rtx (word_mode);
2288 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2289
2290 /* Clear the destination before we move anything into it. */
2291 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2292 }
2293
2294 /* We need a new source operand each time bitpos is on a word
2295 boundary. */
2296 if (bitpos % BITS_PER_WORD == 0)
2297 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2298
2299 /* Use bitpos for the source extraction (left justified) and
2300 xbitpos for the destination store (right justified). */
2301 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2302 0, 0, word_mode,
2303 extract_bit_field (src_word, bitsize,
2304 bitpos % BITS_PER_WORD, 1,
2305 NULL_RTX, word_mode, word_mode));
2306 }
2307
2308 if (mode == BLKmode)
2309 {
2310 /* Find the smallest integer mode large enough to hold the
2311 entire structure. */
2312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2313 mode != VOIDmode;
2314 mode = GET_MODE_WIDER_MODE (mode))
2315 /* Have we found a large enough mode? */
2316 if (GET_MODE_SIZE (mode) >= bytes)
2317 break;
2318
2319 /* A suitable mode should have been found. */
2320 gcc_assert (mode != VOIDmode);
2321 }
2322
2323 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2324 dst_mode = word_mode;
2325 else
2326 dst_mode = mode;
2327 dst = gen_reg_rtx (dst_mode);
2328
2329 for (i = 0; i < n_regs; i++)
2330 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2331
2332 if (mode != dst_mode)
2333 dst = gen_lowpart (mode, dst);
2334
2335 return dst;
2336 }
2337
2338 /* Add a USE expression for REG to the (possibly empty) list pointed
2339 to by CALL_FUSAGE.  REG should denote a hard register; pseudos are ignored.  */
2340
2341 void
2342 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2343 {
2344 gcc_assert (REG_P (reg));
2345
2346 if (!HARD_REGISTER_P (reg))
2347 return;
2348
2349 *call_fusage
2350 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2351 }
2352
2353 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2354 to by CALL_FUSAGE. REG must denote a hard register. */
2355
2356 void
2357 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2358 {
2359 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2360
2361 *call_fusage
2362 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2363 }
2364
2365 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2366 starting at REGNO. All of these registers must be hard registers. */
2367
2368 void
2369 use_regs (rtx *call_fusage, int regno, int nregs)
2370 {
2371 int i;
2372
2373 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2374
2375 for (i = 0; i < nregs; i++)
2376 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2377 }
2378
2379 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2380 PARALLEL REGS. This is for calls that pass values in multiple
2381 non-contiguous locations. The Irix 6 ABI has examples of this. */
2382
2383 void
2384 use_group_regs (rtx *call_fusage, rtx regs)
2385 {
2386 int i;
2387
2388 for (i = 0; i < XVECLEN (regs, 0); i++)
2389 {
2390 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2391
2392 /* A NULL entry means the parameter goes both on the stack and in
2393 registers. This can also be a MEM for targets that pass values
2394 partially on the stack and partially in registers. */
2395 if (reg != 0 && REG_P (reg))
2396 use_reg (call_fusage, reg);
2397 }
2398 }
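
/* Usage sketch: the call expanders use the helpers above to build up the
   CALL_INSN_FUNCTION_USAGE list for a call.  FIRST_ARG_REGNO below is a
   hypothetical hard register number.

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, FIRST_ARG_REGNO, 2);

   CALL_FUSAGE is later attached to the emitted call insn.  */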
2399
2400 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2401 assignment and the code of the expression on the RHS is CODE.  Return
2402 NULL otherwise. */
2403
2404 static gimple
2405 get_def_for_expr (tree name, enum tree_code code)
2406 {
2407 gimple def_stmt;
2408
2409 if (TREE_CODE (name) != SSA_NAME)
2410 return NULL;
2411
2412 def_stmt = get_gimple_for_ssa_name (name);
2413 if (!def_stmt
2414 || gimple_assign_rhs_code (def_stmt) != code)
2415 return NULL;
2416
2417 return def_stmt;
2418 }
2419
2420 #ifdef HAVE_conditional_move
2421 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2422 assignment and the class of the expression on the RHS is CLASS.  Return
2423 NULL otherwise. */
2424
2425 static gimple
2426 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2427 {
2428 gimple def_stmt;
2429
2430 if (TREE_CODE (name) != SSA_NAME)
2431 return NULL;
2432
2433 def_stmt = get_gimple_for_ssa_name (name);
2434 if (!def_stmt
2435 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2436 return NULL;
2437
2438 return def_stmt;
2439 }
2440 #endif
2441 \f
2442
2443 /* Determine whether the LEN bytes generated by CONSTFUN can be
2444 stored to memory using several move instructions. CONSTFUNDATA is
2445 a pointer which will be passed as argument in every CONSTFUN call.
2446 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2447 a memset operation and false if it's a copy of a constant string.
2448 Return nonzero if a call to store_by_pieces should succeed. */
2449
2450 int
2451 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2452 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2453 void *constfundata, unsigned int align, bool memsetp)
2454 {
2455 unsigned HOST_WIDE_INT l;
2456 unsigned int max_size;
2457 HOST_WIDE_INT offset = 0;
2458 machine_mode mode;
2459 enum insn_code icode;
2460 int reverse;
2461 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2462 rtx cst ATTRIBUTE_UNUSED;
2463
2464 if (len == 0)
2465 return 1;
2466
2467 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2468 memsetp
2469 ? SET_BY_PIECES
2470 : STORE_BY_PIECES,
2471 optimize_insn_for_speed_p ()))
2472 return 0;
2473
2474 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2475
2476 /* We would first store what we can in the largest integer mode, then go to
2477 successively smaller modes. */
2478
2479 for (reverse = 0;
2480 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2481 reverse++)
2482 {
2483 l = len;
2484 max_size = STORE_MAX_PIECES + 1;
2485 while (max_size > 1 && l > 0)
2486 {
2487 mode = widest_int_mode_for_size (max_size);
2488
2489 if (mode == VOIDmode)
2490 break;
2491
2492 icode = optab_handler (mov_optab, mode);
2493 if (icode != CODE_FOR_nothing
2494 && align >= GET_MODE_ALIGNMENT (mode))
2495 {
2496 unsigned int size = GET_MODE_SIZE (mode);
2497
2498 while (l >= size)
2499 {
2500 if (reverse)
2501 offset -= size;
2502
2503 cst = (*constfun) (constfundata, offset, mode);
2504 if (!targetm.legitimate_constant_p (mode, cst))
2505 return 0;
2506
2507 if (!reverse)
2508 offset += size;
2509
2510 l -= size;
2511 }
2512 }
2513
2514 max_size = GET_MODE_SIZE (mode);
2515 }
2516
2517 /* The code above should have handled everything. */
2518 gcc_assert (!l);
2519 }
2520
2521 return 1;
2522 }
2523
2524 /* Generate several move instructions to store LEN bytes generated by
2525 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2526 pointer which will be passed as argument in every CONSTFUN call.
2527 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2528 a memset operation and false if it's a copy of a constant string.
2529 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2530 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2531 stpcpy. */
2532
2533 rtx
2534 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2535 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2536 void *constfundata, unsigned int align, bool memsetp, int endp)
2537 {
2538 machine_mode to_addr_mode = get_address_mode (to);
2539 struct store_by_pieces_d data;
2540
2541 if (len == 0)
2542 {
2543 gcc_assert (endp != 2);
2544 return to;
2545 }
2546
2547 gcc_assert (targetm.use_by_pieces_infrastructure_p
2548 (len, align,
2549 memsetp
2550 ? SET_BY_PIECES
2551 : STORE_BY_PIECES,
2552 optimize_insn_for_speed_p ()));
2553
2554 data.constfun = constfun;
2555 data.constfundata = constfundata;
2556 data.len = len;
2557 data.to = to;
2558 store_by_pieces_1 (&data, align);
2559 if (endp)
2560 {
2561 rtx to1;
2562
2563 gcc_assert (!data.reverse);
2564 if (data.autinc_to)
2565 {
2566 if (endp == 2)
2567 {
2568 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2569 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2570 else
2571 data.to_addr = copy_to_mode_reg (to_addr_mode,
2572 plus_constant (to_addr_mode,
2573 data.to_addr,
2574 -1));
2575 }
2576 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2577 data.offset);
2578 }
2579 else
2580 {
2581 if (endp == 2)
2582 --data.offset;
2583 to1 = adjust_address (data.to, QImode, data.offset);
2584 }
2585 return to1;
2586 }
2587 else
2588 return data.to;
2589 }
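
/* Illustrative CONSTFUN sketch, in the spirit of the callbacks used by the
   string built-in expanders: return the MODE-sized piece of a host string
   starting at OFFSET.  The function name and the convention that DATA
   points at the source bytes are assumptions of this sketch.

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset, machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }

   A caller would check can_store_by_pieces (len, example_read_str,
   (void *) str, align, false) before calling store_by_pieces with the
   same callback.  */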
2590
2591 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2592 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2593
2594 static void
2595 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2596 {
2597 struct store_by_pieces_d data;
2598
2599 if (len == 0)
2600 return;
2601
2602 data.constfun = clear_by_pieces_1;
2603 data.constfundata = NULL;
2604 data.len = len;
2605 data.to = to;
2606 store_by_pieces_1 (&data, align);
2607 }
2608
2609 /* Callback routine for clear_by_pieces.
2610 Return const0_rtx unconditionally. */
2611
2612 static rtx
2613 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2614 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2615 machine_mode mode ATTRIBUTE_UNUSED)
2616 {
2617 return const0_rtx;
2618 }
2619
2620 /* Subroutine of clear_by_pieces and store_by_pieces.
2621 Generate several move instructions to store LEN bytes of block TO. (A MEM
2622 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2623
2624 static void
2625 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2626 unsigned int align ATTRIBUTE_UNUSED)
2627 {
2628 machine_mode to_addr_mode = get_address_mode (data->to);
2629 rtx to_addr = XEXP (data->to, 0);
2630 unsigned int max_size = STORE_MAX_PIECES + 1;
2631 enum insn_code icode;
2632
2633 data->offset = 0;
2634 data->to_addr = to_addr;
2635 data->autinc_to
2636 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2637 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2638
2639 data->explicit_inc_to = 0;
2640 data->reverse
2641 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2642 if (data->reverse)
2643 data->offset = data->len;
2644
2645 /* If storing requires more than two move insns,
2646 copy addresses to registers (to make displacements shorter)
2647 and use post-increment if available. */
2648 if (!data->autinc_to
2649 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2650 {
2651 /* Determine the main mode we'll be using.
2652 MODE might not be used depending on the definitions of the
2653 USE_* macros below. */
2654 machine_mode mode ATTRIBUTE_UNUSED
2655 = widest_int_mode_for_size (max_size);
2656
2657 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2658 {
2659 data->to_addr = copy_to_mode_reg (to_addr_mode,
2660 plus_constant (to_addr_mode,
2661 to_addr,
2662 data->len));
2663 data->autinc_to = 1;
2664 data->explicit_inc_to = -1;
2665 }
2666
2667 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2668 && ! data->autinc_to)
2669 {
2670 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2671 data->autinc_to = 1;
2672 data->explicit_inc_to = 1;
2673 }
2674
2675 if ( !data->autinc_to && CONSTANT_P (to_addr))
2676 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2677 }
2678
2679 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2680
2681 /* First store what we can in the largest integer mode, then go to
2682 successively smaller modes. */
2683
2684 while (max_size > 1 && data->len > 0)
2685 {
2686 machine_mode mode = widest_int_mode_for_size (max_size);
2687
2688 if (mode == VOIDmode)
2689 break;
2690
2691 icode = optab_handler (mov_optab, mode);
2692 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2693 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2694
2695 max_size = GET_MODE_SIZE (mode);
2696 }
2697
2698 /* The code above should have handled everything. */
2699 gcc_assert (!data->len);
2700 }
2701
2702 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2703 with move instructions for mode MODE. GENFUN is the gen_... function
2704 to make a move insn for that mode. DATA has all the other info. */
2705
2706 static void
2707 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2708 struct store_by_pieces_d *data)
2709 {
2710 unsigned int size = GET_MODE_SIZE (mode);
2711 rtx to1, cst;
2712
2713 while (data->len >= size)
2714 {
2715 if (data->reverse)
2716 data->offset -= size;
2717
2718 if (data->autinc_to)
2719 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2720 data->offset);
2721 else
2722 to1 = adjust_address (data->to, mode, data->offset);
2723
2724 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2725 emit_insn (gen_add2_insn (data->to_addr,
2726 gen_int_mode (-(HOST_WIDE_INT) size,
2727 GET_MODE (data->to_addr))));
2728
2729 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2730 emit_insn ((*genfun) (to1, cst));
2731
2732 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2733 emit_insn (gen_add2_insn (data->to_addr,
2734 gen_int_mode (size,
2735 GET_MODE (data->to_addr))));
2736
2737 if (! data->reverse)
2738 data->offset += size;
2739
2740 data->len -= size;
2741 }
2742 }
2743 \f
2744 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2745 its length in bytes. */
2746
2747 rtx
2748 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2749 unsigned int expected_align, HOST_WIDE_INT expected_size,
2750 unsigned HOST_WIDE_INT min_size,
2751 unsigned HOST_WIDE_INT max_size,
2752 unsigned HOST_WIDE_INT probable_max_size)
2753 {
2754 machine_mode mode = GET_MODE (object);
2755 unsigned int align;
2756
2757 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2758
2759 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2760 just move a zero. Otherwise, do this a piece at a time. */
2761 if (mode != BLKmode
2762 && CONST_INT_P (size)
2763 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2764 {
2765 rtx zero = CONST0_RTX (mode);
2766 if (zero != NULL)
2767 {
2768 emit_move_insn (object, zero);
2769 return NULL;
2770 }
2771
2772 if (COMPLEX_MODE_P (mode))
2773 {
2774 zero = CONST0_RTX (GET_MODE_INNER (mode));
2775 if (zero != NULL)
2776 {
2777 write_complex_part (object, zero, 0);
2778 write_complex_part (object, zero, 1);
2779 return NULL;
2780 }
2781 }
2782 }
2783
2784 if (size == const0_rtx)
2785 return NULL;
2786
2787 align = MEM_ALIGN (object);
2788
2789 if (CONST_INT_P (size)
2790 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2791 CLEAR_BY_PIECES,
2792 optimize_insn_for_speed_p ()))
2793 clear_by_pieces (object, INTVAL (size), align);
2794 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2795 expected_align, expected_size,
2796 min_size, max_size, probable_max_size))
2797 ;
2798 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2799 return set_storage_via_libcall (object, size, const0_rtx,
2800 method == BLOCK_OP_TAILCALL);
2801 else
2802 gcc_unreachable ();
2803
2804 return NULL;
2805 }
2806
2807 rtx
2808 clear_storage (rtx object, rtx size, enum block_op_methods method)
2809 {
2810 unsigned HOST_WIDE_INT max, min = 0;
2811 if (GET_CODE (size) == CONST_INT)
2812 min = max = UINTVAL (size);
2813 else
2814 max = GET_MODE_MASK (GET_MODE (size));
2815 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2816 }
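
/* Usage sketch: zeroing an N-byte BLKmode temporary (N a hypothetical
   compile-time constant) goes through clear_storage, which chooses among
   clear_by_pieces, a setmem pattern and a memset libcall as shown above.

     rtx mem = assign_stack_temp (BLKmode, n);
     clear_storage (mem, GEN_INT (n), BLOCK_OP_NORMAL);
*/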
2817
2818
2819 /* A subroutine of clear_storage. Expand a call to memset.
2820 Return the return value of memset, 0 otherwise. */
2821
2822 rtx
2823 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2824 {
2825 tree call_expr, fn, object_tree, size_tree, val_tree;
2826 machine_mode size_mode;
2827 rtx retval;
2828
2829 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2830 place those new pseudos into a VAR_DECL and use them later.  */
2831
2832 object = copy_addr_to_reg (XEXP (object, 0));
2833
2834 size_mode = TYPE_MODE (sizetype);
2835 size = convert_to_mode (size_mode, size, 1);
2836 size = copy_to_mode_reg (size_mode, size);
2837
2838 /* It is incorrect to use the libcall calling conventions to call
2839 memset in this context. This could be a user call to memset and
2840 the user may wish to examine the return value from memset. For
2841 targets where libcalls and normal calls have different conventions
2842 for returning pointers, we could end up generating incorrect code. */
2843
2844 object_tree = make_tree (ptr_type_node, object);
2845 if (!CONST_INT_P (val))
2846 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2847 size_tree = make_tree (sizetype, size);
2848 val_tree = make_tree (integer_type_node, val);
2849
2850 fn = clear_storage_libcall_fn (true);
2851 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2852 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2853
2854 retval = expand_normal (call_expr);
2855
2856 return retval;
2857 }
2858
2859 /* A subroutine of set_storage_via_libcall. Create the tree node
2860 for the function we use for block clears. */
2861
2862 tree block_clear_fn;
2863
2864 void
2865 init_block_clear_fn (const char *asmspec)
2866 {
2867 if (!block_clear_fn)
2868 {
2869 tree fn, args;
2870
2871 fn = get_identifier ("memset");
2872 args = build_function_type_list (ptr_type_node, ptr_type_node,
2873 integer_type_node, sizetype,
2874 NULL_TREE);
2875
2876 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2877 DECL_EXTERNAL (fn) = 1;
2878 TREE_PUBLIC (fn) = 1;
2879 DECL_ARTIFICIAL (fn) = 1;
2880 TREE_NOTHROW (fn) = 1;
2881 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2882 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2883
2884 block_clear_fn = fn;
2885 }
2886
2887 if (asmspec)
2888 set_user_assembler_name (block_clear_fn, asmspec);
2889 }
2890
2891 static tree
2892 clear_storage_libcall_fn (int for_call)
2893 {
2894 static bool emitted_extern;
2895
2896 if (!block_clear_fn)
2897 init_block_clear_fn (NULL);
2898
2899 if (for_call && !emitted_extern)
2900 {
2901 emitted_extern = true;
2902 make_decl_rtl (block_clear_fn);
2903 }
2904
2905 return block_clear_fn;
2906 }
2907 \f
2908 /* Expand a setmem pattern; return true if successful. */
2909
2910 bool
2911 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2912 unsigned int expected_align, HOST_WIDE_INT expected_size,
2913 unsigned HOST_WIDE_INT min_size,
2914 unsigned HOST_WIDE_INT max_size,
2915 unsigned HOST_WIDE_INT probable_max_size)
2916 {
2917 /* Try the most limited insn first, because there's no point
2918 including more than one in the machine description unless
2919 the more limited one has some advantage. */
2920
2921 machine_mode mode;
2922
2923 if (expected_align < align)
2924 expected_align = align;
2925 if (expected_size != -1)
2926 {
2927 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2928 expected_size = max_size;
2929 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2930 expected_size = min_size;
2931 }
2932
2933 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2934 mode = GET_MODE_WIDER_MODE (mode))
2935 {
2936 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2937
2938 if (code != CODE_FOR_nothing
2939 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2940 here because if SIZE is less than the mode mask, as it is
2941 returned by the macro, it will definitely be less than the
2942 actual mode mask. Since SIZE is within the Pmode address
2943 space, we limit MODE to Pmode. */
2944 && ((CONST_INT_P (size)
2945 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2946 <= (GET_MODE_MASK (mode) >> 1)))
2947 || max_size <= (GET_MODE_MASK (mode) >> 1)
2948 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2949 {
2950 struct expand_operand ops[9];
2951 unsigned int nops;
2952
2953 nops = insn_data[(int) code].n_generator_args;
2954 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2955
2956 create_fixed_operand (&ops[0], object);
2957 /* The check above guarantees that this size conversion is valid. */
2958 create_convert_operand_to (&ops[1], size, mode, true);
2959 create_convert_operand_from (&ops[2], val, byte_mode, true);
2960 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2961 if (nops >= 6)
2962 {
2963 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2964 create_integer_operand (&ops[5], expected_size);
2965 }
2966 if (nops >= 8)
2967 {
2968 create_integer_operand (&ops[6], min_size);
2969 /* If we cannot represent the maximal size,
2970 make the parameter NULL. */
2971 if ((HOST_WIDE_INT) max_size != -1)
2972 create_integer_operand (&ops[7], max_size);
2973 else
2974 create_fixed_operand (&ops[7], NULL);
2975 }
2976 if (nops == 9)
2977 {
2978 /* If we cannot represent the maximal size,
2979 make the parameter NULL. */
2980 if ((HOST_WIDE_INT) probable_max_size != -1)
2981 create_integer_operand (&ops[8], probable_max_size);
2982 else
2983 create_fixed_operand (&ops[8], NULL);
2984 }
2985 if (maybe_expand_insn (code, nops, ops))
2986 return true;
2987 }
2988 }
2989
2990 return false;
2991 }
2992
2993 \f
2994 /* Write to one of the components of the complex value CPLX. Write VAL to
2995 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2996
2997 void
2998 write_complex_part (rtx cplx, rtx val, bool imag_p)
2999 {
3000 machine_mode cmode;
3001 machine_mode imode;
3002 unsigned ibitsize;
3003
3004 if (GET_CODE (cplx) == CONCAT)
3005 {
3006 emit_move_insn (XEXP (cplx, imag_p), val);
3007 return;
3008 }
3009
3010 cmode = GET_MODE (cplx);
3011 imode = GET_MODE_INNER (cmode);
3012 ibitsize = GET_MODE_BITSIZE (imode);
3013
3014 /* For MEMs simplify_gen_subreg may generate an invalid new address
3015 because, e.g., the original address is considered mode-dependent
3016 by the target, which restricts simplify_subreg from invoking
3017 adjust_address_nv. Instead of preparing fallback support for an
3018 invalid address, we call adjust_address_nv directly. */
3019 if (MEM_P (cplx))
3020 {
3021 emit_move_insn (adjust_address_nv (cplx, imode,
3022 imag_p ? GET_MODE_SIZE (imode) : 0),
3023 val);
3024 return;
3025 }
3026
3027 /* If the sub-object is at least word sized, then we know that subregging
3028 will work. This special case is important, since store_bit_field
3029 wants to operate on integer modes, and there's rarely an OImode to
3030 correspond to TCmode. */
3031 if (ibitsize >= BITS_PER_WORD
3032 /* For hard regs we have exact predicates. Assume we can split
3033 the original object if it spans an even number of hard regs.
3034 This special case is important for SCmode on 64-bit platforms
3035 where the natural size of floating-point regs is 32-bit. */
3036 || (REG_P (cplx)
3037 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3038 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3039 {
3040 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3041 imag_p ? GET_MODE_SIZE (imode) : 0);
3042 if (part)
3043 {
3044 emit_move_insn (part, val);
3045 return;
3046 }
3047 else
3048 /* simplify_gen_subreg may fail for sub-word MEMs. */
3049 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3050 }
3051
3052 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3053 }
3054
3055 /* Extract one of the components of the complex value CPLX. Extract the
3056 real part if IMAG_P is false, and the imaginary part if it's true. */
3057
3058 static rtx
3059 read_complex_part (rtx cplx, bool imag_p)
3060 {
3061 machine_mode cmode, imode;
3062 unsigned ibitsize;
3063
3064 if (GET_CODE (cplx) == CONCAT)
3065 return XEXP (cplx, imag_p);
3066
3067 cmode = GET_MODE (cplx);
3068 imode = GET_MODE_INNER (cmode);
3069 ibitsize = GET_MODE_BITSIZE (imode);
3070
3071 /* Special case reads from complex constants that got spilled to memory. */
3072 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3073 {
3074 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3075 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3076 {
3077 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3078 if (CONSTANT_CLASS_P (part))
3079 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3080 }
3081 }
3082
3083 /* For MEMs simplify_gen_subreg may generate an invalid new address
3084 because, e.g., the original address is considered mode-dependent
3085 by the target, which restricts simplify_subreg from invoking
3086 adjust_address_nv. Instead of preparing fallback support for an
3087 invalid address, we call adjust_address_nv directly. */
3088 if (MEM_P (cplx))
3089 return adjust_address_nv (cplx, imode,
3090 imag_p ? GET_MODE_SIZE (imode) : 0);
3091
3092 /* If the sub-object is at least word sized, then we know that subregging
3093 will work. This special case is important, since extract_bit_field
3094 wants to operate on integer modes, and there's rarely an OImode to
3095 correspond to TCmode. */
3096 if (ibitsize >= BITS_PER_WORD
3097 /* For hard regs we have exact predicates. Assume we can split
3098 the original object if it spans an even number of hard regs.
3099 This special case is important for SCmode on 64-bit platforms
3100 where the natural size of floating-point regs is 32-bit. */
3101 || (REG_P (cplx)
3102 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3103 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3104 {
3105 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3106 imag_p ? GET_MODE_SIZE (imode) : 0);
3107 if (ret)
3108 return ret;
3109 else
3110 /* simplify_gen_subreg may fail for sub-word MEMs. */
3111 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3112 }
3113
3114 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3115 true, NULL_RTX, imode, imode);
3116 }
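
/* Illustrative sketch: a complex conjugate can be expanded with the two
   helpers above by copying the real part and negating the imaginary part.
   OP and TARGET are assumed to be non-overlapping rtxes of the same
   complex mode; expand_unop comes from the optabs code.

     machine_mode imode = GET_MODE_INNER (GET_MODE (target));
     write_complex_part (target, read_complex_part (op, false), false);
     write_complex_part (target,
                         expand_unop (imode, neg_optab,
                                      read_complex_part (op, true),
                                      NULL_RTX, 0),
                         true);
*/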
3117 \f
3118 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3119 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3120 represented in NEW_MODE. If FORCE is true, this will never happen, as
3121 we'll force-create a SUBREG if needed. */
3122
3123 static rtx
3124 emit_move_change_mode (machine_mode new_mode,
3125 machine_mode old_mode, rtx x, bool force)
3126 {
3127 rtx ret;
3128
3129 if (push_operand (x, GET_MODE (x)))
3130 {
3131 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3132 MEM_COPY_ATTRIBUTES (ret, x);
3133 }
3134 else if (MEM_P (x))
3135 {
3136 /* We don't have to worry about changing the address since the
3137 size in bytes is supposed to be the same. */
3138 if (reload_in_progress)
3139 {
3140 /* Copy the MEM to change the mode and move any
3141 substitutions from the old MEM to the new one. */
3142 ret = adjust_address_nv (x, new_mode, 0);
3143 copy_replacements (x, ret);
3144 }
3145 else
3146 ret = adjust_address (x, new_mode, 0);
3147 }
3148 else
3149 {
3150 /* Note that we do want simplify_subreg's behavior of validating
3151 that the new mode is ok for a hard register. If we were to use
3152 simplify_gen_subreg, we would create the subreg, but would
3153 probably run into the target not being able to implement it. */
3154 /* Except, of course, when FORCE is true, when this is exactly what
3155 we want. Which is needed for CCmodes on some targets. */
3156 if (force)
3157 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3158 else
3159 ret = simplify_subreg (new_mode, x, old_mode, 0);
3160 }
3161
3162 return ret;
3163 }
3164
3165 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3166 an integer mode of the same size as MODE. Returns the instruction
3167 emitted, or NULL if such a move could not be generated. */
3168
3169 static rtx_insn *
3170 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3171 {
3172 machine_mode imode;
3173 enum insn_code code;
3174
3175 /* There must exist a mode of the exact size we require. */
3176 imode = int_mode_for_mode (mode);
3177 if (imode == BLKmode)
3178 return NULL;
3179
3180 /* The target must support moves in this mode. */
3181 code = optab_handler (mov_optab, imode);
3182 if (code == CODE_FOR_nothing)
3183 return NULL;
3184
3185 x = emit_move_change_mode (imode, mode, x, force);
3186 if (x == NULL_RTX)
3187 return NULL;
3188 y = emit_move_change_mode (imode, mode, y, force);
3189 if (y == NULL_RTX)
3190 return NULL;
3191 return emit_insn (GEN_FCN (code) (x, y));
3192 }
3193
3194 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3195 Return an equivalent MEM that does not use an auto-increment. */
3196
3197 rtx
3198 emit_move_resolve_push (machine_mode mode, rtx x)
3199 {
3200 enum rtx_code code = GET_CODE (XEXP (x, 0));
3201 HOST_WIDE_INT adjust;
3202 rtx temp;
3203
3204 adjust = GET_MODE_SIZE (mode);
3205 #ifdef PUSH_ROUNDING
3206 adjust = PUSH_ROUNDING (adjust);
3207 #endif
3208 if (code == PRE_DEC || code == POST_DEC)
3209 adjust = -adjust;
3210 else if (code == PRE_MODIFY || code == POST_MODIFY)
3211 {
3212 rtx expr = XEXP (XEXP (x, 0), 1);
3213 HOST_WIDE_INT val;
3214
3215 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3216 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3217 val = INTVAL (XEXP (expr, 1));
3218 if (GET_CODE (expr) == MINUS)
3219 val = -val;
3220 gcc_assert (adjust == val || adjust == -val);
3221 adjust = val;
3222 }
3223
3224 /* Do not use anti_adjust_stack, since we don't want to update
3225 stack_pointer_delta. */
3226 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3227 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3228 0, OPTAB_LIB_WIDEN);
3229 if (temp != stack_pointer_rtx)
3230 emit_move_insn (stack_pointer_rtx, temp);
3231
3232 switch (code)
3233 {
3234 case PRE_INC:
3235 case PRE_DEC:
3236 case PRE_MODIFY:
3237 temp = stack_pointer_rtx;
3238 break;
3239 case POST_INC:
3240 case POST_DEC:
3241 case POST_MODIFY:
3242 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3243 break;
3244 default:
3245 gcc_unreachable ();
3246 }
3247
3248 return replace_equiv_address (x, temp);
3249 }
3250
3251 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3252 X is known to satisfy push_operand, and MODE is known to be complex.
3253 Returns the last instruction emitted. */
3254
3255 rtx_insn *
3256 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3257 {
3258 machine_mode submode = GET_MODE_INNER (mode);
3259 bool imag_first;
3260
3261 #ifdef PUSH_ROUNDING
3262 unsigned int submodesize = GET_MODE_SIZE (submode);
3263
3264 /* In case we push to the stack, but the machine cannot push a block of
3265 exactly this size, we need to use move instructions. */
3266 if (PUSH_ROUNDING (submodesize) != submodesize)
3267 {
3268 x = emit_move_resolve_push (mode, x);
3269 return emit_move_insn (x, y);
3270 }
3271 #endif
3272
3273 /* Note that the real part always precedes the imag part in memory
3274 regardless of machine's endianness. */
3275 switch (GET_CODE (XEXP (x, 0)))
3276 {
3277 case PRE_DEC:
3278 case POST_DEC:
3279 imag_first = true;
3280 break;
3281 case PRE_INC:
3282 case POST_INC:
3283 imag_first = false;
3284 break;
3285 default:
3286 gcc_unreachable ();
3287 }
3288
3289 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3290 read_complex_part (y, imag_first));
3291 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3292 read_complex_part (y, !imag_first));
3293 }
3294
3295 /* A subroutine of emit_move_complex. Perform the move from Y to X
3296 via two moves of the parts. Returns the last instruction emitted. */
3297
3298 rtx_insn *
3299 emit_move_complex_parts (rtx x, rtx y)
3300 {
3301 /* Show the output dies here. This is necessary for SUBREGs
3302 of pseudos since we cannot track their lifetimes correctly;
3303 hard regs shouldn't appear here except as return values. */
3304 if (!reload_completed && !reload_in_progress
3305 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3306 emit_clobber (x);
3307
3308 write_complex_part (x, read_complex_part (y, false), false);
3309 write_complex_part (x, read_complex_part (y, true), true);
3310
3311 return get_last_insn ();
3312 }
3313
3314 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3315 MODE is known to be complex. Returns the last instruction emitted. */
3316
3317 static rtx_insn *
3318 emit_move_complex (machine_mode mode, rtx x, rtx y)
3319 {
3320 bool try_int;
3321
3322 /* Need to take special care for pushes, to maintain proper ordering
3323 of the data, and possibly extra padding. */
3324 if (push_operand (x, mode))
3325 return emit_move_complex_push (mode, x, y);
3326
3327 /* See if we can coerce the target into moving both values at once, except
3328 for floating point where we favor moving as parts if this is easy. */
3329 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3330 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3331 && !(REG_P (x)
3332 && HARD_REGISTER_P (x)
3333 && hard_regno_nregs[REGNO (x)][mode] == 1)
3334 && !(REG_P (y)
3335 && HARD_REGISTER_P (y)
3336 && hard_regno_nregs[REGNO (y)][mode] == 1))
3337 try_int = false;
3338 /* Not possible if the values are inherently not adjacent. */
3339 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3340 try_int = false;
3341 /* Is possible if both are registers (or subregs of registers). */
3342 else if (register_operand (x, mode) && register_operand (y, mode))
3343 try_int = true;
3344 /* If one of the operands is a memory, and alignment constraints
3345 are friendly enough, we may be able to do combined memory operations.
3346 We do not attempt this if Y is a constant because that combination is
3347 usually better with the by-parts thing below. */
3348 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3349 && (!STRICT_ALIGNMENT
3350 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3351 try_int = true;
3352 else
3353 try_int = false;
3354
3355 if (try_int)
3356 {
3357 rtx_insn *ret;
3358
3359 /* For memory to memory moves, optimal behavior can be had with the
3360 existing block move logic. */
3361 if (MEM_P (x) && MEM_P (y))
3362 {
3363 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3364 BLOCK_OP_NO_LIBCALL);
3365 return get_last_insn ();
3366 }
3367
3368 ret = emit_move_via_integer (mode, x, y, true);
3369 if (ret)
3370 return ret;
3371 }
3372
3373 return emit_move_complex_parts (x, y);
3374 }
3375
3376 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3377 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3378
3379 static rtx_insn *
3380 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3381 {
3382 rtx_insn *ret;
3383
3384 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3385 if (mode != CCmode)
3386 {
3387 enum insn_code code = optab_handler (mov_optab, CCmode);
3388 if (code != CODE_FOR_nothing)
3389 {
3390 x = emit_move_change_mode (CCmode, mode, x, true);
3391 y = emit_move_change_mode (CCmode, mode, y, true);
3392 return emit_insn (GEN_FCN (code) (x, y));
3393 }
3394 }
3395
3396 /* Otherwise, find the MODE_INT mode of the same width. */
3397 ret = emit_move_via_integer (mode, x, y, false);
3398 gcc_assert (ret != NULL);
3399 return ret;
3400 }
3401
3402 /* Return true if word I of OP lies entirely in the
3403 undefined bits of a paradoxical subreg. */
3404
3405 static bool
3406 undefined_operand_subword_p (const_rtx op, int i)
3407 {
3408 machine_mode innermode, innermostmode;
3409 int offset;
3410 if (GET_CODE (op) != SUBREG)
3411 return false;
3412 innermode = GET_MODE (op);
3413 innermostmode = GET_MODE (SUBREG_REG (op));
3414 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3415 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3416 memory, except for a paradoxical subreg where we define
3417 SUBREG_BYTE to be 0; undo this exception as in
3418 simplify_subreg. */
3419 if (SUBREG_BYTE (op) == 0
3420 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3421 {
3422 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3423 if (WORDS_BIG_ENDIAN)
3424 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3425 if (BYTES_BIG_ENDIAN)
3426 offset += difference % UNITS_PER_WORD;
3427 }
3428 if (offset >= GET_MODE_SIZE (innermostmode)
3429 || offset <= -GET_MODE_SIZE (word_mode))
3430 return true;
3431 return false;
3432 }
3433
3434 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3435 MODE is any multi-word or full-word mode that lacks a move_insn
3436 pattern. Note that you will get better code if you define such
3437 patterns, even if they must turn into multiple assembler instructions. */
3438
3439 static rtx_insn *
3440 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3441 {
3442 rtx_insn *last_insn = 0;
3443 rtx_insn *seq;
3444 rtx inner;
3445 bool need_clobber;
3446 int i;
3447
3448 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3449
3450 /* If X is a push on the stack, do the push now and replace
3451 X with a reference to the stack pointer. */
3452 if (push_operand (x, mode))
3453 x = emit_move_resolve_push (mode, x);
3454
3455 /* If we are in reload, see if either operand is a MEM whose address
3456 is scheduled for replacement. */
3457 if (reload_in_progress && MEM_P (x)
3458 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3459 x = replace_equiv_address_nv (x, inner);
3460 if (reload_in_progress && MEM_P (y)
3461 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3462 y = replace_equiv_address_nv (y, inner);
3463
3464 start_sequence ();
3465
3466 need_clobber = false;
3467 for (i = 0;
3468 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3469 i++)
3470 {
3471 rtx xpart = operand_subword (x, i, 1, mode);
3472 rtx ypart;
3473
3474 /* Do not generate code for a move if it would come entirely
3475 from the undefined bits of a paradoxical subreg. */
3476 if (undefined_operand_subword_p (y, i))
3477 continue;
3478
3479 ypart = operand_subword (y, i, 1, mode);
3480
3481 /* If we can't get a part of Y, put Y into memory if it is a
3482 constant. Otherwise, force it into a register. Then we must
3483 be able to get a part of Y. */
3484 if (ypart == 0 && CONSTANT_P (y))
3485 {
3486 y = use_anchored_address (force_const_mem (mode, y));
3487 ypart = operand_subword (y, i, 1, mode);
3488 }
3489 else if (ypart == 0)
3490 ypart = operand_subword_force (y, i, mode);
3491
3492 gcc_assert (xpart && ypart);
3493
3494 need_clobber |= (GET_CODE (xpart) == SUBREG);
3495
3496 last_insn = emit_move_insn (xpart, ypart);
3497 }
3498
3499 seq = get_insns ();
3500 end_sequence ();
3501
3502 /* Show the output dies here. This is necessary for SUBREGs
3503 of pseudos since we cannot track their lifetimes correctly;
3504 hard regs shouldn't appear here except as return values.
3505 We never want to emit such a clobber after reload. */
3506 if (x != y
3507 && ! (reload_in_progress || reload_completed)
3508 && need_clobber != 0)
3509 emit_clobber (x);
3510
3511 emit_insn (seq);
3512
3513 return last_insn;
3514 }
3515
3516 /* Low level part of emit_move_insn.
3517 Called just like emit_move_insn, but assumes X and Y
3518 are basically valid. */
3519
3520 rtx_insn *
3521 emit_move_insn_1 (rtx x, rtx y)
3522 {
3523 machine_mode mode = GET_MODE (x);
3524 enum insn_code code;
3525
3526 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3527
3528 code = optab_handler (mov_optab, mode);
3529 if (code != CODE_FOR_nothing)
3530 return emit_insn (GEN_FCN (code) (x, y));
3531
3532 /* Expand complex moves by moving real part and imag part. */
3533 if (COMPLEX_MODE_P (mode))
3534 return emit_move_complex (mode, x, y);
3535
3536 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3537 || ALL_FIXED_POINT_MODE_P (mode))
3538 {
3539 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3540
3541 /* If we can't find an integer mode, use multi words. */
3542 if (result)
3543 return result;
3544 else
3545 return emit_move_multi_word (mode, x, y);
3546 }
3547
3548 if (GET_MODE_CLASS (mode) == MODE_CC)
3549 return emit_move_ccmode (mode, x, y);
3550
3551 /* Try using a move pattern for the corresponding integer mode. This is
3552 only safe when simplify_subreg can convert MODE constants into integer
3553 constants. At present, it can only do this reliably if the value
3554 fits within a HOST_WIDE_INT. */
3555 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3556 {
3557 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3558
3559 if (ret)
3560 {
3561 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3562 return ret;
3563 }
3564 }
3565
3566 return emit_move_multi_word (mode, x, y);
3567 }
3568
3569 /* Generate code to copy Y into X.
3570 Both Y and X must have the same mode, except that
3571 Y can be a constant with VOIDmode.
3572 This mode cannot be BLKmode; use emit_block_move for that.
3573
3574 Return the last instruction emitted. */
3575
3576 rtx_insn *
3577 emit_move_insn (rtx x, rtx y)
3578 {
3579 machine_mode mode = GET_MODE (x);
3580 rtx y_cst = NULL_RTX;
3581 rtx_insn *last_insn;
3582 rtx set;
3583
3584 gcc_assert (mode != BLKmode
3585 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3586
3587 if (CONSTANT_P (y))
3588 {
3589 if (optimize
3590 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3591 && (last_insn = compress_float_constant (x, y)))
3592 return last_insn;
3593
3594 y_cst = y;
3595
3596 if (!targetm.legitimate_constant_p (mode, y))
3597 {
3598 y = force_const_mem (mode, y);
3599
3600 /* If the target's cannot_force_const_mem prevented the spill,
3601 assume that the target's move expanders will also take care
3602 of the non-legitimate constant. */
3603 if (!y)
3604 y = y_cst;
3605 else
3606 y = use_anchored_address (y);
3607 }
3608 }
3609
3610 /* If X or Y are memory references, verify that their addresses are valid
3611 for the machine. */
3612 if (MEM_P (x)
3613 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3614 MEM_ADDR_SPACE (x))
3615 && ! push_operand (x, GET_MODE (x))))
3616 x = validize_mem (x);
3617
3618 if (MEM_P (y)
3619 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3620 MEM_ADDR_SPACE (y)))
3621 y = validize_mem (y);
3622
3623 gcc_assert (mode != BLKmode);
3624
3625 last_insn = emit_move_insn_1 (x, y);
3626
3627 if (y_cst && REG_P (x)
3628 && (set = single_set (last_insn)) != NULL_RTX
3629 && SET_DEST (set) == x
3630 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3631 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3632
3633 return last_insn;
3634 }
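/* Illustrative sketch, not part of the original source: a typical caller
   copies a constant into a fresh pseudo and lets emit_move_insn choose
   between a mov pattern, a constant-pool spill, or a multi-word split:

     rtx reg = gen_reg_rtx (SImode);
     rtx_insn *last = emit_move_insn (reg, GEN_INT (42));

   The REG_EQUAL note attached above lets later passes recover the
   original constant even if Y had to be spilled to memory.  */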
3635
3636 /* Generate the body of an instruction to copy Y into X.
3637 It may be a list of insns, if one insn isn't enough. */
3638
3639 rtx
3640 gen_move_insn (rtx x, rtx y)
3641 {
3642 rtx_insn *seq;
3643
3644 start_sequence ();
3645 emit_move_insn_1 (x, y);
3646 seq = get_insns ();
3647 end_sequence ();
3648 return seq;
3649 }
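/* Sketch, illustrative only: unlike emit_move_insn, gen_move_insn adds
   nothing to the current insn stream; callers emit the returned sequence
   themselves, e.g. (DST, SRC and PLACE are hypothetical):

     emit_insn_before (gen_move_insn (dst, src), place);  */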
3650
3651 /* If Y is representable exactly in a narrower mode, and the target can
3652 perform the extension directly from constant or memory, then emit the
3653 move as an extension. */
3654
3655 static rtx_insn *
3656 compress_float_constant (rtx x, rtx y)
3657 {
3658 machine_mode dstmode = GET_MODE (x);
3659 machine_mode orig_srcmode = GET_MODE (y);
3660 machine_mode srcmode;
3661 REAL_VALUE_TYPE r;
3662 int oldcost, newcost;
3663 bool speed = optimize_insn_for_speed_p ();
3664
3665 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3666
3667 if (targetm.legitimate_constant_p (dstmode, y))
3668 oldcost = set_src_cost (y, speed);
3669 else
3670 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3671
3672 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3673 srcmode != orig_srcmode;
3674 srcmode = GET_MODE_WIDER_MODE (srcmode))
3675 {
3676 enum insn_code ic;
3677 rtx trunc_y;
3678 rtx_insn *last_insn;
3679
3680 /* Skip if the target can't extend this way. */
3681 ic = can_extend_p (dstmode, srcmode, 0);
3682 if (ic == CODE_FOR_nothing)
3683 continue;
3684
3685 /* Skip if the narrowed value isn't exact. */
3686 if (! exact_real_truncate (srcmode, &r))
3687 continue;
3688
3689 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3690
3691 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3692 {
3693 /* Skip if the target needs extra instructions to perform
3694 the extension. */
3695 if (!insn_operand_matches (ic, 1, trunc_y))
3696 continue;
3697 /* This is valid, but may not be cheaper than the original. */
3698 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3699 speed);
3700 if (oldcost < newcost)
3701 continue;
3702 }
3703 else if (float_extend_from_mem[dstmode][srcmode])
3704 {
3705 trunc_y = force_const_mem (srcmode, trunc_y);
3706 /* This is valid, but may not be cheaper than the original. */
3707 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3708 speed);
3709 if (oldcost < newcost)
3710 continue;
3711 trunc_y = validize_mem (trunc_y);
3712 }
3713 else
3714 continue;
3715
3716 /* For CSE's benefit, force the compressed constant pool entry
3717 into a new pseudo. This constant may be used in different modes,
3718 and if not, combine will put things back together for us. */
3719 trunc_y = force_reg (srcmode, trunc_y);
3720
3721 /* If x is a hard register, perform the extension into a pseudo,
3722 so that e.g. stack realignment code is aware of it. */
3723 rtx target = x;
3724 if (REG_P (x) && HARD_REGISTER_P (x))
3725 target = gen_reg_rtx (dstmode);
3726
3727 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3728 last_insn = get_last_insn ();
3729
3730 if (REG_P (target))
3731 set_unique_reg_note (last_insn, REG_EQUAL, y);
3732
3733 if (target != x)
3734 return emit_move_insn (x, target);
3735 return last_insn;
3736 }
3737
3738 return NULL;
3739 }
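/* Illustrative example, not from the original source: on a target that
   provides extendsfdf2 and loads SFmode constants cheaply, the DFmode
   store for "double d = 1.0;" can be emitted as a FLOAT_EXTEND of the
   SFmode constant 1.0 instead of a full DFmode constant-pool load,
   because 1.0 truncates to SFmode exactly and the extension is assumed
   not to cost more.  */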
3740 \f
3741 /* Pushing data onto the stack. */
3742
3743 /* Push a block of length SIZE (perhaps variable)
3744 and return an rtx to address the beginning of the block.
3745 The value may be virtual_outgoing_args_rtx.
3746
3747 EXTRA is the number of bytes of padding to push in addition to SIZE.
3748 BELOW nonzero means this padding comes at low addresses;
3749 otherwise, the padding comes at high addresses. */
3750
3751 rtx
3752 push_block (rtx size, int extra, int below)
3753 {
3754 rtx temp;
3755
3756 size = convert_modes (Pmode, ptr_mode, size, 1);
3757 if (CONSTANT_P (size))
3758 anti_adjust_stack (plus_constant (Pmode, size, extra));
3759 else if (REG_P (size) && extra == 0)
3760 anti_adjust_stack (size);
3761 else
3762 {
3763 temp = copy_to_mode_reg (Pmode, size);
3764 if (extra != 0)
3765 temp = expand_binop (Pmode, add_optab, temp,
3766 gen_int_mode (extra, Pmode),
3767 temp, 0, OPTAB_LIB_WIDEN);
3768 anti_adjust_stack (temp);
3769 }
3770
3771 #ifndef STACK_GROWS_DOWNWARD
3772 if (0)
3773 #else
3774 if (1)
3775 #endif
3776 {
3777 temp = virtual_outgoing_args_rtx;
3778 if (extra != 0 && below)
3779 temp = plus_constant (Pmode, temp, extra);
3780 }
3781 else
3782 {
3783 if (CONST_INT_P (size))
3784 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3785 -INTVAL (size) - (below ? 0 : extra));
3786 else if (extra != 0 && !below)
3787 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3788 negate_rtx (Pmode, plus_constant (Pmode, size,
3789 extra)));
3790 else
3791 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3792 negate_rtx (Pmode, size));
3793 }
3794
3795 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3796 }
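/* Illustrative use, assumed values only:

     rtx addr = push_block (GEN_INT (16), 0, 0);

   reserves 16 bytes of outgoing-argument space and returns an address
   for the beginning of the block; a variable SIZE instead goes through
   copy_to_mode_reg and anti_adjust_stack as above.  */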
3797
3798 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3799
3800 static rtx
3801 mem_autoinc_base (rtx mem)
3802 {
3803 if (MEM_P (mem))
3804 {
3805 rtx addr = XEXP (mem, 0);
3806 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3807 return XEXP (addr, 0);
3808 }
3809 return NULL;
3810 }
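/* Sketch: for a push such as

     (set (mem:SI (pre_dec:P (reg sp))) (reg r0))

   mem_autoinc_base of the SET_DEST returns the stack pointer rtx, which
   is how find_args_size_adjust below recognizes push-style stack
   adjustments.  */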
3811
3812 /* A utility routine used here, in reload, and in try_split. The insns
3813 after PREV up to and including LAST are known to adjust the stack,
3814 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3815 placing notes as appropriate. PREV may be NULL, indicating the
3816 entire insn sequence prior to LAST should be scanned.
3817
3818 The set of allowed stack pointer modifications is small:
3819 (1) One or more auto-inc style memory references (aka pushes),
3820 (2) One or more addition/subtraction with the SP as destination,
3821 (3) A single move insn with the SP as destination,
3822 (4) A call_pop insn,
3823 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3824
3825 Insns in the sequence that do not modify the SP are ignored,
3826 except for noreturn calls.
3827
3828 The return value is the amount of adjustment that can be trivially
3829 verified, via immediate operand or auto-inc. If the adjustment
3830 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3831
3832 HOST_WIDE_INT
3833 find_args_size_adjust (rtx_insn *insn)
3834 {
3835 rtx dest, set, pat;
3836 int i;
3837
3838 pat = PATTERN (insn);
3839 set = NULL;
3840
3841 /* Look for a call_pop pattern. */
3842 if (CALL_P (insn))
3843 {
3844 /* We have to allow non-call_pop patterns for the case
3845 of emit_single_push_insn of a TLS address. */
3846 if (GET_CODE (pat) != PARALLEL)
3847 return 0;
3848
3849 /* All call_pop have a stack pointer adjust in the parallel.
3850 The call itself is always first, and the stack adjust is
3851 usually last, so search from the end. */
3852 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3853 {
3854 set = XVECEXP (pat, 0, i);
3855 if (GET_CODE (set) != SET)
3856 continue;
3857 dest = SET_DEST (set);
3858 if (dest == stack_pointer_rtx)
3859 break;
3860 }
3861 /* We'd better have found the stack pointer adjust. */
3862 if (i == 0)
3863 return 0;
3864 /* Fall through to process the extracted SET and DEST
3865 as if it were a standalone insn. */
3866 }
3867 else if (GET_CODE (pat) == SET)
3868 set = pat;
3869 else if ((set = single_set (insn)) != NULL)
3870 ;
3871 else if (GET_CODE (pat) == PARALLEL)
3872 {
3873 /* ??? Some older ports use a parallel with a stack adjust
3874 and a store for a PUSH_ROUNDING pattern, rather than a
3875 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3876 /* ??? See h8300 and m68k, pushqi1. */
3877 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3878 {
3879 set = XVECEXP (pat, 0, i);
3880 if (GET_CODE (set) != SET)
3881 continue;
3882 dest = SET_DEST (set);
3883 if (dest == stack_pointer_rtx)
3884 break;
3885
3886 /* We do not expect an auto-inc of the sp in the parallel. */
3887 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3888 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3889 != stack_pointer_rtx);
3890 }
3891 if (i < 0)
3892 return 0;
3893 }
3894 else
3895 return 0;
3896
3897 dest = SET_DEST (set);
3898
3899 /* Look for direct modifications of the stack pointer. */
3900 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3901 {
3902 /* Look for a trivial adjustment, otherwise assume nothing. */
3903 /* Note that the SPU restore_stack_block pattern refers to
3904 the stack pointer in V4SImode. Consider that non-trivial. */
3905 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3906 && GET_CODE (SET_SRC (set)) == PLUS
3907 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3908 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3909 return INTVAL (XEXP (SET_SRC (set), 1));
3910 /* ??? Reload can generate no-op moves, which will be cleaned
3911 up later. Recognize it and continue searching. */
3912 else if (rtx_equal_p (dest, SET_SRC (set)))
3913 return 0;
3914 else
3915 return HOST_WIDE_INT_MIN;
3916 }
3917 else
3918 {
3919 rtx mem, addr;
3920
3921 /* Otherwise only think about autoinc patterns. */
3922 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3923 {
3924 mem = dest;
3925 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3926 != stack_pointer_rtx);
3927 }
3928 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3929 mem = SET_SRC (set);
3930 else
3931 return 0;
3932
3933 addr = XEXP (mem, 0);
3934 switch (GET_CODE (addr))
3935 {
3936 case PRE_INC:
3937 case POST_INC:
3938 return GET_MODE_SIZE (GET_MODE (mem));
3939 case PRE_DEC:
3940 case POST_DEC:
3941 return -GET_MODE_SIZE (GET_MODE (mem));
3942 case PRE_MODIFY:
3943 case POST_MODIFY:
3944 addr = XEXP (addr, 1);
3945 gcc_assert (GET_CODE (addr) == PLUS);
3946 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3947 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3948 return INTVAL (XEXP (addr, 1));
3949 default:
3950 gcc_unreachable ();
3951 }
3952 }
3953 }
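/* Illustrative return values, assuming 4-byte SImode and not taken from
   the original source: a push through (pre_dec sp) of an SImode value
   yields -4; (set sp (plus sp (const_int -16))) yields -16; a non-scalar
   use of sp such as the V4SImode restore_stack_block case above yields
   HOST_WIDE_INT_MIN.  */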
3954
3955 int
3956 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3957 {
3958 int args_size = end_args_size;
3959 bool saw_unknown = false;
3960 rtx_insn *insn;
3961
3962 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3963 {
3964 HOST_WIDE_INT this_delta;
3965
3966 if (!NONDEBUG_INSN_P (insn))
3967 continue;
3968
3969 this_delta = find_args_size_adjust (insn);
3970 if (this_delta == 0)
3971 {
3972 if (!CALL_P (insn)
3973 || ACCUMULATE_OUTGOING_ARGS
3974 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3975 continue;
3976 }
3977
3978 gcc_assert (!saw_unknown);
3979 if (this_delta == HOST_WIDE_INT_MIN)
3980 saw_unknown = true;
3981
3982 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3983 #ifdef STACK_GROWS_DOWNWARD
3984 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3985 #endif
3986 args_size -= this_delta;
3987 }
3988
3989 return saw_unknown ? INT_MIN : args_size;
3990 }
3991
3992 #ifdef PUSH_ROUNDING
3993 /* Emit single push insn. */
3994
3995 static void
3996 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3997 {
3998 rtx dest_addr;
3999 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4000 rtx dest;
4001 enum insn_code icode;
4002
4003 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4004 /* If there is a push pattern, use it. Otherwise try the old way of
4005 throwing a MEM representing the push operation to the move expander. */
4006 icode = optab_handler (push_optab, mode);
4007 if (icode != CODE_FOR_nothing)
4008 {
4009 struct expand_operand ops[1];
4010
4011 create_input_operand (&ops[0], x, mode);
4012 if (maybe_expand_insn (icode, 1, ops))
4013 return;
4014 }
4015 if (GET_MODE_SIZE (mode) == rounded_size)
4016 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4017 /* If we are to pad downward, adjust the stack pointer first and
4018 then store X into the stack location using an offset. This is
4019 because emit_move_insn does not know how to pad; it does not have
4020 access to TYPE. */
4021 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4022 {
4023 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4024 HOST_WIDE_INT offset;
4025
4026 emit_move_insn (stack_pointer_rtx,
4027 expand_binop (Pmode,
4028 #ifdef STACK_GROWS_DOWNWARD
4029 sub_optab,
4030 #else
4031 add_optab,
4032 #endif
4033 stack_pointer_rtx,
4034 gen_int_mode (rounded_size, Pmode),
4035 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4036
4037 offset = (HOST_WIDE_INT) padding_size;
4038 #ifdef STACK_GROWS_DOWNWARD
4039 if (STACK_PUSH_CODE == POST_DEC)
4040 /* We have already decremented the stack pointer, so get the
4041 previous value. */
4042 offset += (HOST_WIDE_INT) rounded_size;
4043 #else
4044 if (STACK_PUSH_CODE == POST_INC)
4045 /* We have already incremented the stack pointer, so get the
4046 previous value. */
4047 offset -= (HOST_WIDE_INT) rounded_size;
4048 #endif
4049 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4050 gen_int_mode (offset, Pmode));
4051 }
4052 else
4053 {
4054 #ifdef STACK_GROWS_DOWNWARD
4055 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4056 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4057 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4058 Pmode));
4059 #else
4060 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4061 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4062 gen_int_mode (rounded_size, Pmode));
4063 #endif
4064 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4065 }
4066
4067 dest = gen_rtx_MEM (mode, dest_addr);
4068
4069 if (type != 0)
4070 {
4071 set_mem_attributes (dest, type, 1);
4072
4073 if (cfun->tail_call_marked)
4074 /* Function incoming arguments may overlap with sibling call
4075 outgoing arguments and we cannot allow reordering of reads
4076 from function arguments with stores to outgoing arguments
4077 of sibling calls. */
4078 set_mem_alias_set (dest, 0);
4079 }
4080 emit_move_insn (dest, x);
4081 }
4082
4083 /* Emit and annotate a single push insn. */
4084
4085 static void
4086 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4087 {
4088 int delta, old_delta = stack_pointer_delta;
4089 rtx_insn *prev = get_last_insn ();
4090 rtx_insn *last;
4091
4092 emit_single_push_insn_1 (mode, x, type);
4093
4094 last = get_last_insn ();
4095
4096 /* Notice the common case where we emitted exactly one insn. */
4097 if (PREV_INSN (last) == prev)
4098 {
4099 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4100 return;
4101 }
4102
4103 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4104 gcc_assert (delta == INT_MIN || delta == old_delta);
4105 }
4106 #endif
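/* Sketch of the bookkeeping above, illustrative only: pushing one SImode
   argument on a STACK_GROWS_DOWNWARD target with PUSH_ROUNDING (4) == 4
   emits a single

     (set (mem:SI (pre_dec:P (reg sp))) (reg X))

   annotated with a REG_ARGS_SIZE note holding the new
   stack_pointer_delta, so later passes can reconstruct the outgoing
   argument size at any insn.  */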
4107
4108 /* Generate code to push X onto the stack, assuming it has mode MODE and
4109 type TYPE.
4110 MODE is redundant except when X is a CONST_INT (since they don't
4111 carry mode info).
4112 SIZE is an rtx for the size of data to be copied (in bytes),
4113 needed only if X is BLKmode.
4114
4115 ALIGN (in bits) is maximum alignment we can assume.
4116
4117 If PARTIAL and REG are both nonzero, then copy that many of the first
4118 bytes of X into registers starting with REG, and push the rest of X.
4119 The amount of space pushed is decreased by PARTIAL bytes.
4120 REG must be a hard register in this case.
4121 If REG is zero but PARTIAL is not, take all other actions for an
4122 argument partially in registers, but do not actually load any
4123 registers.
4124
4125 EXTRA is the amount in bytes of extra space to leave next to this arg.
4126 This is ignored if an argument block has already been allocated.
4127
4128 On a machine that lacks real push insns, ARGS_ADDR is the address of
4129 the bottom of the argument block for this call. We use indexing off there
4130 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4131 argument block has not been preallocated.
4132
4133 ARGS_SO_FAR is the size of args previously pushed for this call.
4134
4135 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4136 for arguments passed in registers. If nonzero, it will be the number
4137 of bytes required. */
4138
4139 void
4140 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4141 unsigned int align, int partial, rtx reg, int extra,
4142 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4143 rtx alignment_pad)
4144 {
4145 rtx xinner;
4146 enum direction stack_direction
4147 #ifdef STACK_GROWS_DOWNWARD
4148 = downward;
4149 #else
4150 = upward;
4151 #endif
4152
4153 /* Decide where to pad the argument: `downward' for below,
4154 `upward' for above, or `none' for don't pad it.
4155 Default is below for small data on big-endian machines; else above. */
4156 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4157
4158 /* Invert direction if stack is post-decrement.
4159 FIXME: why? */
4160 if (STACK_PUSH_CODE == POST_DEC)
4161 if (where_pad != none)
4162 where_pad = (where_pad == downward ? upward : downward);
4163
4164 xinner = x;
4165
4166 if (mode == BLKmode
4167 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4168 {
4169 /* Copy a block into the stack, entirely or partially. */
4170
4171 rtx temp;
4172 int used;
4173 int offset;
4174 int skip;
4175
4176 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4177 used = partial - offset;
4178
4179 if (mode != BLKmode)
4180 {
4181 /* A value is to be stored in an insufficiently aligned
4182 stack slot; copy via a suitably aligned slot if
4183 necessary. */
4184 size = GEN_INT (GET_MODE_SIZE (mode));
4185 if (!MEM_P (xinner))
4186 {
4187 temp = assign_temp (type, 1, 1);
4188 emit_move_insn (temp, xinner);
4189 xinner = temp;
4190 }
4191 }
4192
4193 gcc_assert (size);
4194
4195 /* USED is now the # of bytes we need not copy to the stack
4196 because registers will take care of them. */
4197
4198 if (partial != 0)
4199 xinner = adjust_address (xinner, BLKmode, used);
4200
4201 /* If the partial register-part of the arg counts in its stack size,
4202 skip the part of stack space corresponding to the registers.
4203 Otherwise, start copying to the beginning of the stack space,
4204 by setting SKIP to 0. */
4205 skip = (reg_parm_stack_space == 0) ? 0 : used;
4206
4207 #ifdef PUSH_ROUNDING
4208 /* Do it with several push insns if that doesn't take lots of insns
4209 and if there is no difficulty with push insns that skip bytes
4210 on the stack for alignment purposes. */
4211 if (args_addr == 0
4212 && PUSH_ARGS
4213 && CONST_INT_P (size)
4214 && skip == 0
4215 && MEM_ALIGN (xinner) >= align
4216 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4217 /* Here we avoid the case of a structure whose weak alignment
4218 forces many pushes of a small amount of data,
4219 and such small pushes do rounding that causes trouble. */
4220 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4221 || align >= BIGGEST_ALIGNMENT
4222 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4223 == (align / BITS_PER_UNIT)))
4224 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4225 {
4226 /* Push padding now if padding above and stack grows down,
4227 or if padding below and stack grows up.
4228 But if space already allocated, this has already been done. */
4229 if (extra && args_addr == 0
4230 && where_pad != none && where_pad != stack_direction)
4231 anti_adjust_stack (GEN_INT (extra));
4232
4233 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4234 }
4235 else
4236 #endif /* PUSH_ROUNDING */
4237 {
4238 rtx target;
4239
4240 /* Otherwise make space on the stack and copy the data
4241 to the address of that space. */
4242
4243 /* Deduct words put into registers from the size we must copy. */
4244 if (partial != 0)
4245 {
4246 if (CONST_INT_P (size))
4247 size = GEN_INT (INTVAL (size) - used);
4248 else
4249 size = expand_binop (GET_MODE (size), sub_optab, size,
4250 gen_int_mode (used, GET_MODE (size)),
4251 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4252 }
4253
4254 /* Get the address of the stack space.
4255 In this case, we do not deal with EXTRA separately.
4256 A single stack adjust will do. */
4257 if (! args_addr)
4258 {
4259 temp = push_block (size, extra, where_pad == downward);
4260 extra = 0;
4261 }
4262 else if (CONST_INT_P (args_so_far))
4263 temp = memory_address (BLKmode,
4264 plus_constant (Pmode, args_addr,
4265 skip + INTVAL (args_so_far)));
4266 else
4267 temp = memory_address (BLKmode,
4268 plus_constant (Pmode,
4269 gen_rtx_PLUS (Pmode,
4270 args_addr,
4271 args_so_far),
4272 skip));
4273
4274 if (!ACCUMULATE_OUTGOING_ARGS)
4275 {
4276 /* If the source is referenced relative to the stack pointer,
4277 copy it to another register to stabilize it. We do not need
4278 to do this if we know that we won't be changing sp. */
4279
4280 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4281 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4282 temp = copy_to_reg (temp);
4283 }
4284
4285 target = gen_rtx_MEM (BLKmode, temp);
4286
4287 /* We do *not* set_mem_attributes here, because incoming arguments
4288 may overlap with sibling call outgoing arguments and we cannot
4289 allow reordering of reads from function arguments with stores
4290 to outgoing arguments of sibling calls. We do, however, want
4291 to record the alignment of the stack slot. */
4292 /* ALIGN may well be better aligned than TYPE, e.g. due to
4293 PARM_BOUNDARY. Assume the caller isn't lying. */
4294 set_mem_align (target, align);
4295
4296 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4297 }
4298 }
4299 else if (partial > 0)
4300 {
4301 /* Scalar partly in registers. */
4302
4303 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4304 int i;
4305 int not_stack;
4306 /* # bytes of start of argument
4307 that we must make space for but need not store. */
4308 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4309 int args_offset = INTVAL (args_so_far);
4310 int skip;
4311
4312 /* Push padding now if padding above and stack grows down,
4313 or if padding below and stack grows up.
4314 But if space already allocated, this has already been done. */
4315 if (extra && args_addr == 0
4316 && where_pad != none && where_pad != stack_direction)
4317 anti_adjust_stack (GEN_INT (extra));
4318
4319 /* If we make space by pushing it, we might as well push
4320 the real data. Otherwise, we can leave OFFSET nonzero
4321 and leave the space uninitialized. */
4322 if (args_addr == 0)
4323 offset = 0;
4324
4325 /* Now NOT_STACK gets the number of words that we don't need to
4326 allocate on the stack. Convert OFFSET to words too. */
4327 not_stack = (partial - offset) / UNITS_PER_WORD;
4328 offset /= UNITS_PER_WORD;
4329
4330 /* If the partial register-part of the arg counts in its stack size,
4331 skip the part of stack space corresponding to the registers.
4332 Otherwise, start copying to the beginning of the stack space,
4333 by setting SKIP to 0. */
4334 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4335
4336 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4337 x = validize_mem (force_const_mem (mode, x));
4338
4339 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4340 SUBREGs of such registers are not allowed. */
4341 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4342 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4343 x = copy_to_reg (x);
4344
4345 /* Loop over all the words allocated on the stack for this arg. */
4346 /* We can do it by words, because any scalar bigger than a word
4347 has a size that is a multiple of a word. */
4348 for (i = size - 1; i >= not_stack; i--)
4349 if (i >= not_stack + offset)
4350 emit_push_insn (operand_subword_force (x, i, mode),
4351 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4352 0, args_addr,
4353 GEN_INT (args_offset + ((i - not_stack + skip)
4354 * UNITS_PER_WORD)),
4355 reg_parm_stack_space, alignment_pad);
4356 }
4357 else
4358 {
4359 rtx addr;
4360 rtx dest;
4361
4362 /* Push padding now if padding above and stack grows down,
4363 or if padding below and stack grows up.
4364 But if space already allocated, this has already been done. */
4365 if (extra && args_addr == 0
4366 && where_pad != none && where_pad != stack_direction)
4367 anti_adjust_stack (GEN_INT (extra));
4368
4369 #ifdef PUSH_ROUNDING
4370 if (args_addr == 0 && PUSH_ARGS)
4371 emit_single_push_insn (mode, x, type);
4372 else
4373 #endif
4374 {
4375 if (CONST_INT_P (args_so_far))
4376 addr
4377 = memory_address (mode,
4378 plus_constant (Pmode, args_addr,
4379 INTVAL (args_so_far)));
4380 else
4381 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4382 args_so_far));
4383 dest = gen_rtx_MEM (mode, addr);
4384
4385 /* We do *not* set_mem_attributes here, because incoming arguments
4386 may overlap with sibling call outgoing arguments and we cannot
4387 allow reordering of reads from function arguments with stores
4388 to outgoing arguments of sibling calls. We do, however, want
4389 to record the alignment of the stack slot. */
4390 /* ALIGN may well be better aligned than TYPE, e.g. due to
4391 PARM_BOUNDARY. Assume the caller isn't lying. */
4392 set_mem_align (dest, align);
4393
4394 emit_move_insn (dest, x);
4395 }
4396 }
4397
4398 /* If part should go in registers, copy that part
4399 into the appropriate registers. Do this now, at the end,
4400 since mem-to-mem copies above may do function calls. */
4401 if (partial > 0 && reg != 0)
4402 {
4403 /* Handle calls that pass values in multiple non-contiguous locations.
4404 The Irix 6 ABI has examples of this. */
4405 if (GET_CODE (reg) == PARALLEL)
4406 emit_group_load (reg, x, type, -1);
4407 else
4408 {
4409 gcc_assert (partial % UNITS_PER_WORD == 0);
4410 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4411 }
4412 }
4413
4414 if (extra && args_addr == 0 && where_pad == stack_direction)
4415 anti_adjust_stack (GEN_INT (extra));
4416
4417 if (alignment_pad && args_addr == 0)
4418 anti_adjust_stack (alignment_pad);
4419 }
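/* Illustrative call with hypothetical values, not from the original
   source: pushing a 4-byte scalar with no partial-register part and no
   preallocated argument block looks roughly like

     emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                     32, 0, NULL_RTX, 0, NULL_RTX, const0_rtx,
                     0, NULL_RTX);

   which on a PUSH_ARGS target reduces to a single emit_single_push_insn.  */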
4420 \f
4421 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4422 operations. */
4423
4424 static rtx
4425 get_subtarget (rtx x)
4426 {
4427 return (optimize
4428 || x == 0
4429 /* Only registers can be subtargets. */
4430 || !REG_P (x)
4431 /* Don't use hard regs to avoid extending their life. */
4432 || REGNO (x) < FIRST_PSEUDO_REGISTER
4433 ? 0 : x);
4434 }
4435
4436 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4437 FIELD is a bitfield. Returns true if the optimization was successful,
4438 and there's nothing else to do. */
4439
4440 static bool
4441 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4442 unsigned HOST_WIDE_INT bitpos,
4443 unsigned HOST_WIDE_INT bitregion_start,
4444 unsigned HOST_WIDE_INT bitregion_end,
4445 machine_mode mode1, rtx str_rtx,
4446 tree to, tree src)
4447 {
4448 machine_mode str_mode = GET_MODE (str_rtx);
4449 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4450 tree op0, op1;
4451 rtx value, result;
4452 optab binop;
4453 gimple srcstmt;
4454 enum tree_code code;
4455
4456 if (mode1 != VOIDmode
4457 || bitsize >= BITS_PER_WORD
4458 || str_bitsize > BITS_PER_WORD
4459 || TREE_SIDE_EFFECTS (to)
4460 || TREE_THIS_VOLATILE (to))
4461 return false;
4462
4463 STRIP_NOPS (src);
4464 if (TREE_CODE (src) != SSA_NAME)
4465 return false;
4466 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4467 return false;
4468
4469 srcstmt = get_gimple_for_ssa_name (src);
4470 if (!srcstmt
4471 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4472 return false;
4473
4474 code = gimple_assign_rhs_code (srcstmt);
4475
4476 op0 = gimple_assign_rhs1 (srcstmt);
4477
4478 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4479 to find its initialization. Hopefully the initialization will
4480 be from a bitfield load. */
4481 if (TREE_CODE (op0) == SSA_NAME)
4482 {
4483 gimple op0stmt = get_gimple_for_ssa_name (op0);
4484
4485 /* We want to eventually have OP0 be the same as TO, which
4486 should be a bitfield. */
4487 if (!op0stmt
4488 || !is_gimple_assign (op0stmt)
4489 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4490 return false;
4491 op0 = gimple_assign_rhs1 (op0stmt);
4492 }
4493
4494 op1 = gimple_assign_rhs2 (srcstmt);
4495
4496 if (!operand_equal_p (to, op0, 0))
4497 return false;
4498
4499 if (MEM_P (str_rtx))
4500 {
4501 unsigned HOST_WIDE_INT offset1;
4502
4503 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4504 str_mode = word_mode;
4505 str_mode = get_best_mode (bitsize, bitpos,
4506 bitregion_start, bitregion_end,
4507 MEM_ALIGN (str_rtx), str_mode, 0);
4508 if (str_mode == VOIDmode)
4509 return false;
4510 str_bitsize = GET_MODE_BITSIZE (str_mode);
4511
4512 offset1 = bitpos;
4513 bitpos %= str_bitsize;
4514 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4515 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4516 }
4517 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4518 return false;
4519
4520 /* If the bit field covers the whole REG/MEM, store_field
4521 will likely generate better code. */
4522 if (bitsize >= str_bitsize)
4523 return false;
4524
4525 /* We can't handle fields split across multiple entities. */
4526 if (bitpos + bitsize > str_bitsize)
4527 return false;
4528
4529 if (BYTES_BIG_ENDIAN)
4530 bitpos = str_bitsize - bitpos - bitsize;
4531
4532 switch (code)
4533 {
4534 case PLUS_EXPR:
4535 case MINUS_EXPR:
4536 /* For now, just optimize the case of the topmost bitfield
4537 where we don't need to do any masking and also
4538 1 bit bitfields where xor can be used.
4539 We might win by one instruction for the other bitfields
4540 too if insv/extv instructions aren't used, so that
4541 can be added later. */
4542 if (bitpos + bitsize != str_bitsize
4543 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4544 break;
4545
4546 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4547 value = convert_modes (str_mode,
4548 TYPE_MODE (TREE_TYPE (op1)), value,
4549 TYPE_UNSIGNED (TREE_TYPE (op1)));
4550
4551 /* We may be accessing data outside the field, which means
4552 we can alias adjacent data. */
4553 if (MEM_P (str_rtx))
4554 {
4555 str_rtx = shallow_copy_rtx (str_rtx);
4556 set_mem_alias_set (str_rtx, 0);
4557 set_mem_expr (str_rtx, 0);
4558 }
4559
4560 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4561 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4562 {
4563 value = expand_and (str_mode, value, const1_rtx, NULL);
4564 binop = xor_optab;
4565 }
4566 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4567 result = expand_binop (str_mode, binop, str_rtx,
4568 value, str_rtx, 1, OPTAB_WIDEN);
4569 if (result != str_rtx)
4570 emit_move_insn (str_rtx, result);
4571 return true;
4572
4573 case BIT_IOR_EXPR:
4574 case BIT_XOR_EXPR:
4575 if (TREE_CODE (op1) != INTEGER_CST)
4576 break;
4577 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4578 value = convert_modes (str_mode,
4579 TYPE_MODE (TREE_TYPE (op1)), value,
4580 TYPE_UNSIGNED (TREE_TYPE (op1)));
4581
4582 /* We may be accessing data outside the field, which means
4583 we can alias adjacent data. */
4584 if (MEM_P (str_rtx))
4585 {
4586 str_rtx = shallow_copy_rtx (str_rtx);
4587 set_mem_alias_set (str_rtx, 0);
4588 set_mem_expr (str_rtx, 0);
4589 }
4590
4591 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4592 if (bitpos + bitsize != str_bitsize)
4593 {
4594 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4595 str_mode);
4596 value = expand_and (str_mode, value, mask, NULL_RTX);
4597 }
4598 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4599 result = expand_binop (str_mode, binop, str_rtx,
4600 value, str_rtx, 1, OPTAB_WIDEN);
4601 if (result != str_rtx)
4602 emit_move_insn (str_rtx, result);
4603 return true;
4604
4605 default:
4606 break;
4607 }
4608
4609 return false;
4610 }
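/* Illustrative source-level example, not from the original source: for

     struct S { unsigned int f : 1; } s;
     s.f += 1;     or     s.f ^= 1;

   the code above can turn the read-modify-write of the containing word
   into a single xor of a shifted constant, avoiding the extract/insert
   pair that store_field would otherwise emit.  */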
4611
4612 /* In the C++ memory model, consecutive bit fields in a structure are
4613 considered one memory location.
4614
4615 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4616 returns the bit range of consecutive bits in which this COMPONENT_REF
4617 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4618 and *OFFSET may be adjusted in the process.
4619
4620 If the access does not need to be restricted, 0 is returned in both
4621 *BITSTART and *BITEND. */
4622
4623 static void
4624 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4625 unsigned HOST_WIDE_INT *bitend,
4626 tree exp,
4627 HOST_WIDE_INT *bitpos,
4628 tree *offset)
4629 {
4630 HOST_WIDE_INT bitoffset;
4631 tree field, repr;
4632
4633 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4634
4635 field = TREE_OPERAND (exp, 1);
4636 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4637 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4638 need to limit the range we can access. */
4639 if (!repr)
4640 {
4641 *bitstart = *bitend = 0;
4642 return;
4643 }
4644
4645 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4646 part of a larger bit field, then the representative does not serve any
4647 useful purpose. This can occur in Ada. */
4648 if (handled_component_p (TREE_OPERAND (exp, 0)))
4649 {
4650 machine_mode rmode;
4651 HOST_WIDE_INT rbitsize, rbitpos;
4652 tree roffset;
4653 int unsignedp;
4654 int volatilep = 0;
4655 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4656 &roffset, &rmode, &unsignedp, &volatilep, false);
4657 if ((rbitpos % BITS_PER_UNIT) != 0)
4658 {
4659 *bitstart = *bitend = 0;
4660 return;
4661 }
4662 }
4663
4664 /* Compute the adjustment to bitpos from the offset of the field
4665 relative to the representative. DECL_FIELD_OFFSET of field and
4666 repr are the same by construction if they are not constants,
4667 see finish_bitfield_layout. */
4668 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4669 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4670 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4671 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4672 else
4673 bitoffset = 0;
4674 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4675 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4676
4677 /* If the adjustment is larger than bitpos, we would have a negative bit
4678 position for the lower bound and this may wreak havoc later. Adjust
4679 offset and bitpos to make the lower bound non-negative in that case. */
4680 if (bitoffset > *bitpos)
4681 {
4682 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4683 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4684
4685 *bitpos += adjust;
4686 if (*offset == NULL_TREE)
4687 *offset = size_int (-adjust / BITS_PER_UNIT);
4688 else
4689 *offset
4690 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4691 *bitstart = 0;
4692 }
4693 else
4694 *bitstart = *bitpos - bitoffset;
4695
4696 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4697 }
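/* Illustrative example, not from the original source: in

     struct S { char a; int b : 7; int c : 9; char d; } s;

   the bit fields b and c share one DECL_BIT_FIELD_REPRESENTATIVE, so a
   store to s.c gets a bit range covering b and c but excluding a and d:
   the store may touch b's bits but, per the C++ memory model, must not
   touch the distinct memory locations a and d.  */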
4698
4699 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4700 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4701 DECL_RTL was not set yet, return NORTL. */
4702
4703 static inline bool
4704 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4705 {
4706 if (TREE_CODE (addr) != ADDR_EXPR)
4707 return false;
4708
4709 tree base = TREE_OPERAND (addr, 0);
4710
4711 if (!DECL_P (base)
4712 || TREE_ADDRESSABLE (base)
4713 || DECL_MODE (base) == BLKmode)
4714 return false;
4715
4716 if (!DECL_RTL_SET_P (base))
4717 return nortl;
4718
4719 return (!MEM_P (DECL_RTL (base)));
4720 }
4721
4722 /* Returns true if the MEM_REF REF refers to an object that does not
4723 reside in memory and has non-BLKmode. */
4724
4725 static inline bool
4726 mem_ref_refers_to_non_mem_p (tree ref)
4727 {
4728 tree base = TREE_OPERAND (ref, 0);
4729 return addr_expr_of_non_mem_decl_p_1 (base, false);
4730 }
4731
4732 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4733 is true, try generating a nontemporal store. */
4734
4735 void
4736 expand_assignment (tree to, tree from, bool nontemporal)
4737 {
4738 rtx to_rtx = 0;
4739 rtx result;
4740 machine_mode mode;
4741 unsigned int align;
4742 enum insn_code icode;
4743
4744 /* Don't crash if the lhs of the assignment was erroneous. */
4745 if (TREE_CODE (to) == ERROR_MARK)
4746 {
4747 expand_normal (from);
4748 return;
4749 }
4750
4751 /* Optimize away no-op moves without side-effects. */
4752 if (operand_equal_p (to, from, 0))
4753 return;
4754
4755 /* Handle misaligned stores. */
4756 mode = TYPE_MODE (TREE_TYPE (to));
4757 if ((TREE_CODE (to) == MEM_REF
4758 || TREE_CODE (to) == TARGET_MEM_REF)
4759 && mode != BLKmode
4760 && !mem_ref_refers_to_non_mem_p (to)
4761 && ((align = get_object_alignment (to))
4762 < GET_MODE_ALIGNMENT (mode))
4763 && (((icode = optab_handler (movmisalign_optab, mode))
4764 != CODE_FOR_nothing)
4765 || SLOW_UNALIGNED_ACCESS (mode, align)))
4766 {
4767 rtx reg, mem;
4768
4769 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4770 reg = force_not_mem (reg);
4771 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4772
4773 if (icode != CODE_FOR_nothing)
4774 {
4775 struct expand_operand ops[2];
4776
4777 create_fixed_operand (&ops[0], mem);
4778 create_input_operand (&ops[1], reg, mode);
4779 /* The movmisalign<mode> pattern cannot fail, else the assignment
4780 would silently be omitted. */
4781 expand_insn (icode, 2, ops);
4782 }
4783 else
4784 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4785 return;
4786 }
4787
4788 /* Assignment of a structure component needs special treatment
4789 if the structure component's rtx is not simply a MEM.
4790 Assignment of an array element at a constant index, and assignment of
4791 an array element in an unaligned packed structure field, has the same
4792 problem. Same for (partially) storing into a non-memory object. */
4793 if (handled_component_p (to)
4794 || (TREE_CODE (to) == MEM_REF
4795 && mem_ref_refers_to_non_mem_p (to))
4796 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4797 {
4798 machine_mode mode1;
4799 HOST_WIDE_INT bitsize, bitpos;
4800 unsigned HOST_WIDE_INT bitregion_start = 0;
4801 unsigned HOST_WIDE_INT bitregion_end = 0;
4802 tree offset;
4803 int unsignedp;
4804 int volatilep = 0;
4805 tree tem;
4806
4807 push_temp_slots ();
4808 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4809 &unsignedp, &volatilep, true);
4810
4811 /* Make sure bitpos is not negative, it can wreak havoc later. */
4812 if (bitpos < 0)
4813 {
4814 gcc_assert (offset == NULL_TREE);
4815 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4816 ? 3 : exact_log2 (BITS_PER_UNIT)));
4817 bitpos &= BITS_PER_UNIT - 1;
4818 }
4819
4820 if (TREE_CODE (to) == COMPONENT_REF
4821 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4822 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4823 /* The C++ memory model naturally applies to byte-aligned fields.
4824 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4825 BITSIZE are not byte-aligned, there is no need to limit the range
4826 we can access. This can occur with packed structures in Ada. */
4827 else if (bitsize > 0
4828 && bitsize % BITS_PER_UNIT == 0
4829 && bitpos % BITS_PER_UNIT == 0)
4830 {
4831 bitregion_start = bitpos;
4832 bitregion_end = bitpos + bitsize - 1;
4833 }
4834
4835 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4836
4837 /* If the field has a mode, we want to access it in the
4838 field's mode, not the computed mode.
4839 If a MEM has VOIDmode (external with incomplete type),
4840 use BLKmode for it instead. */
4841 if (MEM_P (to_rtx))
4842 {
4843 if (mode1 != VOIDmode)
4844 to_rtx = adjust_address (to_rtx, mode1, 0);
4845 else if (GET_MODE (to_rtx) == VOIDmode)
4846 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4847 }
4848
4849 if (offset != 0)
4850 {
4851 machine_mode address_mode;
4852 rtx offset_rtx;
4853
4854 if (!MEM_P (to_rtx))
4855 {
4856 /* We can get constant negative offsets into arrays with broken
4857 user code. Translate this to a trap instead of ICEing. */
4858 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4859 expand_builtin_trap ();
4860 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4861 }
4862
4863 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4864 address_mode = get_address_mode (to_rtx);
4865 if (GET_MODE (offset_rtx) != address_mode)
4866 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4867
4868 /* If we have an expression in OFFSET_RTX and a non-zero
4869 byte offset in BITPOS, adding the byte offset before the
4870 OFFSET_RTX results in better intermediate code, which makes
4871 later rtl optimization passes perform better.
4872
4873 We prefer intermediate code like this:
4874
4875 r124:DI=r123:DI+0x18
4876 [r124:DI]=r121:DI
4877
4878 ... instead of ...
4879
4880 r124:DI=r123:DI+0x10
4881 [r124:DI+0x8]=r121:DI
4882
4883 This is only done for aligned data values, as these can
4884 be expected to result in single move instructions. */
4885 if (mode1 != VOIDmode
4886 && bitpos != 0
4887 && bitsize > 0
4888 && (bitpos % bitsize) == 0
4889 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4890 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4891 {
4892 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4893 bitregion_start = 0;
4894 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4895 bitregion_end -= bitpos;
4896 bitpos = 0;
4897 }
4898
4899 to_rtx = offset_address (to_rtx, offset_rtx,
4900 highest_pow2_factor_for_target (to,
4901 offset));
4902 }
4903
4904 /* No action is needed if the target is not a memory and the field
4905 lies completely outside that target. This can occur if the source
4906 code contains an out-of-bounds access to a small array. */
4907 if (!MEM_P (to_rtx)
4908 && GET_MODE (to_rtx) != BLKmode
4909 && (unsigned HOST_WIDE_INT) bitpos
4910 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4911 {
4912 expand_normal (from);
4913 result = NULL;
4914 }
4915 /* Handle expand_expr of a complex value returning a CONCAT. */
4916 else if (GET_CODE (to_rtx) == CONCAT)
4917 {
4918 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4919 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4920 && bitpos == 0
4921 && bitsize == mode_bitsize)
4922 result = store_expr (from, to_rtx, false, nontemporal);
4923 else if (bitsize == mode_bitsize / 2
4924 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4925 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4926 nontemporal);
4927 else if (bitpos + bitsize <= mode_bitsize / 2)
4928 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4929 bitregion_start, bitregion_end,
4930 mode1, from,
4931 get_alias_set (to), nontemporal);
4932 else if (bitpos >= mode_bitsize / 2)
4933 result = store_field (XEXP (to_rtx, 1), bitsize,
4934 bitpos - mode_bitsize / 2,
4935 bitregion_start, bitregion_end,
4936 mode1, from,
4937 get_alias_set (to), nontemporal);
4938 else if (bitpos == 0 && bitsize == mode_bitsize)
4939 {
4940 rtx from_rtx;
4941 result = expand_normal (from);
4942 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4943 TYPE_MODE (TREE_TYPE (from)), 0);
4944 emit_move_insn (XEXP (to_rtx, 0),
4945 read_complex_part (from_rtx, false));
4946 emit_move_insn (XEXP (to_rtx, 1),
4947 read_complex_part (from_rtx, true));
4948 }
4949 else
4950 {
4951 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4952 GET_MODE_SIZE (GET_MODE (to_rtx)));
4953 write_complex_part (temp, XEXP (to_rtx, 0), false);
4954 write_complex_part (temp, XEXP (to_rtx, 1), true);
4955 result = store_field (temp, bitsize, bitpos,
4956 bitregion_start, bitregion_end,
4957 mode1, from,
4958 get_alias_set (to), nontemporal);
4959 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4960 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4961 }
4962 }
4963 else
4964 {
4965 if (MEM_P (to_rtx))
4966 {
4967 /* If the field is at offset zero, we could have been given the
4968 DECL_RTX of the parent struct. Don't munge it. */
4969 to_rtx = shallow_copy_rtx (to_rtx);
4970 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4971 if (volatilep)
4972 MEM_VOLATILE_P (to_rtx) = 1;
4973 }
4974
4975 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4976 bitregion_start, bitregion_end,
4977 mode1,
4978 to_rtx, to, from))
4979 result = NULL;
4980 else
4981 result = store_field (to_rtx, bitsize, bitpos,
4982 bitregion_start, bitregion_end,
4983 mode1, from,
4984 get_alias_set (to), nontemporal);
4985 }
4986
4987 if (result)
4988 preserve_temp_slots (result);
4989 pop_temp_slots ();
4990 return;
4991 }
4992
4993 /* If the rhs is a function call and its value is not an aggregate,
4994 call the function before we start to compute the lhs.
4995 This is needed for correct code for cases such as
4996 val = setjmp (buf) on machines where reference to val
4997 requires loading up part of an address in a separate insn.
4998
4999 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5000 since it might be a promoted variable where the zero- or sign-extension
5001 needs to be done. Handling this in the normal way is safe because no
5002 computation is done before the call. The same is true for SSA names. */
5003 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5004 && COMPLETE_TYPE_P (TREE_TYPE (from))
5005 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5006 && ! (((TREE_CODE (to) == VAR_DECL
5007 || TREE_CODE (to) == PARM_DECL
5008 || TREE_CODE (to) == RESULT_DECL)
5009 && REG_P (DECL_RTL (to)))
5010 || TREE_CODE (to) == SSA_NAME))
5011 {
5012 rtx value;
5013 rtx bounds;
5014
5015 push_temp_slots ();
5016 value = expand_normal (from);
5017
5018 /* Split value and bounds to store them separately. */
5019 chkp_split_slot (value, &value, &bounds);
5020
5021 if (to_rtx == 0)
5022 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5023
5024 /* Handle calls that return values in multiple non-contiguous locations.
5025 The Irix 6 ABI has examples of this. */
5026 if (GET_CODE (to_rtx) == PARALLEL)
5027 {
5028 if (GET_CODE (value) == PARALLEL)
5029 emit_group_move (to_rtx, value);
5030 else
5031 emit_group_load (to_rtx, value, TREE_TYPE (from),
5032 int_size_in_bytes (TREE_TYPE (from)));
5033 }
5034 else if (GET_CODE (value) == PARALLEL)
5035 emit_group_store (to_rtx, value, TREE_TYPE (from),
5036 int_size_in_bytes (TREE_TYPE (from)));
5037 else if (GET_MODE (to_rtx) == BLKmode)
5038 {
5039 /* Handle calls that return BLKmode values in registers. */
5040 if (REG_P (value))
5041 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5042 else
5043 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5044 }
5045 else
5046 {
5047 if (POINTER_TYPE_P (TREE_TYPE (to)))
5048 value = convert_memory_address_addr_space
5049 (GET_MODE (to_rtx), value,
5050 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5051
5052 emit_move_insn (to_rtx, value);
5053 }
5054
5055 /* Store bounds if required. */
5056 if (bounds
5057 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5058 {
5059 gcc_assert (MEM_P (to_rtx));
5060 chkp_emit_bounds_store (bounds, value, to_rtx);
5061 }
5062
5063 preserve_temp_slots (to_rtx);
5064 pop_temp_slots ();
5065 return;
5066 }
5067
5068 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5069 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5070
5071 /* Don't move directly into a return register. */
5072 if (TREE_CODE (to) == RESULT_DECL
5073 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5074 {
5075 rtx temp;
5076
5077 push_temp_slots ();
5078
5079 /* If the source is itself a return value, it still is in a pseudo at
5080 this point so we can move it back to the return register directly. */
5081 if (REG_P (to_rtx)
5082 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5083 && TREE_CODE (from) != CALL_EXPR)
5084 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5085 else
5086 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5087
5088 /* Handle calls that return values in multiple non-contiguous locations.
5089 The Irix 6 ABI has examples of this. */
5090 if (GET_CODE (to_rtx) == PARALLEL)
5091 {
5092 if (GET_CODE (temp) == PARALLEL)
5093 emit_group_move (to_rtx, temp);
5094 else
5095 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5096 int_size_in_bytes (TREE_TYPE (from)));
5097 }
5098 else if (temp)
5099 emit_move_insn (to_rtx, temp);
5100
5101 preserve_temp_slots (to_rtx);
5102 pop_temp_slots ();
5103 return;
5104 }
5105
5106 /* In case we are returning the contents of an object which overlaps
5107 the place the value is being stored, use a safe function when copying
5108 a value through a pointer into a structure value return block. */
5109 if (TREE_CODE (to) == RESULT_DECL
5110 && TREE_CODE (from) == INDIRECT_REF
5111 && ADDR_SPACE_GENERIC_P
5112 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5113 && refs_may_alias_p (to, from)
5114 && cfun->returns_struct
5115 && !cfun->returns_pcc_struct)
5116 {
5117 rtx from_rtx, size;
5118
5119 push_temp_slots ();
5120 size = expr_size (from);
5121 from_rtx = expand_normal (from);
5122
5123 emit_library_call (memmove_libfunc, LCT_NORMAL,
5124 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5125 XEXP (from_rtx, 0), Pmode,
5126 convert_to_mode (TYPE_MODE (sizetype),
5127 size, TYPE_UNSIGNED (sizetype)),
5128 TYPE_MODE (sizetype));
5129
5130 preserve_temp_slots (to_rtx);
5131 pop_temp_slots ();
5132 return;
5133 }
5134
5135 /* Compute FROM and store the value in the rtx we got. */
5136
5137 push_temp_slots ();
5138 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5139 preserve_temp_slots (result);
5140 pop_temp_slots ();
5141 return;
5142 }
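/* Sketch, illustrative only: the gimple expander typically reaches this
   function as

     expand_assignment (lhs, rhs, /*nontemporal=*/false);

   for assignments whose LHS is not a plain register, and the code above
   then selects a movmisalign store, a bit-field store via store_field,
   call-return handling, or a plain store_expr.  */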
5143
5144 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5145 succeeded, false otherwise. */
5146
5147 bool
5148 emit_storent_insn (rtx to, rtx from)
5149 {
5150 struct expand_operand ops[2];
5151 machine_mode mode = GET_MODE (to);
5152 enum insn_code code = optab_handler (storent_optab, mode);
5153
5154 if (code == CODE_FOR_nothing)
5155 return false;
5156
5157 create_fixed_operand (&ops[0], to);
5158 create_input_operand (&ops[1], from, mode);
5159 return maybe_expand_insn (code, 2, ops);
5160 }
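/* Illustrative use, not from the original source: a caller with both
   operands already in the right mode can attempt a nontemporal store and
   fall back to an ordinary move when the target has no storent pattern:

     if (!emit_storent_insn (to_rtx, temp))
       emit_move_insn (to_rtx, temp);  */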
5161
5162 /* Generate code for computing expression EXP,
5163 and storing the value into TARGET.
5164
5165 If the mode is BLKmode then we may return TARGET itself.
5166 It turns out that in BLKmode it doesn't cause a problem,
5167 because C has no operators that could combine two different
5168 assignments into the same BLKmode object with different values
5169 with no sequence point. Will other languages need this to
5170 be more thorough?
5171
5172 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5173 stack, and block moves may need to be treated specially.
5174
5175 If NONTEMPORAL is true, try using a nontemporal store instruction.
5176
5177 If BTARGET is not NULL then computed bounds of EXP are
5178 associated with BTARGET. */
5179
5180 rtx
5181 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5182 bool nontemporal, tree btarget)
5183 {
5184 rtx temp;
5185 rtx alt_rtl = NULL_RTX;
5186 location_t loc = curr_insn_location ();
5187
5188 if (VOID_TYPE_P (TREE_TYPE (exp)))
5189 {
5190 /* C++ can generate ?: expressions with a throw expression in one
5191 branch and an rvalue in the other. Here, we resolve attempts to
5192 store the throw expression's nonexistent result. */
5193 gcc_assert (!call_param_p);
5194 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5195 return NULL_RTX;
5196 }
5197 if (TREE_CODE (exp) == COMPOUND_EXPR)
5198 {
5199 /* Perform first part of compound expression, then assign from second
5200 part. */
5201 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5202 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5203 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5204 call_param_p, nontemporal, btarget);
5205 }
5206 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5207 {
5208 /* For a conditional expression, get a safe form of the target. Then
5209 test the condition, doing the appropriate assignment on either
5210 side. This avoids the creation of unnecessary temporaries.
5211 For non-BLKmode, it is more efficient not to do this. */
5212
5213 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5214
5215 do_pending_stack_adjust ();
5216 NO_DEFER_POP;
5217 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5218 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5219 nontemporal, btarget);
5220 emit_jump_insn (gen_jump (lab2));
5221 emit_barrier ();
5222 emit_label (lab1);
5223 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5224 nontemporal, btarget);
5225 emit_label (lab2);
5226 OK_DEFER_POP;
5227
5228 return NULL_RTX;
5229 }
5230 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5231 /* If this is a scalar in a register that is stored in a wider mode
5232 than the declared mode, compute the result into its declared mode
5233 and then convert to the wider mode. Our value is the computed
5234 expression. */
5235 {
5236 rtx inner_target = 0;
5237
5238 /* We can do the conversion inside EXP, which will often result
5239 in some optimizations. Do the conversion in two steps: first
5240 change the signedness, if needed, then the extend. But don't
5241 do this if the type of EXP is a subtype of something else
5242 since then the conversion might involve more than just
5243 converting modes. */
5244 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5245 && TREE_TYPE (TREE_TYPE (exp)) == 0
5246 && GET_MODE_PRECISION (GET_MODE (target))
5247 == TYPE_PRECISION (TREE_TYPE (exp)))
5248 {
5249 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5250 TYPE_UNSIGNED (TREE_TYPE (exp))))
5251 {
5252 /* Some types, e.g. Fortran's logical*4, won't have a signed
5253 version, so use the mode instead. */
5254 tree ntype
5255 = (signed_or_unsigned_type_for
5256 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5257 if (ntype == NULL)
5258 ntype = lang_hooks.types.type_for_mode
5259 (TYPE_MODE (TREE_TYPE (exp)),
5260 SUBREG_PROMOTED_SIGN (target));
5261
5262 exp = fold_convert_loc (loc, ntype, exp);
5263 }
5264
5265 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5266 (GET_MODE (SUBREG_REG (target)),
5267 SUBREG_PROMOTED_SIGN (target)),
5268 exp);
5269
5270 inner_target = SUBREG_REG (target);
5271 }
5272
5273 temp = expand_expr (exp, inner_target, VOIDmode,
5274 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5275
5276 /* Handle bounds returned by call. */
5277 if (TREE_CODE (exp) == CALL_EXPR)
5278 {
5279 rtx bounds;
5280 chkp_split_slot (temp, &temp, &bounds);
5281 if (bounds && btarget)
5282 {
5283 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5284 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5285 chkp_set_rtl_bounds (btarget, tmp);
5286 }
5287 }
5288
5289 /* If TEMP is a VOIDmode constant, use convert_modes to make
5290 sure that we properly convert it. */
5291 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5292 {
5293 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5294 temp, SUBREG_PROMOTED_SIGN (target));
5295 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5296 GET_MODE (target), temp,
5297 SUBREG_PROMOTED_SIGN (target));
5298 }
5299
5300 convert_move (SUBREG_REG (target), temp,
5301 SUBREG_PROMOTED_SIGN (target));
5302
5303 return NULL_RTX;
5304 }
5305 else if ((TREE_CODE (exp) == STRING_CST
5306 || (TREE_CODE (exp) == MEM_REF
5307 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5308 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5309 == STRING_CST
5310 && integer_zerop (TREE_OPERAND (exp, 1))))
5311 && !nontemporal && !call_param_p
5312 && MEM_P (target))
5313 {
5314 /* Optimize initialization of an array with a STRING_CST. */
5315 HOST_WIDE_INT exp_len, str_copy_len;
5316 rtx dest_mem;
5317 tree str = TREE_CODE (exp) == STRING_CST
5318 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5319
5320 exp_len = int_expr_size (exp);
5321 if (exp_len <= 0)
5322 goto normal_expr;
5323
5324 if (TREE_STRING_LENGTH (str) <= 0)
5325 goto normal_expr;
5326
5327 str_copy_len = strlen (TREE_STRING_POINTER (str));
5328 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5329 goto normal_expr;
5330
5331 str_copy_len = TREE_STRING_LENGTH (str);
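      /* A worked example of the rounding below, assuming a hypothetical
	 STORE_MAX_PIECES of 16: a string length of 5 becomes
	 (5 + 15) & ~15 == 16, i.e. the copy length is rounded up to the
	 next multiple of STORE_MAX_PIECES (only done when that value is a
	 power of two and the string is NUL-terminated).  */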
5332 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5333 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5334 {
5335 str_copy_len += STORE_MAX_PIECES - 1;
5336 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5337 }
5338 str_copy_len = MIN (str_copy_len, exp_len);
5339 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5340 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5341 MEM_ALIGN (target), false))
5342 goto normal_expr;
5343
5344 dest_mem = target;
5345
5346 dest_mem = store_by_pieces (dest_mem,
5347 str_copy_len, builtin_strncpy_read_str,
5348 CONST_CAST (char *,
5349 TREE_STRING_POINTER (str)),
5350 MEM_ALIGN (target), false,
5351 exp_len > str_copy_len ? 1 : 0);
5352 if (exp_len > str_copy_len)
5353 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5354 GEN_INT (exp_len - str_copy_len),
5355 BLOCK_OP_NORMAL);
5356 return NULL_RTX;
5357 }
5358 else
5359 {
5360 rtx tmp_target;
5361
5362 normal_expr:
5363       /* If we want to use a nontemporal store, force the value into a
5364 	 register first.  */
5365 tmp_target = nontemporal ? NULL_RTX : target;
5366 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5367 (call_param_p
5368 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5369 &alt_rtl, false);
5370
5371 /* Handle bounds returned by call. */
5372 if (TREE_CODE (exp) == CALL_EXPR)
5373 {
5374 rtx bounds;
5375 chkp_split_slot (temp, &temp, &bounds);
5376 if (bounds && btarget)
5377 {
5378 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5379 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5380 chkp_set_rtl_bounds (btarget, tmp);
5381 }
5382 }
5383 }
5384
5385 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5386 the same as that of TARGET, adjust the constant. This is needed, for
5387 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5388 only a word-sized value. */
5389 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5390 && TREE_CODE (exp) != ERROR_MARK
5391 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5392 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5393 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5394
5395 /* If value was not generated in the target, store it there.
5396 Convert the value to TARGET's type first if necessary and emit the
5397 pending incrementations that have been queued when expanding EXP.
5398 Note that we cannot emit the whole queue blindly because this will
5399 effectively disable the POST_INC optimization later.
5400
5401 If TEMP and TARGET compare equal according to rtx_equal_p, but
5402 one or both of them are volatile memory refs, we have to distinguish
5403 two cases:
5404 - expand_expr has used TARGET. In this case, we must not generate
5405 another copy. This can be detected by TARGET being equal according
5406 to == .
5407 - expand_expr has not used TARGET - that means that the source just
5408 happens to have the same RTX form. Since temp will have been created
5409 by expand_expr, it will compare unequal according to == .
5410 We must generate a copy in this case, to reach the correct number
5411 of volatile memory references. */
5412
5413 if ((! rtx_equal_p (temp, target)
5414 || (temp != target && (side_effects_p (temp)
5415 || side_effects_p (target))))
5416 && TREE_CODE (exp) != ERROR_MARK
5417 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5418 	 but TARGET is not a valid memory reference, TEMP will differ
5419 from TARGET although it is really the same location. */
5420 && !(alt_rtl
5421 && rtx_equal_p (alt_rtl, target)
5422 && !side_effects_p (alt_rtl)
5423 && !side_effects_p (target))
5424 /* If there's nothing to copy, don't bother. Don't call
5425 	 expr_size unless necessary, because the expr_size hook of some
5426 	 front ends (e.g. C++) must not be given objects that are not
5427 supposed to be bit-copied or bit-initialized. */
5428 && expr_size (exp) != const0_rtx)
5429 {
5430 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5431 {
5432 if (GET_MODE (target) == BLKmode)
5433 {
5434 /* Handle calls that return BLKmode values in registers. */
5435 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5436 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5437 else
5438 store_bit_field (target,
5439 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5440 0, 0, 0, GET_MODE (temp), temp);
5441 }
5442 else
5443 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5444 }
5445
5446 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5447 {
5448 /* Handle copying a string constant into an array. The string
5449 constant may be shorter than the array. So copy just the string's
5450 actual length, and clear the rest. First get the size of the data
5451 type of the string, which is actually the size of the target. */
5452 rtx size = expr_size (exp);
5453
5454 if (CONST_INT_P (size)
5455 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5456 emit_block_move (target, temp, size,
5457 (call_param_p
5458 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5459 else
5460 {
5461 machine_mode pointer_mode
5462 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5463 machine_mode address_mode = get_address_mode (target);
5464
5465 /* Compute the size of the data to copy from the string. */
5466 tree copy_size
5467 = size_binop_loc (loc, MIN_EXPR,
5468 make_tree (sizetype, size),
5469 size_int (TREE_STRING_LENGTH (exp)));
5470 rtx copy_size_rtx
5471 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5472 (call_param_p
5473 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5474 rtx_code_label *label = 0;
5475
5476 /* Copy that much. */
5477 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5478 TYPE_UNSIGNED (sizetype));
5479 emit_block_move (target, temp, copy_size_rtx,
5480 (call_param_p
5481 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5482
5483 /* Figure out how much is left in TARGET that we have to clear.
5484 Do all calculations in pointer_mode. */
5485 if (CONST_INT_P (copy_size_rtx))
5486 {
5487 size = plus_constant (address_mode, size,
5488 -INTVAL (copy_size_rtx));
5489 target = adjust_address (target, BLKmode,
5490 INTVAL (copy_size_rtx));
5491 }
5492 else
5493 {
5494 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5495 copy_size_rtx, NULL_RTX, 0,
5496 OPTAB_LIB_WIDEN);
5497
5498 if (GET_MODE (copy_size_rtx) != address_mode)
5499 copy_size_rtx = convert_to_mode (address_mode,
5500 copy_size_rtx,
5501 TYPE_UNSIGNED (sizetype));
5502
5503 target = offset_address (target, copy_size_rtx,
5504 highest_pow2_factor (copy_size));
5505 label = gen_label_rtx ();
5506 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5507 GET_MODE (size), 0, label);
5508 }
5509
5510 if (size != const0_rtx)
5511 clear_storage (target, size, BLOCK_OP_NORMAL);
5512
5513 if (label)
5514 emit_label (label);
5515 }
5516 }
5517 /* Handle calls that return values in multiple non-contiguous locations.
5518 The Irix 6 ABI has examples of this. */
5519 else if (GET_CODE (target) == PARALLEL)
5520 {
5521 if (GET_CODE (temp) == PARALLEL)
5522 emit_group_move (target, temp);
5523 else
5524 emit_group_load (target, temp, TREE_TYPE (exp),
5525 int_size_in_bytes (TREE_TYPE (exp)));
5526 }
5527 else if (GET_CODE (temp) == PARALLEL)
5528 emit_group_store (target, temp, TREE_TYPE (exp),
5529 int_size_in_bytes (TREE_TYPE (exp)));
5530 else if (GET_MODE (temp) == BLKmode)
5531 emit_block_move (target, temp, expr_size (exp),
5532 (call_param_p
5533 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5534 /* If we emit a nontemporal store, there is nothing else to do. */
5535 else if (nontemporal && emit_storent_insn (target, temp))
5536 ;
5537 else
5538 {
5539 temp = force_operand (temp, target);
5540 if (temp != target)
5541 emit_move_insn (target, temp);
5542 }
5543 }
5544
5545 return NULL_RTX;
5546 }
5547
5548 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5549 rtx
5550 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5551 {
5552 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5553 }
5554 \f
5555 /* Return true if field F of structure TYPE is a flexible array. */
5556
5557 static bool
5558 flexible_array_member_p (const_tree f, const_tree type)
5559 {
5560 const_tree tf;
5561
5562 tf = TREE_TYPE (f);
5563 return (DECL_CHAIN (f) == NULL
5564 && TREE_CODE (tf) == ARRAY_TYPE
5565 && TYPE_DOMAIN (tf)
5566 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5567 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5568 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5569 && int_size_in_bytes (type) >= 0);
5570 }
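
/* For illustration (the type here is hypothetical): given

     struct s { int len; char data[]; };

   DATA is the last field, its ARRAY_TYPE has a zero lower bound and no
   upper bound, and the enclosing struct still has a constant size, so
   the checks above would accept it as a flexible array member.  */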
5571
5572 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5573 must have in order for it to completely initialize a value of type TYPE.
5574 Return -1 if the number isn't known.
5575
5576 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5577
5578 static HOST_WIDE_INT
5579 count_type_elements (const_tree type, bool for_ctor_p)
5580 {
5581 switch (TREE_CODE (type))
5582 {
5583 case ARRAY_TYPE:
5584 {
5585 tree nelts;
5586
5587 nelts = array_type_nelts (type);
5588 if (nelts && tree_fits_uhwi_p (nelts))
5589 {
5590 unsigned HOST_WIDE_INT n;
5591
5592 n = tree_to_uhwi (nelts) + 1;
5593 if (n == 0 || for_ctor_p)
5594 return n;
5595 else
5596 return n * count_type_elements (TREE_TYPE (type), false);
5597 }
5598 return for_ctor_p ? -1 : 1;
5599 }
5600
5601 case RECORD_TYPE:
5602 {
5603 unsigned HOST_WIDE_INT n;
5604 tree f;
5605
5606 n = 0;
5607 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5608 if (TREE_CODE (f) == FIELD_DECL)
5609 {
5610 if (!for_ctor_p)
5611 n += count_type_elements (TREE_TYPE (f), false);
5612 else if (!flexible_array_member_p (f, type))
5613 /* Don't count flexible arrays, which are not supposed
5614 to be initialized. */
5615 n += 1;
5616 }
5617
5618 return n;
5619 }
5620
5621 case UNION_TYPE:
5622 case QUAL_UNION_TYPE:
5623 {
5624 tree f;
5625 HOST_WIDE_INT n, m;
5626
5627 gcc_assert (!for_ctor_p);
5628 /* Estimate the number of scalars in each field and pick the
5629 maximum. Other estimates would do instead; the idea is simply
5630 to make sure that the estimate is not sensitive to the ordering
5631 of the fields. */
5632 n = 1;
5633 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5634 if (TREE_CODE (f) == FIELD_DECL)
5635 {
5636 m = count_type_elements (TREE_TYPE (f), false);
5637 /* If the field doesn't span the whole union, add an extra
5638 scalar for the rest. */
5639 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5640 TYPE_SIZE (type)) != 1)
5641 m++;
5642 if (n < m)
5643 n = m;
5644 }
5645 return n;
5646 }
5647
5648 case COMPLEX_TYPE:
5649 return 2;
5650
5651 case VECTOR_TYPE:
5652 return TYPE_VECTOR_SUBPARTS (type);
5653
5654 case INTEGER_TYPE:
5655 case REAL_TYPE:
5656 case FIXED_POINT_TYPE:
5657 case ENUMERAL_TYPE:
5658 case BOOLEAN_TYPE:
5659 case POINTER_TYPE:
5660 case OFFSET_TYPE:
5661 case REFERENCE_TYPE:
5662 case NULLPTR_TYPE:
5663 return 1;
5664
5665 case ERROR_MARK:
5666 return 0;
5667
5668 case VOID_TYPE:
5669 case METHOD_TYPE:
5670 case FUNCTION_TYPE:
5671 case LANG_TYPE:
5672 default:
5673 gcc_unreachable ();
5674 }
5675 }
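
/* For illustration (hypothetical type): with

     struct q { int a; double b[3]; };

   count_type_elements (q, true) is 2, the number of top-level fields a
   constructor must provide, while count_type_elements (q, false) is 4,
   an estimate of the scalars it contains (1 for A plus 3 for B).  */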
5676
5677 /* Helper for categorize_ctor_elements. Identical interface. */
5678
5679 static bool
5680 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5681 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5682 {
5683 unsigned HOST_WIDE_INT idx;
5684 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5685 tree value, purpose, elt_type;
5686
5687 /* Whether CTOR is a valid constant initializer, in accordance with what
5688 initializer_constant_valid_p does. If inferred from the constructor
5689 elements, true until proven otherwise. */
5690 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5691 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5692
5693 nz_elts = 0;
5694 init_elts = 0;
5695 num_fields = 0;
5696 elt_type = NULL_TREE;
5697
5698 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5699 {
5700 HOST_WIDE_INT mult = 1;
5701
5702 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5703 {
5704 tree lo_index = TREE_OPERAND (purpose, 0);
5705 tree hi_index = TREE_OPERAND (purpose, 1);
5706
5707 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5708 mult = (tree_to_uhwi (hi_index)
5709 - tree_to_uhwi (lo_index) + 1);
5710 }
5711 num_fields += mult;
5712 elt_type = TREE_TYPE (value);
5713
5714 switch (TREE_CODE (value))
5715 {
5716 case CONSTRUCTOR:
5717 {
5718 HOST_WIDE_INT nz = 0, ic = 0;
5719
5720 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5721 p_complete);
5722
5723 nz_elts += mult * nz;
5724 init_elts += mult * ic;
5725
5726 if (const_from_elts_p && const_p)
5727 const_p = const_elt_p;
5728 }
5729 break;
5730
5731 case INTEGER_CST:
5732 case REAL_CST:
5733 case FIXED_CST:
5734 if (!initializer_zerop (value))
5735 nz_elts += mult;
5736 init_elts += mult;
5737 break;
5738
5739 case STRING_CST:
5740 nz_elts += mult * TREE_STRING_LENGTH (value);
5741 init_elts += mult * TREE_STRING_LENGTH (value);
5742 break;
5743
5744 case COMPLEX_CST:
5745 if (!initializer_zerop (TREE_REALPART (value)))
5746 nz_elts += mult;
5747 if (!initializer_zerop (TREE_IMAGPART (value)))
5748 nz_elts += mult;
5749 init_elts += mult;
5750 break;
5751
5752 case VECTOR_CST:
5753 {
5754 unsigned i;
5755 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5756 {
5757 tree v = VECTOR_CST_ELT (value, i);
5758 if (!initializer_zerop (v))
5759 nz_elts += mult;
5760 init_elts += mult;
5761 }
5762 }
5763 break;
5764
5765 default:
5766 {
5767 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5768 nz_elts += mult * tc;
5769 init_elts += mult * tc;
5770
5771 if (const_from_elts_p && const_p)
5772 const_p = initializer_constant_valid_p (value, elt_type)
5773 != NULL_TREE;
5774 }
5775 break;
5776 }
5777 }
5778
5779 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5780 num_fields, elt_type))
5781 *p_complete = false;
5782
5783 *p_nz_elts += nz_elts;
5784 *p_init_elts += init_elts;
5785
5786 return const_p;
5787 }
5788
5789 /* Examine CTOR to discover:
5790 * how many scalar fields are set to nonzero values,
5791 and place it in *P_NZ_ELTS;
5792 * how many scalar fields in total are in CTOR,
5793 and place it in *P_INIT_ELTS;
5794 * whether the constructor is complete -- in the sense that every
5795 meaningful byte is explicitly given a value --
5796 and place it in *P_COMPLETE.
5797
5798 Return whether or not CTOR is a valid static constant initializer, the same
5799 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5800
5801 bool
5802 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5803 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5804 {
5805 *p_nz_elts = 0;
5806 *p_init_elts = 0;
5807 *p_complete = true;
5808
5809 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5810 }
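
/* For illustration, assuming the front end records all four elements of

     int a[4] = { 0, 5, 0, 7 };

   explicitly in the CONSTRUCTOR: *P_NZ_ELTS comes back as 2, *P_INIT_ELTS
   as 4, and *P_COMPLETE as true, since every element of the array is
   covered.  */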
5811
5812 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5813 of which had type LAST_TYPE. Each element was itself a complete
5814 initializer, in the sense that every meaningful byte was explicitly
5815 given a value. Return true if the same is true for the constructor
5816 as a whole. */
5817
5818 bool
5819 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5820 const_tree last_type)
5821 {
5822 if (TREE_CODE (type) == UNION_TYPE
5823 || TREE_CODE (type) == QUAL_UNION_TYPE)
5824 {
5825 if (num_elts == 0)
5826 return false;
5827
5828 gcc_assert (num_elts == 1 && last_type);
5829
5830 /* ??? We could look at each element of the union, and find the
5831 	 largest element, which would avoid comparing the size of the
5832 initialized element against any tail padding in the union.
5833 Doesn't seem worth the effort... */
5834 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5835 }
5836
5837 return count_type_elements (type, true) == num_elts;
5838 }
5839
5840 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
5841
5842 static int
5843 mostly_zeros_p (const_tree exp)
5844 {
5845 if (TREE_CODE (exp) == CONSTRUCTOR)
5846 {
5847 HOST_WIDE_INT nz_elts, init_elts;
5848 bool complete_p;
5849
5850 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5851 return !complete_p || nz_elts < init_elts / 4;
5852 }
5853
5854 return initializer_zerop (exp);
5855 }
5856
5857 /* Return 1 if EXP contains all zeros. */
5858
5859 static int
5860 all_zeros_p (const_tree exp)
5861 {
5862 if (TREE_CODE (exp) == CONSTRUCTOR)
5863 {
5864 HOST_WIDE_INT nz_elts, init_elts;
5865 bool complete_p;
5866
5867 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5868 return nz_elts == 0;
5869 }
5870
5871 return initializer_zerop (exp);
5872 }
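
/* For illustration: with a CONSTRUCTOR for

     int v[8] = { 0, 0, 0, 1, 0, 0, 0, 0 };

   that lists all eight elements, nz_elts is 1 and init_elts is 8, so
   mostly_zeros_p returns true (1 < 8 / 4) while all_zeros_p returns
   false.  */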
5873 \f
5874 /* Helper function for store_constructor.
5875 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5876 CLEARED is as for store_constructor.
5877 ALIAS_SET is the alias set to use for any stores.
5878
5879 This provides a recursive shortcut back to store_constructor when it isn't
5880 necessary to go through store_field. This is so that we can pass through
5881 the cleared field to let store_constructor know that we may not have to
5882 clear a substructure if the outer structure has already been cleared. */
5883
5884 static void
5885 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5886 HOST_WIDE_INT bitpos, machine_mode mode,
5887 tree exp, int cleared, alias_set_type alias_set)
5888 {
5889 if (TREE_CODE (exp) == CONSTRUCTOR
5890 /* We can only call store_constructor recursively if the size and
5891 bit position are on a byte boundary. */
5892 && bitpos % BITS_PER_UNIT == 0
5893 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5894 /* If we have a nonzero bitpos for a register target, then we just
5895 let store_field do the bitfield handling. This is unlikely to
5896 	 generate unnecessary clear instructions anyway.  */
5897 && (bitpos == 0 || MEM_P (target)))
5898 {
5899 if (MEM_P (target))
5900 target
5901 = adjust_address (target,
5902 GET_MODE (target) == BLKmode
5903 || 0 != (bitpos
5904 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5905 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5906
5907
5908 /* Update the alias set, if required. */
5909 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5910 && MEM_ALIAS_SET (target) != 0)
5911 {
5912 target = copy_rtx (target);
5913 set_mem_alias_set (target, alias_set);
5914 }
5915
5916 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5917 }
5918 else
5919 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5920 }
5921
5922
5923 /* Returns the number of FIELD_DECLs in TYPE. */
5924
5925 static int
5926 fields_length (const_tree type)
5927 {
5928 tree t = TYPE_FIELDS (type);
5929 int count = 0;
5930
5931 for (; t; t = DECL_CHAIN (t))
5932 if (TREE_CODE (t) == FIELD_DECL)
5933 ++count;
5934
5935 return count;
5936 }
5937
5938
5939 /* Store the value of constructor EXP into the rtx TARGET.
5940 TARGET is either a REG or a MEM; we know it cannot conflict, since
5941 safe_from_p has been called.
5942 CLEARED is true if TARGET is known to have been zeroed.
5943 SIZE is the number of bytes of TARGET we are allowed to modify: this
5944 may not be the same as the size of EXP if we are assigning to a field
5945 which has been packed to exclude padding bits. */
5946
5947 static void
5948 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5949 {
5950 tree type = TREE_TYPE (exp);
5951 #ifdef WORD_REGISTER_OPERATIONS
5952 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5953 #endif
5954
5955 switch (TREE_CODE (type))
5956 {
5957 case RECORD_TYPE:
5958 case UNION_TYPE:
5959 case QUAL_UNION_TYPE:
5960 {
5961 unsigned HOST_WIDE_INT idx;
5962 tree field, value;
5963
5964 /* If size is zero or the target is already cleared, do nothing. */
5965 if (size == 0 || cleared)
5966 cleared = 1;
5967 /* We either clear the aggregate or indicate the value is dead. */
5968 else if ((TREE_CODE (type) == UNION_TYPE
5969 || TREE_CODE (type) == QUAL_UNION_TYPE)
5970 && ! CONSTRUCTOR_ELTS (exp))
5971 /* If the constructor is empty, clear the union. */
5972 {
5973 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5974 cleared = 1;
5975 }
5976
5977 /* If we are building a static constructor into a register,
5978 set the initial value as zero so we can fold the value into
5979 a constant. But if more than one register is involved,
5980 	   this probably does more harm than good.  */
5981 else if (REG_P (target) && TREE_STATIC (exp)
5982 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5983 {
5984 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5985 cleared = 1;
5986 }
5987
5988 /* If the constructor has fewer fields than the structure or
5989 if we are initializing the structure to mostly zeros, clear
5990 the whole structure first. Don't do this if TARGET is a
5991 register whose mode size isn't equal to SIZE since
5992 clear_storage can't handle this case. */
5993 else if (size > 0
5994 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5995 != fields_length (type))
5996 || mostly_zeros_p (exp))
5997 && (!REG_P (target)
5998 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5999 == size)))
6000 {
6001 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6002 cleared = 1;
6003 }
6004
6005 if (REG_P (target) && !cleared)
6006 emit_clobber (target);
6007
6008 /* Store each element of the constructor into the
6009 corresponding field of TARGET. */
6010 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6011 {
6012 machine_mode mode;
6013 HOST_WIDE_INT bitsize;
6014 HOST_WIDE_INT bitpos = 0;
6015 tree offset;
6016 rtx to_rtx = target;
6017
6018 /* Just ignore missing fields. We cleared the whole
6019 structure, above, if any fields are missing. */
6020 if (field == 0)
6021 continue;
6022
6023 if (cleared && initializer_zerop (value))
6024 continue;
6025
6026 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6027 bitsize = tree_to_uhwi (DECL_SIZE (field));
6028 else
6029 bitsize = -1;
6030
6031 mode = DECL_MODE (field);
6032 if (DECL_BIT_FIELD (field))
6033 mode = VOIDmode;
6034
6035 offset = DECL_FIELD_OFFSET (field);
6036 if (tree_fits_shwi_p (offset)
6037 && tree_fits_shwi_p (bit_position (field)))
6038 {
6039 bitpos = int_bit_position (field);
6040 offset = 0;
6041 }
6042 else
6043 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6044
6045 if (offset)
6046 {
6047 machine_mode address_mode;
6048 rtx offset_rtx;
6049
6050 offset
6051 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6052 make_tree (TREE_TYPE (exp),
6053 target));
6054
6055 offset_rtx = expand_normal (offset);
6056 gcc_assert (MEM_P (to_rtx));
6057
6058 address_mode = get_address_mode (to_rtx);
6059 if (GET_MODE (offset_rtx) != address_mode)
6060 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6061
6062 to_rtx = offset_address (to_rtx, offset_rtx,
6063 highest_pow2_factor (offset));
6064 }
6065
6066 #ifdef WORD_REGISTER_OPERATIONS
6067 /* If this initializes a field that is smaller than a
6068 word, at the start of a word, try to widen it to a full
6069 word. This special case allows us to output C++ member
6070 function initializations in a form that the optimizers
6071 can understand. */
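	  /* A sketch of the effect, assuming a 32-bit word and a 16-bit
	     integer field at bit position 0 initialized with the constant 5:
	     the value is converted to word_mode and, on a big-endian target,
	     shifted left by BITS_PER_WORD - bitsize bits, so a single
	     full-word store replaces a narrower bit-field insert.  */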
6072 if (REG_P (target)
6073 && bitsize < BITS_PER_WORD
6074 && bitpos % BITS_PER_WORD == 0
6075 && GET_MODE_CLASS (mode) == MODE_INT
6076 && TREE_CODE (value) == INTEGER_CST
6077 && exp_size >= 0
6078 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6079 {
6080 tree type = TREE_TYPE (value);
6081
6082 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6083 {
6084 type = lang_hooks.types.type_for_mode
6085 (word_mode, TYPE_UNSIGNED (type));
6086 value = fold_convert (type, value);
6087 }
6088
6089 if (BYTES_BIG_ENDIAN)
6090 value
6091 = fold_build2 (LSHIFT_EXPR, type, value,
6092 build_int_cst (type,
6093 BITS_PER_WORD - bitsize));
6094 bitsize = BITS_PER_WORD;
6095 mode = word_mode;
6096 }
6097 #endif
6098
6099 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6100 && DECL_NONADDRESSABLE_P (field))
6101 {
6102 to_rtx = copy_rtx (to_rtx);
6103 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6104 }
6105
6106 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6107 value, cleared,
6108 get_alias_set (TREE_TYPE (field)));
6109 }
6110 break;
6111 }
6112 case ARRAY_TYPE:
6113 {
6114 tree value, index;
6115 unsigned HOST_WIDE_INT i;
6116 int need_to_clear;
6117 tree domain;
6118 tree elttype = TREE_TYPE (type);
6119 int const_bounds_p;
6120 HOST_WIDE_INT minelt = 0;
6121 HOST_WIDE_INT maxelt = 0;
6122
6123 domain = TYPE_DOMAIN (type);
6124 const_bounds_p = (TYPE_MIN_VALUE (domain)
6125 && TYPE_MAX_VALUE (domain)
6126 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6127 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6128
6129 /* If we have constant bounds for the range of the type, get them. */
6130 if (const_bounds_p)
6131 {
6132 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6133 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6134 }
6135
6136 /* If the constructor has fewer elements than the array, clear
6137 	   the whole array first.  Similarly if this is a static
6138 constructor of a non-BLKmode object. */
6139 if (cleared)
6140 need_to_clear = 0;
6141 else if (REG_P (target) && TREE_STATIC (exp))
6142 need_to_clear = 1;
6143 else
6144 {
6145 unsigned HOST_WIDE_INT idx;
6146 tree index, value;
6147 HOST_WIDE_INT count = 0, zero_count = 0;
6148 need_to_clear = ! const_bounds_p;
6149
6150 /* This loop is a more accurate version of the loop in
6151 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6152 is also needed to check for missing elements. */
6153 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6154 {
6155 HOST_WIDE_INT this_node_count;
6156
6157 if (need_to_clear)
6158 break;
6159
6160 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6161 {
6162 tree lo_index = TREE_OPERAND (index, 0);
6163 tree hi_index = TREE_OPERAND (index, 1);
6164
6165 if (! tree_fits_uhwi_p (lo_index)
6166 || ! tree_fits_uhwi_p (hi_index))
6167 {
6168 need_to_clear = 1;
6169 break;
6170 }
6171
6172 this_node_count = (tree_to_uhwi (hi_index)
6173 - tree_to_uhwi (lo_index) + 1);
6174 }
6175 else
6176 this_node_count = 1;
6177
6178 count += this_node_count;
6179 if (mostly_zeros_p (value))
6180 zero_count += this_node_count;
6181 }
6182
6183 /* Clear the entire array first if there are any missing
6184 elements, or if the incidence of zero elements is >=
6185 75%. */
6186 if (! need_to_clear
6187 && (count < maxelt - minelt + 1
6188 || 4 * zero_count >= 3 * count))
6189 need_to_clear = 1;
6190 }
6191
6192 if (need_to_clear && size > 0)
6193 {
6194 if (REG_P (target))
6195 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6196 else
6197 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6198 cleared = 1;
6199 }
6200
6201 if (!cleared && REG_P (target))
6202 /* Inform later passes that the old value is dead. */
6203 emit_clobber (target);
6204
6205 /* Store each element of the constructor into the
6206 corresponding element of TARGET, determined by counting the
6207 elements. */
6208 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6209 {
6210 machine_mode mode;
6211 HOST_WIDE_INT bitsize;
6212 HOST_WIDE_INT bitpos;
6213 rtx xtarget = target;
6214
6215 if (cleared && initializer_zerop (value))
6216 continue;
6217
6218 mode = TYPE_MODE (elttype);
6219 if (mode == BLKmode)
6220 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6221 ? tree_to_uhwi (TYPE_SIZE (elttype))
6222 : -1);
6223 else
6224 bitsize = GET_MODE_BITSIZE (mode);
6225
6226 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6227 {
6228 tree lo_index = TREE_OPERAND (index, 0);
6229 tree hi_index = TREE_OPERAND (index, 1);
6230 rtx index_r, pos_rtx;
6231 HOST_WIDE_INT lo, hi, count;
6232 tree position;
6233
6234 /* If the range is constant and "small", unroll the loop. */
6235 if (const_bounds_p
6236 && tree_fits_shwi_p (lo_index)
6237 && tree_fits_shwi_p (hi_index)
6238 && (lo = tree_to_shwi (lo_index),
6239 hi = tree_to_shwi (hi_index),
6240 count = hi - lo + 1,
6241 (!MEM_P (target)
6242 || count <= 2
6243 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6244 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6245 <= 40 * 8)))))
6246 {
6247 lo -= minelt; hi -= minelt;
6248 for (; lo <= hi; lo++)
6249 {
6250 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6251
6252 if (MEM_P (target)
6253 && !MEM_KEEP_ALIAS_SET_P (target)
6254 && TREE_CODE (type) == ARRAY_TYPE
6255 && TYPE_NONALIASED_COMPONENT (type))
6256 {
6257 target = copy_rtx (target);
6258 MEM_KEEP_ALIAS_SET_P (target) = 1;
6259 }
6260
6261 store_constructor_field
6262 (target, bitsize, bitpos, mode, value, cleared,
6263 get_alias_set (elttype));
6264 }
6265 }
6266 else
6267 {
6268 rtx_code_label *loop_start = gen_label_rtx ();
6269 rtx_code_label *loop_end = gen_label_rtx ();
6270 tree exit_cond;
6271
6272 expand_normal (hi_index);
6273
6274 index = build_decl (EXPR_LOCATION (exp),
6275 VAR_DECL, NULL_TREE, domain);
6276 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6277 SET_DECL_RTL (index, index_r);
6278 store_expr (lo_index, index_r, 0, false);
6279
6280 /* Build the head of the loop. */
6281 do_pending_stack_adjust ();
6282 emit_label (loop_start);
6283
6284 /* Assign value to element index. */
6285 position =
6286 fold_convert (ssizetype,
6287 fold_build2 (MINUS_EXPR,
6288 TREE_TYPE (index),
6289 index,
6290 TYPE_MIN_VALUE (domain)));
6291
6292 position =
6293 size_binop (MULT_EXPR, position,
6294 fold_convert (ssizetype,
6295 TYPE_SIZE_UNIT (elttype)));
6296
6297 pos_rtx = expand_normal (position);
6298 xtarget = offset_address (target, pos_rtx,
6299 highest_pow2_factor (position));
6300 xtarget = adjust_address (xtarget, mode, 0);
6301 if (TREE_CODE (value) == CONSTRUCTOR)
6302 store_constructor (value, xtarget, cleared,
6303 bitsize / BITS_PER_UNIT);
6304 else
6305 store_expr (value, xtarget, 0, false);
6306
6307 /* Generate a conditional jump to exit the loop. */
6308 exit_cond = build2 (LT_EXPR, integer_type_node,
6309 index, hi_index);
6310 jumpif (exit_cond, loop_end, -1);
6311
6312 /* Update the loop counter, and jump to the head of
6313 the loop. */
6314 expand_assignment (index,
6315 build2 (PLUS_EXPR, TREE_TYPE (index),
6316 index, integer_one_node),
6317 false);
6318
6319 emit_jump (loop_start);
6320
6321 /* Build the end of the loop. */
6322 emit_label (loop_end);
6323 }
6324 }
6325 else if ((index != 0 && ! tree_fits_shwi_p (index))
6326 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6327 {
6328 tree position;
6329
6330 if (index == 0)
6331 index = ssize_int (1);
6332
6333 if (minelt)
6334 index = fold_convert (ssizetype,
6335 fold_build2 (MINUS_EXPR,
6336 TREE_TYPE (index),
6337 index,
6338 TYPE_MIN_VALUE (domain)));
6339
6340 position =
6341 size_binop (MULT_EXPR, index,
6342 fold_convert (ssizetype,
6343 TYPE_SIZE_UNIT (elttype)));
6344 xtarget = offset_address (target,
6345 expand_normal (position),
6346 highest_pow2_factor (position));
6347 xtarget = adjust_address (xtarget, mode, 0);
6348 store_expr (value, xtarget, 0, false);
6349 }
6350 else
6351 {
6352 if (index != 0)
6353 bitpos = ((tree_to_shwi (index) - minelt)
6354 * tree_to_uhwi (TYPE_SIZE (elttype)));
6355 else
6356 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6357
6358 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6359 && TREE_CODE (type) == ARRAY_TYPE
6360 && TYPE_NONALIASED_COMPONENT (type))
6361 {
6362 target = copy_rtx (target);
6363 MEM_KEEP_ALIAS_SET_P (target) = 1;
6364 }
6365 store_constructor_field (target, bitsize, bitpos, mode, value,
6366 cleared, get_alias_set (elttype));
6367 }
6368 }
6369 break;
6370 }
6371
6372 case VECTOR_TYPE:
6373 {
6374 unsigned HOST_WIDE_INT idx;
6375 constructor_elt *ce;
6376 int i;
6377 int need_to_clear;
6378 int icode = CODE_FOR_nothing;
6379 tree elttype = TREE_TYPE (type);
6380 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6381 machine_mode eltmode = TYPE_MODE (elttype);
6382 HOST_WIDE_INT bitsize;
6383 HOST_WIDE_INT bitpos;
6384 rtvec vector = NULL;
6385 unsigned n_elts;
6386 alias_set_type alias;
6387
6388 gcc_assert (eltmode != BLKmode);
6389
6390 n_elts = TYPE_VECTOR_SUBPARTS (type);
6391 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6392 {
6393 machine_mode mode = GET_MODE (target);
6394
6395 icode = (int) optab_handler (vec_init_optab, mode);
6396 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6397 if (icode != CODE_FOR_nothing)
6398 {
6399 tree value;
6400
6401 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6402 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6403 {
6404 icode = CODE_FOR_nothing;
6405 break;
6406 }
6407 }
6408 if (icode != CODE_FOR_nothing)
6409 {
6410 unsigned int i;
6411
6412 vector = rtvec_alloc (n_elts);
6413 for (i = 0; i < n_elts; i++)
6414 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6415 }
6416 }
6417
6418 /* If the constructor has fewer elements than the vector,
6419 	   clear the whole vector first.  Similarly if this is a static
6420 constructor of a non-BLKmode object. */
6421 if (cleared)
6422 need_to_clear = 0;
6423 else if (REG_P (target) && TREE_STATIC (exp))
6424 need_to_clear = 1;
6425 else
6426 {
6427 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6428 tree value;
6429
6430 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6431 {
6432 int n_elts_here = tree_to_uhwi
6433 (int_const_binop (TRUNC_DIV_EXPR,
6434 TYPE_SIZE (TREE_TYPE (value)),
6435 TYPE_SIZE (elttype)));
6436
6437 count += n_elts_here;
6438 if (mostly_zeros_p (value))
6439 zero_count += n_elts_here;
6440 }
6441
6442 /* Clear the entire vector first if there are any missing elements,
6443 or if the incidence of zero elements is >= 75%. */
6444 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6445 }
6446
6447 if (need_to_clear && size > 0 && !vector)
6448 {
6449 if (REG_P (target))
6450 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6451 else
6452 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6453 cleared = 1;
6454 }
6455
6456 /* Inform later passes that the old value is dead. */
6457 if (!cleared && !vector && REG_P (target))
6458 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6459
6460 if (MEM_P (target))
6461 alias = MEM_ALIAS_SET (target);
6462 else
6463 alias = get_alias_set (elttype);
6464
6465 /* Store each element of the constructor into the corresponding
6466 element of TARGET, determined by counting the elements. */
6467 for (idx = 0, i = 0;
6468 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6469 idx++, i += bitsize / elt_size)
6470 {
6471 HOST_WIDE_INT eltpos;
6472 tree value = ce->value;
6473
6474 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6475 if (cleared && initializer_zerop (value))
6476 continue;
6477
6478 if (ce->index)
6479 eltpos = tree_to_uhwi (ce->index);
6480 else
6481 eltpos = i;
6482
6483 if (vector)
6484 {
6485 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6486 elements. */
6487 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6488 RTVEC_ELT (vector, eltpos)
6489 = expand_normal (value);
6490 }
6491 else
6492 {
6493 machine_mode value_mode =
6494 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6495 ? TYPE_MODE (TREE_TYPE (value))
6496 : eltmode;
6497 bitpos = eltpos * elt_size;
6498 store_constructor_field (target, bitsize, bitpos, value_mode,
6499 value, cleared, alias);
6500 }
6501 }
6502
6503 if (vector)
6504 emit_insn (GEN_FCN (icode)
6505 (target,
6506 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6507 break;
6508 }
6509
6510 default:
6511 gcc_unreachable ();
6512 }
6513 }
6514
6515 /* Store the value of EXP (an expression tree)
6516 into a subfield of TARGET which has mode MODE and occupies
6517 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6518 If MODE is VOIDmode, it means that we are storing into a bit-field.
6519
6520 BITREGION_START is the bit position of the first bit-field in this region.
6521 BITREGION_END is the bit position of the ending bit-field in this region.
6522 These two fields are 0 if the C++ memory model does not apply,
6523 or if we are not interested in keeping track of bit-field regions.
6524
6525 Always return const0_rtx unless we have something particular to
6526 return.
6527
6528 ALIAS_SET is the alias set for the destination. This value will
6529 (in general) be different from that for TARGET, since TARGET is a
6530 reference to the containing structure.
6531
6532 If NONTEMPORAL is true, try generating a nontemporal store. */
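
/* For illustration (hypothetical struct, typical little-endian layout):
   an assignment to X.B in

     struct { unsigned a : 3; unsigned b : 5; } x;

   would reach store_field with BITSIZE 5, BITPOS 3 and MODE VOIDmode,
   taking the bit-field path below.  */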
6533
6534 static rtx
6535 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6536 unsigned HOST_WIDE_INT bitregion_start,
6537 unsigned HOST_WIDE_INT bitregion_end,
6538 machine_mode mode, tree exp,
6539 alias_set_type alias_set, bool nontemporal)
6540 {
6541 if (TREE_CODE (exp) == ERROR_MARK)
6542 return const0_rtx;
6543
6544 /* If we have nothing to store, do nothing unless the expression has
6545 side-effects. */
6546 if (bitsize == 0)
6547 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6548
6549 if (GET_CODE (target) == CONCAT)
6550 {
6551 /* We're storing into a struct containing a single __complex. */
6552
6553 gcc_assert (!bitpos);
6554 return store_expr (exp, target, 0, nontemporal);
6555 }
6556
6557 /* If the structure is in a register or if the component
6558 is a bit field, we cannot use addressing to access it.
6559 Use bit-field techniques or SUBREG to store in it. */
6560
6561 if (mode == VOIDmode
6562 || (mode != BLKmode && ! direct_store[(int) mode]
6563 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6564 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6565 || REG_P (target)
6566 || GET_CODE (target) == SUBREG
6567 /* If the field isn't aligned enough to store as an ordinary memref,
6568 store it as a bit field. */
6569 || (mode != BLKmode
6570 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6571 || bitpos % GET_MODE_ALIGNMENT (mode))
6572 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6573 || (bitpos % BITS_PER_UNIT != 0)))
6574 || (bitsize >= 0 && mode != BLKmode
6575 && GET_MODE_BITSIZE (mode) > bitsize)
6576 /* If the RHS and field are a constant size and the size of the
6577 RHS isn't the same size as the bitfield, we must use bitfield
6578 operations. */
6579 || (bitsize >= 0
6580 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6581 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6582 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6583 decl we must use bitfield operations. */
6584 || (bitsize >= 0
6585 && TREE_CODE (exp) == MEM_REF
6586 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6587 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6588 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6589 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6590 {
6591 rtx temp;
6592 gimple nop_def;
6593
6594 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6595 implies a mask operation. If the precision is the same size as
6596 the field we're storing into, that mask is redundant. This is
6597 particularly common with bit field assignments generated by the
6598 C front end. */
6599 nop_def = get_def_for_expr (exp, NOP_EXPR);
6600 if (nop_def)
6601 {
6602 tree type = TREE_TYPE (exp);
6603 if (INTEGRAL_TYPE_P (type)
6604 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6605 && bitsize == TYPE_PRECISION (type))
6606 {
6607 tree op = gimple_assign_rhs1 (nop_def);
6608 type = TREE_TYPE (op);
6609 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6610 exp = op;
6611 }
6612 }
6613
6614 temp = expand_normal (exp);
6615
6616       /* If BITSIZE is narrower than the size of the type of EXP,
6617 	 we will be narrowing TEMP.  Normally, what's wanted are the
6618 	 low-order bits.  However, if EXP's type is a record and this is a
6619 	 big-endian machine, we want the upper BITSIZE bits.  */
6620 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6621 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6622 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6623 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6624 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6625 NULL_RTX, 1);
6626
6627 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6628 if (mode != VOIDmode && mode != BLKmode
6629 && mode != TYPE_MODE (TREE_TYPE (exp)))
6630 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6631
6632 /* If the modes of TEMP and TARGET are both BLKmode, both
6633 must be in memory and BITPOS must be aligned on a byte
6634 boundary. If so, we simply do a block copy. Likewise
6635 for a BLKmode-like TARGET. */
6636 if (GET_MODE (temp) == BLKmode
6637 && (GET_MODE (target) == BLKmode
6638 || (MEM_P (target)
6639 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6640 && (bitpos % BITS_PER_UNIT) == 0
6641 && (bitsize % BITS_PER_UNIT) == 0)))
6642 {
6643 gcc_assert (MEM_P (target) && MEM_P (temp)
6644 && (bitpos % BITS_PER_UNIT) == 0);
6645
6646 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6647 emit_block_move (target, temp,
6648 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6649 / BITS_PER_UNIT),
6650 BLOCK_OP_NORMAL);
6651
6652 return const0_rtx;
6653 }
6654
6655 /* Handle calls that return values in multiple non-contiguous locations.
6656 The Irix 6 ABI has examples of this. */
6657 if (GET_CODE (temp) == PARALLEL)
6658 {
6659 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6660 rtx temp_target;
6661 if (mode == BLKmode || mode == VOIDmode)
6662 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6663 temp_target = gen_reg_rtx (mode);
6664 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6665 temp = temp_target;
6666 }
6667 else if (mode == BLKmode)
6668 {
6669 /* Handle calls that return BLKmode values in registers. */
6670 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6671 {
6672 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6673 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6674 temp = temp_target;
6675 }
6676 else
6677 {
6678 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6679 rtx temp_target;
6680 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6681 temp_target = gen_reg_rtx (mode);
6682 temp_target
6683 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6684 temp_target, mode, mode);
6685 temp = temp_target;
6686 }
6687 }
6688
6689 /* Store the value in the bitfield. */
6690 store_bit_field (target, bitsize, bitpos,
6691 bitregion_start, bitregion_end,
6692 mode, temp);
6693
6694 return const0_rtx;
6695 }
6696 else
6697 {
6698 /* Now build a reference to just the desired component. */
6699 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6700
6701 if (to_rtx == target)
6702 to_rtx = copy_rtx (to_rtx);
6703
6704 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6705 set_mem_alias_set (to_rtx, alias_set);
6706
6707 return store_expr (exp, to_rtx, 0, nontemporal);
6708 }
6709 }
6710 \f
6711 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6712 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6713 codes and find the ultimate containing object, which we return.
6714
6715 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6716 bit position, and *PUNSIGNEDP to the signedness of the field.
6717 If the position of the field is variable, we store a tree
6718 giving the variable offset (in units) in *POFFSET.
6719 This offset is in addition to the bit position.
6720 If the position is not variable, we store 0 in *POFFSET.
6721
6722 If any of the extraction expressions is volatile,
6723 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6724
6725 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6726 Otherwise, it is a mode that can be used to access the field.
6727
6728 If the field describes a variable-sized object, *PMODE is set to
6729 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6730 this case, but the address of the object can be found.
6731
6732 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6733 look through nodes that serve as markers of a greater alignment than
6734 the one that can be deduced from the expression. These nodes make it
6735 possible for front-ends to prevent temporaries from being created by
6736 the middle-end on alignment considerations. For that purpose, the
6737 normal operating mode at high-level is to always pass FALSE so that
6738 the ultimate containing object is really returned; moreover, the
6739 associated predicate handled_component_p will always return TRUE
6740 on these nodes, thus indicating that they are essentially handled
6741 by get_inner_reference. TRUE should only be passed when the caller
6742 is scanning the expression in order to build another representation
6743 and specifically knows how to handle these nodes; as such, this is
6744 the normal operating mode in the RTL expanders. */
6745
6746 tree
6747 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6748 HOST_WIDE_INT *pbitpos, tree *poffset,
6749 machine_mode *pmode, int *punsignedp,
6750 int *pvolatilep, bool keep_aligning)
6751 {
6752 tree size_tree = 0;
6753 machine_mode mode = VOIDmode;
6754 bool blkmode_bitfield = false;
6755 tree offset = size_zero_node;
6756 offset_int bit_offset = 0;
6757
6758 /* First get the mode, signedness, and size. We do this from just the
6759 outermost expression. */
6760 *pbitsize = -1;
6761 if (TREE_CODE (exp) == COMPONENT_REF)
6762 {
6763 tree field = TREE_OPERAND (exp, 1);
6764 size_tree = DECL_SIZE (field);
6765 if (flag_strict_volatile_bitfields > 0
6766 && TREE_THIS_VOLATILE (exp)
6767 && DECL_BIT_FIELD_TYPE (field)
6768 && DECL_MODE (field) != BLKmode)
6769 /* Volatile bitfields should be accessed in the mode of the
6770 field's type, not the mode computed based on the bit
6771 size. */
6772 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6773 else if (!DECL_BIT_FIELD (field))
6774 mode = DECL_MODE (field);
6775 else if (DECL_MODE (field) == BLKmode)
6776 blkmode_bitfield = true;
6777
6778 *punsignedp = DECL_UNSIGNED (field);
6779 }
6780 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6781 {
6782 size_tree = TREE_OPERAND (exp, 1);
6783 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6784 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6785
6786       /* For vector types, if the access has the size of the inner type, use
6787 	 the mode of the inner type.  */
6788 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6789 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6790 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6791 mode = TYPE_MODE (TREE_TYPE (exp));
6792 }
6793 else
6794 {
6795 mode = TYPE_MODE (TREE_TYPE (exp));
6796 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6797
6798 if (mode == BLKmode)
6799 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6800 else
6801 *pbitsize = GET_MODE_BITSIZE (mode);
6802 }
6803
6804 if (size_tree != 0)
6805 {
6806 if (! tree_fits_uhwi_p (size_tree))
6807 mode = BLKmode, *pbitsize = -1;
6808 else
6809 *pbitsize = tree_to_uhwi (size_tree);
6810 }
6811
6812 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6813 and find the ultimate containing object. */
6814 while (1)
6815 {
6816 switch (TREE_CODE (exp))
6817 {
6818 case BIT_FIELD_REF:
6819 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6820 break;
6821
6822 case COMPONENT_REF:
6823 {
6824 tree field = TREE_OPERAND (exp, 1);
6825 tree this_offset = component_ref_field_offset (exp);
6826
6827 /* If this field hasn't been filled in yet, don't go past it.
6828 This should only happen when folding expressions made during
6829 type construction. */
6830 if (this_offset == 0)
6831 break;
6832
6833 offset = size_binop (PLUS_EXPR, offset, this_offset);
6834 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6835
6836 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6837 }
6838 break;
6839
6840 case ARRAY_REF:
6841 case ARRAY_RANGE_REF:
6842 {
6843 tree index = TREE_OPERAND (exp, 1);
6844 tree low_bound = array_ref_low_bound (exp);
6845 tree unit_size = array_ref_element_size (exp);
6846
6847 /* We assume all arrays have sizes that are a multiple of a byte.
6848 First subtract the lower bound, if any, in the type of the
6849 index, then convert to sizetype and multiply by the size of
6850 the array element. */
6851 if (! integer_zerop (low_bound))
6852 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6853 index, low_bound);
6854
6855 offset = size_binop (PLUS_EXPR, offset,
6856 size_binop (MULT_EXPR,
6857 fold_convert (sizetype, index),
6858 unit_size));
6859 }
6860 break;
6861
6862 case REALPART_EXPR:
6863 break;
6864
6865 case IMAGPART_EXPR:
6866 bit_offset += *pbitsize;
6867 break;
6868
6869 case VIEW_CONVERT_EXPR:
6870 if (keep_aligning && STRICT_ALIGNMENT
6871 && (TYPE_ALIGN (TREE_TYPE (exp))
6872 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6873 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6874 < BIGGEST_ALIGNMENT)
6875 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6876 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6877 goto done;
6878 break;
6879
6880 case MEM_REF:
6881 /* Hand back the decl for MEM[&decl, off]. */
6882 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6883 {
6884 tree off = TREE_OPERAND (exp, 1);
6885 if (!integer_zerop (off))
6886 {
6887 offset_int boff, coff = mem_ref_offset (exp);
6888 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6889 bit_offset += boff;
6890 }
6891 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6892 }
6893 goto done;
6894
6895 default:
6896 goto done;
6897 }
6898
6899 /* If any reference in the chain is volatile, the effect is volatile. */
6900 if (TREE_THIS_VOLATILE (exp))
6901 *pvolatilep = 1;
6902
6903 exp = TREE_OPERAND (exp, 0);
6904 }
6905 done:
6906
6907 /* If OFFSET is constant, see if we can return the whole thing as a
6908 constant bit position. Make sure to handle overflow during
6909 this conversion. */
6910 if (TREE_CODE (offset) == INTEGER_CST)
6911 {
6912 offset_int tem = wi::sext (wi::to_offset (offset),
6913 TYPE_PRECISION (sizetype));
6914 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6915 tem += bit_offset;
6916 if (wi::fits_shwi_p (tem))
6917 {
6918 *pbitpos = tem.to_shwi ();
6919 *poffset = offset = NULL_TREE;
6920 }
6921 }
6922
6923 /* Otherwise, split it up. */
6924 if (offset)
6925 {
6926 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6927 if (wi::neg_p (bit_offset))
6928 {
6929 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6930 offset_int tem = bit_offset.and_not (mask);
6931 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6932 	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6933 bit_offset -= tem;
6934 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6935 offset = size_binop (PLUS_EXPR, offset,
6936 wide_int_to_tree (sizetype, tem));
6937 }
6938
6939 *pbitpos = bit_offset.to_shwi ();
6940 *poffset = offset;
6941 }
6942
6943 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6944 if (mode == VOIDmode
6945 && blkmode_bitfield
6946 && (*pbitpos % BITS_PER_UNIT) == 0
6947 && (*pbitsize % BITS_PER_UNIT) == 0)
6948 *pmode = BLKmode;
6949 else
6950 *pmode = mode;
6951
6952 return exp;
6953 }
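
/* For illustration: given a COMPONENT_REF such as S.F, where F is laid
   out 4 bytes into the hypothetical variable S, get_inner_reference
   returns the decl for S with *PBITPOS set to 32, *POFFSET set to
   NULL_TREE and *PBITSIZE set to the size of F in bits; with a variable
   array index, a tree giving the variable byte offset would instead be
   returned in *POFFSET.  */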
6954
6955 /* Return a tree of sizetype representing the size, in bytes, of the element
6956 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6957
6958 tree
6959 array_ref_element_size (tree exp)
6960 {
6961 tree aligned_size = TREE_OPERAND (exp, 3);
6962 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6963 location_t loc = EXPR_LOCATION (exp);
6964
6965 /* If a size was specified in the ARRAY_REF, it's the size measured
6966 in alignment units of the element type. So multiply by that value. */
6967 if (aligned_size)
6968 {
6969 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6970 sizetype from another type of the same width and signedness. */
6971 if (TREE_TYPE (aligned_size) != sizetype)
6972 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6973 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6974 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6975 }
6976
6977 /* Otherwise, take the size from that of the element type. Substitute
6978 any PLACEHOLDER_EXPR that we have. */
6979 else
6980 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6981 }
6982
6983 /* Return a tree representing the lower bound of the array mentioned in
6984 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6985
6986 tree
6987 array_ref_low_bound (tree exp)
6988 {
6989 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6990
6991 /* If a lower bound is specified in EXP, use it. */
6992 if (TREE_OPERAND (exp, 2))
6993 return TREE_OPERAND (exp, 2);
6994
6995 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6996 substituting for a PLACEHOLDER_EXPR as needed. */
6997 if (domain_type && TYPE_MIN_VALUE (domain_type))
6998 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6999
7000 /* Otherwise, return a zero of the appropriate type. */
7001 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7002 }
7003
7004 /* Returns true if REF is an array reference to an array at the end of
7005 a structure. If this is the case, the array may be allocated larger
7006 than its upper bound implies. */
7007
7008 bool
7009 array_at_struct_end_p (tree ref)
7010 {
7011 if (TREE_CODE (ref) != ARRAY_REF
7012 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7013 return false;
7014
7015 while (handled_component_p (ref))
7016 {
7017 /* If the reference chain contains a component reference to a
7018 	 non-union type, and another field follows it, the reference
7019 is not at the end of a structure. */
7020 if (TREE_CODE (ref) == COMPONENT_REF
7021 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7022 {
7023 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7024 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7025 nextf = DECL_CHAIN (nextf);
7026 if (nextf)
7027 return false;
7028 }
7029
7030 ref = TREE_OPERAND (ref, 0);
7031 }
7032
7033 /* If the reference is based on a declared entity, the size of the array
7034 is constrained by its given domain. */
7035 if (DECL_P (ref))
7036 return false;
7037
7038 return true;
7039 }
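
/* For illustration, with the hypothetical type

     struct tail { int n; char buf[1]; };

   P->buf[i] for a pointer P satisfies array_at_struct_end_p: BUF is the
   last field and the base is not a declared entity, so the object may
   have been allocated with room beyond the declared bound.  A direct
   reference T.buf[i] to a declared variable T would return false.  */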
7040
7041 /* Return a tree representing the upper bound of the array mentioned in
7042 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7043
7044 tree
7045 array_ref_up_bound (tree exp)
7046 {
7047 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7048
7049 /* If there is a domain type and it has an upper bound, use it, substituting
7050 for a PLACEHOLDER_EXPR as needed. */
7051 if (domain_type && TYPE_MAX_VALUE (domain_type))
7052 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7053
7054 /* Otherwise fail. */
7055 return NULL_TREE;
7056 }
7057
7058 /* Return a tree representing the offset, in bytes, of the field referenced
7059 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7060
7061 tree
7062 component_ref_field_offset (tree exp)
7063 {
7064 tree aligned_offset = TREE_OPERAND (exp, 2);
7065 tree field = TREE_OPERAND (exp, 1);
7066 location_t loc = EXPR_LOCATION (exp);
7067
7068 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7069 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7070 value. */
7071 if (aligned_offset)
7072 {
7073 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7074 sizetype from another type of the same width and signedness. */
7075 if (TREE_TYPE (aligned_offset) != sizetype)
7076 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7077 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7078 size_int (DECL_OFFSET_ALIGN (field)
7079 / BITS_PER_UNIT));
7080 }
7081
7082 /* Otherwise, take the offset from that of the field. Substitute
7083 any PLACEHOLDER_EXPR that we have. */
7084 else
7085 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7086 }
7087
7088 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7089
7090 static unsigned HOST_WIDE_INT
7091 target_align (const_tree target)
7092 {
7093 /* We might have a chain of nested references with intermediate misaligning
7094 bitfield components, so we need to recurse to find out. */
7095
7096 unsigned HOST_WIDE_INT this_align, outer_align;
7097
7098 switch (TREE_CODE (target))
7099 {
7100 case BIT_FIELD_REF:
7101 return 1;
7102
7103 case COMPONENT_REF:
7104 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7105 outer_align = target_align (TREE_OPERAND (target, 0));
7106 return MIN (this_align, outer_align);
7107
7108 case ARRAY_REF:
7109 case ARRAY_RANGE_REF:
7110 this_align = TYPE_ALIGN (TREE_TYPE (target));
7111 outer_align = target_align (TREE_OPERAND (target, 0));
7112 return MIN (this_align, outer_align);
7113
7114 CASE_CONVERT:
7115 case NON_LVALUE_EXPR:
7116 case VIEW_CONVERT_EXPR:
7117 this_align = TYPE_ALIGN (TREE_TYPE (target));
7118 outer_align = target_align (TREE_OPERAND (target, 0));
7119 return MAX (this_align, outer_align);
7120
7121 default:
7122 return TYPE_ALIGN (TREE_TYPE (target));
7123 }
7124 }
7125
7126 \f
7127 /* Given an rtx VALUE that may contain additions and multiplications, return
7128 an equivalent value that just refers to a register, memory, or constant.
7129 This is done by generating instructions to perform the arithmetic and
7130 returning a pseudo-register containing the value.
7131
7132 The returned value may be a REG, SUBREG, MEM or constant. */
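/* For example, given (plus:SI (reg:SI 100) (const_int 4)) this emits an
add insn and returns a pseudo register holding the sum, while a bare
REG, MEM or constant is returned unchanged. */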
7133
7134 rtx
7135 force_operand (rtx value, rtx target)
7136 {
7137 rtx op1, op2;
7138 /* Use subtarget as the target for operand 0 of a binary operation. */
7139 rtx subtarget = get_subtarget (target);
7140 enum rtx_code code = GET_CODE (value);
7141
7142 /* Check for subreg applied to an expression produced by loop optimizer. */
7143 if (code == SUBREG
7144 && !REG_P (SUBREG_REG (value))
7145 && !MEM_P (SUBREG_REG (value)))
7146 {
7147 value
7148 = simplify_gen_subreg (GET_MODE (value),
7149 force_reg (GET_MODE (SUBREG_REG (value)),
7150 force_operand (SUBREG_REG (value),
7151 NULL_RTX)),
7152 GET_MODE (SUBREG_REG (value)),
7153 SUBREG_BYTE (value));
7154 code = GET_CODE (value);
7155 }
7156
7157 /* Check for a PIC address load. */
7158 if ((code == PLUS || code == MINUS)
7159 && XEXP (value, 0) == pic_offset_table_rtx
7160 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7161 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7162 || GET_CODE (XEXP (value, 1)) == CONST))
7163 {
7164 if (!subtarget)
7165 subtarget = gen_reg_rtx (GET_MODE (value));
7166 emit_move_insn (subtarget, value);
7167 return subtarget;
7168 }
7169
7170 if (ARITHMETIC_P (value))
7171 {
7172 op2 = XEXP (value, 1);
7173 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7174 subtarget = 0;
7175 if (code == MINUS && CONST_INT_P (op2))
7176 {
7177 code = PLUS;
7178 op2 = negate_rtx (GET_MODE (value), op2);
7179 }
7180
7181 /* Check for an addition with OP2 a constant integer and our first
7182 operand a PLUS of a virtual register and something else. In that
7183 case, we want to emit the sum of the virtual register and the
7184 constant first and then add the other value. This allows virtual
7185 register instantiation to simply modify the constant rather than
7186 creating another one around this addition. */
7187 if (code == PLUS && CONST_INT_P (op2)
7188 && GET_CODE (XEXP (value, 0)) == PLUS
7189 && REG_P (XEXP (XEXP (value, 0), 0))
7190 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7191 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7192 {
7193 rtx temp = expand_simple_binop (GET_MODE (value), code,
7194 XEXP (XEXP (value, 0), 0), op2,
7195 subtarget, 0, OPTAB_LIB_WIDEN);
7196 return expand_simple_binop (GET_MODE (value), code, temp,
7197 force_operand (XEXP (XEXP (value,
7198 0), 1), 0),
7199 target, 0, OPTAB_LIB_WIDEN);
7200 }
7201
7202 op1 = force_operand (XEXP (value, 0), subtarget);
7203 op2 = force_operand (op2, NULL_RTX);
7204 switch (code)
7205 {
7206 case MULT:
7207 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7208 case DIV:
7209 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7210 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7211 target, 1, OPTAB_LIB_WIDEN);
7212 else
7213 return expand_divmod (0,
7214 FLOAT_MODE_P (GET_MODE (value))
7215 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7216 GET_MODE (value), op1, op2, target, 0);
7217 case MOD:
7218 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7219 target, 0);
7220 case UDIV:
7221 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7222 target, 1);
7223 case UMOD:
7224 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7225 target, 1);
7226 case ASHIFTRT:
7227 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7228 target, 0, OPTAB_LIB_WIDEN);
7229 default:
7230 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7231 target, 1, OPTAB_LIB_WIDEN);
7232 }
7233 }
7234 if (UNARY_P (value))
7235 {
7236 if (!target)
7237 target = gen_reg_rtx (GET_MODE (value));
7238 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7239 switch (code)
7240 {
7241 case ZERO_EXTEND:
7242 case SIGN_EXTEND:
7243 case TRUNCATE:
7244 case FLOAT_EXTEND:
7245 case FLOAT_TRUNCATE:
7246 convert_move (target, op1, code == ZERO_EXTEND);
7247 return target;
7248
7249 case FIX:
7250 case UNSIGNED_FIX:
7251 expand_fix (target, op1, code == UNSIGNED_FIX);
7252 return target;
7253
7254 case FLOAT:
7255 case UNSIGNED_FLOAT:
7256 expand_float (target, op1, code == UNSIGNED_FLOAT);
7257 return target;
7258
7259 default:
7260 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7261 }
7262 }
7263
7264 #ifdef INSN_SCHEDULING
7265 /* On machines that have insn scheduling, we want all memory references to be
7266 explicit, so we need to deal with such paradoxical SUBREGs. */
7267 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7268 value
7269 = simplify_gen_subreg (GET_MODE (value),
7270 force_reg (GET_MODE (SUBREG_REG (value)),
7271 force_operand (SUBREG_REG (value),
7272 NULL_RTX)),
7273 GET_MODE (SUBREG_REG (value)),
7274 SUBREG_BYTE (value));
7275 #endif
7276
7277 return value;
7278 }
7279 \f
7280 /* Subroutine of expand_expr: return nonzero iff there is no way that
7281 EXP can reference X, which is being modified. TOP_P is nonzero if this
7282 call is going to be used to determine whether we need a temporary
7283 for EXP, as opposed to a recursive call to this function.
7284
7285 It is always safe for this routine to return zero since it merely
7286 searches for optimization opportunities. */
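/* For example, if X is a pseudo register and EXP is a VAR_DECL whose
DECL_RTL is a different register, the two cannot interfere and 1 is
returned; if both end up as memory references that may overlap, 0 is
returned. */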
7287
7288 int
7289 safe_from_p (const_rtx x, tree exp, int top_p)
7290 {
7291 rtx exp_rtl = 0;
7292 int i, nops;
7293
7294 if (x == 0
7295 /* If EXP has varying size, we MUST use a target since we currently
7296 have no way of allocating temporaries of variable size
7297 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7298 So we assume here that something at a higher level has prevented a
7299 clash. This is somewhat bogus, but the best we can do. Only
7300 do this when X is BLKmode and when we are at the top level. */
7301 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7302 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7303 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7304 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7305 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7306 != INTEGER_CST)
7307 && GET_MODE (x) == BLKmode)
7308 /* If X is in the outgoing argument area, it is always safe. */
7309 || (MEM_P (x)
7310 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7311 || (GET_CODE (XEXP (x, 0)) == PLUS
7312 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7313 return 1;
7314
7315 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
7316 find the underlying pseudo. */
7317 if (GET_CODE (x) == SUBREG)
7318 {
7319 x = SUBREG_REG (x);
7320 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7321 return 0;
7322 }
7323
7324 /* Now look at our tree code and possibly recurse. */
7325 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7326 {
7327 case tcc_declaration:
7328 exp_rtl = DECL_RTL_IF_SET (exp);
7329 break;
7330
7331 case tcc_constant:
7332 return 1;
7333
7334 case tcc_exceptional:
7335 if (TREE_CODE (exp) == TREE_LIST)
7336 {
7337 while (1)
7338 {
7339 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7340 return 0;
7341 exp = TREE_CHAIN (exp);
7342 if (!exp)
7343 return 1;
7344 if (TREE_CODE (exp) != TREE_LIST)
7345 return safe_from_p (x, exp, 0);
7346 }
7347 }
7348 else if (TREE_CODE (exp) == CONSTRUCTOR)
7349 {
7350 constructor_elt *ce;
7351 unsigned HOST_WIDE_INT idx;
7352
7353 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7354 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7355 || !safe_from_p (x, ce->value, 0))
7356 return 0;
7357 return 1;
7358 }
7359 else if (TREE_CODE (exp) == ERROR_MARK)
7360 return 1; /* An already-visited SAVE_EXPR? */
7361 else
7362 return 0;
7363
7364 case tcc_statement:
7365 /* The only case we look at here is the DECL_INITIAL inside a
7366 DECL_EXPR. */
7367 return (TREE_CODE (exp) != DECL_EXPR
7368 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7369 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7370 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7371
7372 case tcc_binary:
7373 case tcc_comparison:
7374 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7375 return 0;
7376 /* Fall through. */
7377
7378 case tcc_unary:
7379 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7380
7381 case tcc_expression:
7382 case tcc_reference:
7383 case tcc_vl_exp:
7384 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7385 the expression. If it is set, we conflict iff we are that rtx or
7386 both are in memory. Otherwise, we check all operands of the
7387 expression recursively. */
7388
7389 switch (TREE_CODE (exp))
7390 {
7391 case ADDR_EXPR:
7392 /* If the operand is static or we are static, we can't conflict.
7393 Likewise if we don't conflict with the operand at all. */
7394 if (staticp (TREE_OPERAND (exp, 0))
7395 || TREE_STATIC (exp)
7396 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7397 return 1;
7398
7399 /* Otherwise, the only way this can conflict is if we are taking
7400 the address of a DECL and that address is part of X, which is
7401 very rare. */
7402 exp = TREE_OPERAND (exp, 0);
7403 if (DECL_P (exp))
7404 {
7405 if (!DECL_RTL_SET_P (exp)
7406 || !MEM_P (DECL_RTL (exp)))
7407 return 0;
7408 else
7409 exp_rtl = XEXP (DECL_RTL (exp), 0);
7410 }
7411 break;
7412
7413 case MEM_REF:
7414 if (MEM_P (x)
7415 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7416 get_alias_set (exp)))
7417 return 0;
7418 break;
7419
7420 case CALL_EXPR:
7421 /* Assume that the call will clobber all hard registers and
7422 all of memory. */
7423 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7424 || MEM_P (x))
7425 return 0;
7426 break;
7427
7428 case WITH_CLEANUP_EXPR:
7429 case CLEANUP_POINT_EXPR:
7430 /* Lowered by gimplify.c. */
7431 gcc_unreachable ();
7432
7433 case SAVE_EXPR:
7434 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7435
7436 default:
7437 break;
7438 }
7439
7440 /* If we have an rtx, we do not need to scan our operands. */
7441 if (exp_rtl)
7442 break;
7443
7444 nops = TREE_OPERAND_LENGTH (exp);
7445 for (i = 0; i < nops; i++)
7446 if (TREE_OPERAND (exp, i) != 0
7447 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7448 return 0;
7449
7450 break;
7451
7452 case tcc_type:
7453 /* Should never get a type here. */
7454 gcc_unreachable ();
7455 }
7456
7457 /* If we have an rtl, find any enclosed object. Then see if we conflict
7458 with it. */
7459 if (exp_rtl)
7460 {
7461 if (GET_CODE (exp_rtl) == SUBREG)
7462 {
7463 exp_rtl = SUBREG_REG (exp_rtl);
7464 if (REG_P (exp_rtl)
7465 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7466 return 0;
7467 }
7468
7469 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7470 are memory and they conflict. */
7471 return ! (rtx_equal_p (x, exp_rtl)
7472 || (MEM_P (x) && MEM_P (exp_rtl)
7473 && true_dependence (exp_rtl, VOIDmode, x)));
7474 }
7475
7476 /* If we reach here, it is safe. */
7477 return 1;
7478 }
7479
7480 \f
7481 /* Return the highest power of two that EXP is known to be a multiple of.
7482 This is used in updating alignment of MEMs in array references. */
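/* For example, for the constant 24 tree_ctz reports 3 known trailing
zero bits, so the result is 1 << 3 = 8; results larger than
BIGGEST_ALIGNMENT are capped at BIGGEST_ALIGNMENT. */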
7483
7484 unsigned HOST_WIDE_INT
7485 highest_pow2_factor (const_tree exp)
7486 {
7487 unsigned HOST_WIDE_INT ret;
7488 int trailing_zeros = tree_ctz (exp);
7489 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7490 return BIGGEST_ALIGNMENT;
7491 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7492 if (ret > BIGGEST_ALIGNMENT)
7493 return BIGGEST_ALIGNMENT;
7494 return ret;
7495 }
7496
7497 /* Similar, except that the alignment requirements of TARGET are
7498 taken into account. Assume it is at least as aligned as its
7499 type, unless it is a COMPONENT_REF in which case the layout of
7500 the structure gives the alignment. */
7501
7502 static unsigned HOST_WIDE_INT
7503 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7504 {
7505 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7506 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7507
7508 return MAX (factor, talign);
7509 }
7510 \f
7511 #ifdef HAVE_conditional_move
7512 /* Convert the tree comparison code TCODE to the rtl one where the
7513 signedness is UNSIGNEDP. */
7514
7515 static enum rtx_code
7516 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7517 {
7518 enum rtx_code code;
7519 switch (tcode)
7520 {
7521 case EQ_EXPR:
7522 code = EQ;
7523 break;
7524 case NE_EXPR:
7525 code = NE;
7526 break;
7527 case LT_EXPR:
7528 code = unsignedp ? LTU : LT;
7529 break;
7530 case LE_EXPR:
7531 code = unsignedp ? LEU : LE;
7532 break;
7533 case GT_EXPR:
7534 code = unsignedp ? GTU : GT;
7535 break;
7536 case GE_EXPR:
7537 code = unsignedp ? GEU : GE;
7538 break;
7539 case UNORDERED_EXPR:
7540 code = UNORDERED;
7541 break;
7542 case ORDERED_EXPR:
7543 code = ORDERED;
7544 break;
7545 case UNLT_EXPR:
7546 code = UNLT;
7547 break;
7548 case UNLE_EXPR:
7549 code = UNLE;
7550 break;
7551 case UNGT_EXPR:
7552 code = UNGT;
7553 break;
7554 case UNGE_EXPR:
7555 code = UNGE;
7556 break;
7557 case UNEQ_EXPR:
7558 code = UNEQ;
7559 break;
7560 case LTGT_EXPR:
7561 code = LTGT;
7562 break;
7563
7564 default:
7565 gcc_unreachable ();
7566 }
7567 return code;
7568 }
7569 #endif
7570
7571 /* Subroutine of expand_expr. Expand the two operands of a binary
7572 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7573 The value may be stored in TARGET if TARGET is nonzero. The
7574 MODIFIER argument is as documented by expand_expr. */
7575
7576 static void
7577 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7578 enum expand_modifier modifier)
7579 {
7580 if (! safe_from_p (target, exp1, 1))
7581 target = 0;
7582 if (operand_equal_p (exp0, exp1, 0))
7583 {
7584 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7585 *op1 = copy_rtx (*op0);
7586 }
7587 else
7588 {
7589 /* If we need to preserve evaluation order, copy exp0 into its own
7590 temporary variable so that it can't be clobbered by exp1. */
7591 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7592 exp0 = save_expr (exp0);
7593 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7594 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7595 }
7596 }
7597
7598 \f
7599 /* Return a MEM that contains constant EXP. DEFER is as for
7600 output_constant_def and MODIFIER is as for expand_expr. */
7601
7602 static rtx
7603 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7604 {
7605 rtx mem;
7606
7607 mem = output_constant_def (exp, defer);
7608 if (modifier != EXPAND_INITIALIZER)
7609 mem = use_anchored_address (mem);
7610 return mem;
7611 }
7612
7613 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7614 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7615
7616 static rtx
7617 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7618 enum expand_modifier modifier, addr_space_t as)
7619 {
7620 rtx result, subtarget;
7621 tree inner, offset;
7622 HOST_WIDE_INT bitsize, bitpos;
7623 int volatilep, unsignedp;
7624 machine_mode mode1;
7625
7626 /* If we are taking the address of a constant and are at the top level,
7627 we have to use output_constant_def since we can't call force_const_mem
7628 at top level. */
7629 /* ??? This should be considered a front-end bug. We should not be
7630 generating ADDR_EXPR of something that isn't an LVALUE. The only
7631 exception here is STRING_CST. */
7632 if (CONSTANT_CLASS_P (exp))
7633 {
7634 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7635 if (modifier < EXPAND_SUM)
7636 result = force_operand (result, target);
7637 return result;
7638 }
7639
7640 /* Everything must be something allowed by is_gimple_addressable. */
7641 switch (TREE_CODE (exp))
7642 {
7643 case INDIRECT_REF:
7644 /* This case will happen via recursion for &a->b. */
7645 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7646
7647 case MEM_REF:
7648 {
7649 tree tem = TREE_OPERAND (exp, 0);
7650 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7651 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7652 return expand_expr (tem, target, tmode, modifier);
7653 }
7654
7655 case CONST_DECL:
7656 /* Expand the initializer like constants above. */
7657 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7658 0, modifier), 0);
7659 if (modifier < EXPAND_SUM)
7660 result = force_operand (result, target);
7661 return result;
7662
7663 case REALPART_EXPR:
7664 /* The real part of the complex number is always first, therefore
7665 the address is the same as the address of the parent object. */
7666 offset = 0;
7667 bitpos = 0;
7668 inner = TREE_OPERAND (exp, 0);
7669 break;
7670
7671 case IMAGPART_EXPR:
7672 /* The imaginary part of the complex number is always second.
7673 The expression is therefore always offset by the size of the
7674 scalar type. */
7675 offset = 0;
7676 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7677 inner = TREE_OPERAND (exp, 0);
7678 break;
7679
7680 case COMPOUND_LITERAL_EXPR:
7681 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7682 rtl_for_decl_init is called on DECL_INITIAL with
7683 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7684 if (modifier == EXPAND_INITIALIZER
7685 && COMPOUND_LITERAL_EXPR_DECL (exp))
7686 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7687 target, tmode, modifier, as);
7688 /* FALLTHRU */
7689 default:
7690 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7691 expand_expr, as that can have various side effects; LABEL_DECLs for
7692 example, may not have their DECL_RTL set yet. Expand the rtl of
7693 CONSTRUCTORs too, which should yield a memory reference for the
7694 constructor's contents. Assume language specific tree nodes can
7695 be expanded in some interesting way. */
7696 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7697 if (DECL_P (exp)
7698 || TREE_CODE (exp) == CONSTRUCTOR
7699 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7700 {
7701 result = expand_expr (exp, target, tmode,
7702 modifier == EXPAND_INITIALIZER
7703 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7704
7705 /* If the DECL isn't in memory, then the DECL wasn't properly
7706 marked TREE_ADDRESSABLE, which will be either a front-end
7707 or a tree optimizer bug. */
7708
7709 if (TREE_ADDRESSABLE (exp)
7710 && ! MEM_P (result)
7711 && ! targetm.calls.allocate_stack_slots_for_args ())
7712 {
7713 error ("local frame unavailable (naked function?)");
7714 return result;
7715 }
7716 else
7717 gcc_assert (MEM_P (result));
7718 result = XEXP (result, 0);
7719
7720 /* ??? Is this needed anymore? */
7721 if (DECL_P (exp))
7722 TREE_USED (exp) = 1;
7723
7724 if (modifier != EXPAND_INITIALIZER
7725 && modifier != EXPAND_CONST_ADDRESS
7726 && modifier != EXPAND_SUM)
7727 result = force_operand (result, target);
7728 return result;
7729 }
7730
7731 /* Pass FALSE as the last argument to get_inner_reference although
7732 we are expanding to RTL. The rationale is that we know how to
7733 handle "aligning nodes" here: we can just bypass them because
7734 they won't change the final object whose address will be returned
7735 (they actually exist only for that purpose). */
7736 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7737 &mode1, &unsignedp, &volatilep, false);
7738 break;
7739 }
7740
7741 /* We must have made progress. */
7742 gcc_assert (inner != exp);
7743
7744 subtarget = offset || bitpos ? NULL_RTX : target;
7745 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7746 inner alignment, force the inner to be sufficiently aligned. */
7747 if (CONSTANT_CLASS_P (inner)
7748 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7749 {
7750 inner = copy_node (inner);
7751 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7752 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7753 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7754 }
7755 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7756
7757 if (offset)
7758 {
7759 rtx tmp;
7760
7761 if (modifier != EXPAND_NORMAL)
7762 result = force_operand (result, NULL);
7763 tmp = expand_expr (offset, NULL_RTX, tmode,
7764 modifier == EXPAND_INITIALIZER
7765 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7766
7767 /* expand_expr is allowed to return an object in a mode other
7768 than TMODE. If it did, we need to convert. */
7769 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7770 tmp = convert_modes (tmode, GET_MODE (tmp),
7771 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7772 result = convert_memory_address_addr_space (tmode, result, as);
7773 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7774
7775 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7776 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7777 else
7778 {
7779 subtarget = bitpos ? NULL_RTX : target;
7780 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7781 1, OPTAB_LIB_WIDEN);
7782 }
7783 }
7784
7785 if (bitpos)
7786 {
7787 /* Someone beforehand should have rejected taking the address
7788 of such an object. */
7789 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7790
7791 result = convert_memory_address_addr_space (tmode, result, as);
7792 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7793 if (modifier < EXPAND_SUM)
7794 result = force_operand (result, target);
7795 }
7796
7797 return result;
7798 }
7799
7800 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7801 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7802
7803 static rtx
7804 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7805 enum expand_modifier modifier)
7806 {
7807 addr_space_t as = ADDR_SPACE_GENERIC;
7808 machine_mode address_mode = Pmode;
7809 machine_mode pointer_mode = ptr_mode;
7810 machine_mode rmode;
7811 rtx result;
7812
7813 /* Target mode of VOIDmode says "whatever's natural". */
7814 if (tmode == VOIDmode)
7815 tmode = TYPE_MODE (TREE_TYPE (exp));
7816
7817 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7818 {
7819 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7820 address_mode = targetm.addr_space.address_mode (as);
7821 pointer_mode = targetm.addr_space.pointer_mode (as);
7822 }
7823
7824 /* We can get called with some Weird Things if the user does silliness
7825 like "(short) &a". In that case, convert_memory_address won't do
7826 the right thing, so ignore the given target mode. */
7827 if (tmode != address_mode && tmode != pointer_mode)
7828 tmode = address_mode;
7829
7830 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7831 tmode, modifier, as);
7832
7833 /* Despite expand_expr's claims concerning ignoring TMODE when not
7834 strictly convenient, things break if we don't honor it. Note
7835 that combined with the above, we only do this for pointer modes. */
7836 rmode = GET_MODE (result);
7837 if (rmode == VOIDmode)
7838 rmode = tmode;
7839 if (rmode != tmode)
7840 result = convert_memory_address_addr_space (tmode, result, as);
7841
7842 return result;
7843 }
7844
7845 /* Generate code for computing CONSTRUCTOR EXP.
7846 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7847 is TRUE, instead of creating a temporary variable in memory,
7848 NULL is returned and the caller needs to handle it differently. */
7849
7850 static rtx
7851 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7852 bool avoid_temp_mem)
7853 {
7854 tree type = TREE_TYPE (exp);
7855 machine_mode mode = TYPE_MODE (type);
7856
7857 /* Try to avoid creating a temporary at all. This is possible
7858 if all of the initializer is zero.
7859 FIXME: try to handle all [0..255] initializers we can handle
7860 with memset. */
7861 if (TREE_STATIC (exp)
7862 && !TREE_ADDRESSABLE (exp)
7863 && target != 0 && mode == BLKmode
7864 && all_zeros_p (exp))
7865 {
7866 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7867 return target;
7868 }
7869
7870 /* All elts simple constants => refer to a constant in memory. But
7871 if this is a non-BLKmode mode, let it store a field at a time
7872 since that should make a CONST_INT, CONST_WIDE_INT or
7873 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7874 use, it is best to store directly into the target unless the type
7875 is large enough that memcpy will be used. If we are making an
7876 initializer and all operands are constant, put it in memory as
7877 well.
7878
7879 FIXME: Avoid trying to fill vector constructors piece-meal.
7880 Output them with output_constant_def below unless we're sure
7881 they're zeros. This should go away when vector initializers
7882 are treated like VECTOR_CST instead of arrays. */
7883 if ((TREE_STATIC (exp)
7884 && ((mode == BLKmode
7885 && ! (target != 0 && safe_from_p (target, exp, 1)))
7886 || TREE_ADDRESSABLE (exp)
7887 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7888 && (! can_move_by_pieces
7889 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7890 TYPE_ALIGN (type)))
7891 && ! mostly_zeros_p (exp))))
7892 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7893 && TREE_CONSTANT (exp)))
7894 {
7895 rtx constructor;
7896
7897 if (avoid_temp_mem)
7898 return NULL_RTX;
7899
7900 constructor = expand_expr_constant (exp, 1, modifier);
7901
7902 if (modifier != EXPAND_CONST_ADDRESS
7903 && modifier != EXPAND_INITIALIZER
7904 && modifier != EXPAND_SUM)
7905 constructor = validize_mem (constructor);
7906
7907 return constructor;
7908 }
7909
7910 /* Handle calls that pass values in multiple non-contiguous
7911 locations. The Irix 6 ABI has examples of this. */
7912 if (target == 0 || ! safe_from_p (target, exp, 1)
7913 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7914 {
7915 if (avoid_temp_mem)
7916 return NULL_RTX;
7917
7918 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7919 }
7920
7921 store_constructor (exp, target, 0, int_expr_size (exp));
7922 return target;
7923 }
7924
7925
7926 /* expand_expr: generate code for computing expression EXP.
7927 An rtx for the computed value is returned. The value is never null.
7928 In the case of a void EXP, const0_rtx is returned.
7929
7930 The value may be stored in TARGET if TARGET is nonzero.
7931 TARGET is just a suggestion; callers must assume that
7932 the rtx returned may not be the same as TARGET.
7933
7934 If TARGET is CONST0_RTX, it means that the value will be ignored.
7935
7936 If TMODE is not VOIDmode, it suggests generating the
7937 result in mode TMODE. But this is done only when convenient.
7938 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7939 TMODE is just a suggestion; callers must assume that
7940 the rtx returned may not have mode TMODE.
7941
7942 Note that TARGET may have neither TMODE nor MODE. In that case, it
7943 probably will not be used.
7944
7945 If MODIFIER is EXPAND_SUM then when EXP is an addition
7946 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7947 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7948 products as above, or REG or MEM, or constant.
7949 Ordinarily in such cases we would output mul or add instructions
7950 and then return a pseudo reg containing the sum.
7951
7952 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7953 it also marks a label as absolutely required (it can't be dead).
7954 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7955 This is used for outputting expressions used in initializers.
7956
7957 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7958 with a constant address even if that address is not normally legitimate.
7959 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7960
7961 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7962 a call parameter. Such targets require special care as we haven't yet
7963 marked TARGET so that it's safe from being trashed by libcalls. We
7964 don't want to use TARGET for anything but the final result;
7965 intermediate values must go elsewhere. Additionally, calls to
7966 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7967
7968 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7969 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7970 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7971 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7972 recursively.
7973
7974 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7975 In this case, we don't adjust a returned MEM rtx that wouldn't be
7976 sufficiently aligned for its mode; instead, it's up to the caller
7977 to deal with it afterwards. This is used to make sure that unaligned
7978 base objects for which out-of-bounds accesses are supported, for
7979 example record types with trailing arrays, aren't realigned behind
7980 the back of the caller.
7981 The normal operating mode is to pass FALSE for this parameter. */
7982
7983 rtx
7984 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7985 enum expand_modifier modifier, rtx *alt_rtl,
7986 bool inner_reference_p)
7987 {
7988 rtx ret;
7989
7990 /* Handle ERROR_MARK before anybody tries to access its type. */
7991 if (TREE_CODE (exp) == ERROR_MARK
7992 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7993 {
7994 ret = CONST0_RTX (tmode);
7995 return ret ? ret : const0_rtx;
7996 }
7997
7998 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7999 inner_reference_p);
8000 return ret;
8001 }
8002
8003 /* Try to expand the conditional expression which is represented by
8004 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8005 return the RTL register which represents the result. Otherwise return
8006 NULL_RTX. */
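/* For example, on a target providing conditional moves, an expression
such as "x = a < b ? c : d" can be expanded as a comparison of a and b
followed by a conditional move of c or d into a temporary register,
avoiding a branch. */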
8007
8008 static rtx
8009 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8010 tree treeop1 ATTRIBUTE_UNUSED,
8011 tree treeop2 ATTRIBUTE_UNUSED)
8012 {
8013 #ifdef HAVE_conditional_move
8014 rtx insn;
8015 rtx op00, op01, op1, op2;
8016 enum rtx_code comparison_code;
8017 machine_mode comparison_mode;
8018 gimple srcstmt;
8019 rtx temp;
8020 tree type = TREE_TYPE (treeop1);
8021 int unsignedp = TYPE_UNSIGNED (type);
8022 machine_mode mode = TYPE_MODE (type);
8023 machine_mode orig_mode = mode;
8024
8025 /* If we cannot do a conditional move on the mode, try doing it
8026 with the promoted mode. */
8027 if (!can_conditionally_move_p (mode))
8028 {
8029 mode = promote_mode (type, mode, &unsignedp);
8030 if (!can_conditionally_move_p (mode))
8031 return NULL_RTX;
8032 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8033 }
8034 else
8035 temp = assign_temp (type, 0, 1);
8036
8037 start_sequence ();
8038 expand_operands (treeop1, treeop2,
8039 temp, &op1, &op2, EXPAND_NORMAL);
8040
8041 if (TREE_CODE (treeop0) == SSA_NAME
8042 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8043 {
8044 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8045 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8046 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8047 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8048 comparison_mode = TYPE_MODE (type);
8049 unsignedp = TYPE_UNSIGNED (type);
8050 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8051 }
8052 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8053 {
8054 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8055 enum tree_code cmpcode = TREE_CODE (treeop0);
8056 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8057 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8058 unsignedp = TYPE_UNSIGNED (type);
8059 comparison_mode = TYPE_MODE (type);
8060 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8061 }
8062 else
8063 {
8064 op00 = expand_normal (treeop0);
8065 op01 = const0_rtx;
8066 comparison_code = NE;
8067 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8068 }
8069
8070 if (GET_MODE (op1) != mode)
8071 op1 = gen_lowpart (mode, op1);
8072
8073 if (GET_MODE (op2) != mode)
8074 op2 = gen_lowpart (mode, op2);
8075
8076 /* Try to emit the conditional move. */
8077 insn = emit_conditional_move (temp, comparison_code,
8078 op00, op01, comparison_mode,
8079 op1, op2, mode,
8080 unsignedp);
8081
8082 /* If we could do the conditional move, emit the sequence,
8083 and return. */
8084 if (insn)
8085 {
8086 rtx_insn *seq = get_insns ();
8087 end_sequence ();
8088 emit_insn (seq);
8089 return convert_modes (orig_mode, mode, temp, 0);
8090 }
8091
8092 /* Otherwise discard the sequence and fall back to code with
8093 branches. */
8094 end_sequence ();
8095 #endif
8096 return NULL_RTX;
8097 }
8098
8099 rtx
8100 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8101 enum expand_modifier modifier)
8102 {
8103 rtx op0, op1, op2, temp;
8104 tree type;
8105 int unsignedp;
8106 machine_mode mode;
8107 enum tree_code code = ops->code;
8108 optab this_optab;
8109 rtx subtarget, original_target;
8110 int ignore;
8111 bool reduce_bit_field;
8112 location_t loc = ops->location;
8113 tree treeop0, treeop1, treeop2;
8114 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8115 ? reduce_to_bit_field_precision ((expr), \
8116 target, \
8117 type) \
8118 : (expr))
8119
8120 type = ops->type;
8121 mode = TYPE_MODE (type);
8122 unsignedp = TYPE_UNSIGNED (type);
8123
8124 treeop0 = ops->op0;
8125 treeop1 = ops->op1;
8126 treeop2 = ops->op2;
8127
8128 /* We should be called only on simple (binary or unary) expressions,
8129 exactly those that are valid in gimple expressions that aren't
8130 GIMPLE_SINGLE_RHS (or invalid). */
8131 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8132 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8133 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8134
8135 ignore = (target == const0_rtx
8136 || ((CONVERT_EXPR_CODE_P (code)
8137 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8138 && TREE_CODE (type) == VOID_TYPE));
8139
8140 /* We should be called only if we need the result. */
8141 gcc_assert (!ignore);
8142
8143 /* An operation in what may be a bit-field type needs the
8144 result to be reduced to the precision of the bit-field type,
8145 which is narrower than that of the type's mode. */
8146 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8147 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8148
8149 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8150 target = 0;
8151
8152 /* Use subtarget as the target for operand 0 of a binary operation. */
8153 subtarget = get_subtarget (target);
8154 original_target = target;
8155
8156 switch (code)
8157 {
8158 case NON_LVALUE_EXPR:
8159 case PAREN_EXPR:
8160 CASE_CONVERT:
8161 if (treeop0 == error_mark_node)
8162 return const0_rtx;
8163
8164 if (TREE_CODE (type) == UNION_TYPE)
8165 {
8166 tree valtype = TREE_TYPE (treeop0);
8167
8168 /* If both input and output are BLKmode, this conversion isn't doing
8169 anything except possibly changing memory attributes. */
8170 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8171 {
8172 rtx result = expand_expr (treeop0, target, tmode,
8173 modifier);
8174
8175 result = copy_rtx (result);
8176 set_mem_attributes (result, type, 0);
8177 return result;
8178 }
8179
8180 if (target == 0)
8181 {
8182 if (TYPE_MODE (type) != BLKmode)
8183 target = gen_reg_rtx (TYPE_MODE (type));
8184 else
8185 target = assign_temp (type, 1, 1);
8186 }
8187
8188 if (MEM_P (target))
8189 /* Store data into beginning of memory target. */
8190 store_expr (treeop0,
8191 adjust_address (target, TYPE_MODE (valtype), 0),
8192 modifier == EXPAND_STACK_PARM,
8193 false);
8194
8195 else
8196 {
8197 gcc_assert (REG_P (target));
8198
8199 /* Store this field into a union of the proper type. */
8200 store_field (target,
8201 MIN ((int_size_in_bytes (TREE_TYPE
8202 (treeop0))
8203 * BITS_PER_UNIT),
8204 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8205 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8206 }
8207
8208 /* Return the entire union. */
8209 return target;
8210 }
8211
8212 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8213 {
8214 op0 = expand_expr (treeop0, target, VOIDmode,
8215 modifier);
8216
8217 /* If the signedness of the conversion differs and OP0 is
8218 a promoted SUBREG, clear that indication since we now
8219 have to do the proper extension. */
8220 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8221 && GET_CODE (op0) == SUBREG)
8222 SUBREG_PROMOTED_VAR_P (op0) = 0;
8223
8224 return REDUCE_BIT_FIELD (op0);
8225 }
8226
8227 op0 = expand_expr (treeop0, NULL_RTX, mode,
8228 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8229 if (GET_MODE (op0) == mode)
8230 ;
8231
8232 /* If OP0 is a constant, just convert it into the proper mode. */
8233 else if (CONSTANT_P (op0))
8234 {
8235 tree inner_type = TREE_TYPE (treeop0);
8236 machine_mode inner_mode = GET_MODE (op0);
8237
8238 if (inner_mode == VOIDmode)
8239 inner_mode = TYPE_MODE (inner_type);
8240
8241 if (modifier == EXPAND_INITIALIZER)
8242 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8243 subreg_lowpart_offset (mode,
8244 inner_mode));
8245 else
8246 op0 = convert_modes (mode, inner_mode, op0,
8247 TYPE_UNSIGNED (inner_type));
8248 }
8249
8250 else if (modifier == EXPAND_INITIALIZER)
8251 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8252
8253 else if (target == 0)
8254 op0 = convert_to_mode (mode, op0,
8255 TYPE_UNSIGNED (TREE_TYPE
8256 (treeop0)));
8257 else
8258 {
8259 convert_move (target, op0,
8260 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8261 op0 = target;
8262 }
8263
8264 return REDUCE_BIT_FIELD (op0);
8265
8266 case ADDR_SPACE_CONVERT_EXPR:
8267 {
8268 tree treeop0_type = TREE_TYPE (treeop0);
8269 addr_space_t as_to;
8270 addr_space_t as_from;
8271
8272 gcc_assert (POINTER_TYPE_P (type));
8273 gcc_assert (POINTER_TYPE_P (treeop0_type));
8274
8275 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8276 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8277
8278 /* Conversions between pointers to the same address space should
8279 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8280 gcc_assert (as_to != as_from);
8281
8282 /* Ask target code to handle conversion between pointers
8283 to overlapping address spaces. */
8284 if (targetm.addr_space.subset_p (as_to, as_from)
8285 || targetm.addr_space.subset_p (as_from, as_to))
8286 {
8287 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8288 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8289 gcc_assert (op0);
8290 return op0;
8291 }
8292
8293 /* For disjoint address spaces, converting anything but
8294 a null pointer invokes undefined behaviour. We simply
8295 always return a null pointer here. */
8296 return CONST0_RTX (mode);
8297 }
8298
8299 case POINTER_PLUS_EXPR:
8300 /* Even though the sizetype mode and the pointer's mode can be different,
8301 expand is able to handle this correctly and get the correct result out
8302 of the PLUS_EXPR code. */
8303 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8304 if sizetype precision is smaller than pointer precision. */
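/* For example, with a 32-bit sizetype and 64-bit pointers, an offset of
(sizetype) -4 must contribute -4 rather than 0xfffffffc to the address,
which the conversion through ssizetype achieves. */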
8305 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8306 treeop1 = fold_convert_loc (loc, type,
8307 fold_convert_loc (loc, ssizetype,
8308 treeop1));
8309 /* If sizetype precision is larger than pointer precision, truncate the
8310 offset to have matching modes. */
8311 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8312 treeop1 = fold_convert_loc (loc, type, treeop1);
8313
8314 case PLUS_EXPR:
8315 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8316 something else, make sure we add the register to the constant and
8317 then to the other thing. This case can occur during strength
8318 reduction and doing it this way will produce better code if the
8319 frame pointer or argument pointer is eliminated.
8320
8321 fold-const.c will ensure that the constant is always in the inner
8322 PLUS_EXPR, so the only case we need to do anything about is if
8323 sp, ap, or fp is our second argument, in which case we must swap
8324 the innermost first argument and our second argument. */
8325
8326 if (TREE_CODE (treeop0) == PLUS_EXPR
8327 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8328 && TREE_CODE (treeop1) == VAR_DECL
8329 && (DECL_RTL (treeop1) == frame_pointer_rtx
8330 || DECL_RTL (treeop1) == stack_pointer_rtx
8331 || DECL_RTL (treeop1) == arg_pointer_rtx))
8332 {
8333 gcc_unreachable ();
8334 }
8335
8336 /* If the result is to be ptr_mode and we are adding an integer to
8337 something, we might be forming a constant. So try to use
8338 plus_constant. If it produces a sum and we can't accept it,
8339 use force_operand. This allows P = &ARR[const] to generate
8340 efficient code on machines where a SYMBOL_REF is not a valid
8341 address.
8342
8343 If this is an EXPAND_SUM call, always return the sum. */
8344 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8345 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8346 {
8347 if (modifier == EXPAND_STACK_PARM)
8348 target = 0;
8349 if (TREE_CODE (treeop0) == INTEGER_CST
8350 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8351 && TREE_CONSTANT (treeop1))
8352 {
8353 rtx constant_part;
8354 HOST_WIDE_INT wc;
8355 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8356
8357 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8358 EXPAND_SUM);
8359 /* Use wi::shwi to ensure that the constant is
8360 truncated according to the mode of OP1, then sign extended
8361 to a HOST_WIDE_INT. Using the constant directly can result
8362 in non-canonical RTL in a 64x32 cross compile. */
8363 wc = TREE_INT_CST_LOW (treeop0);
8364 constant_part =
8365 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8366 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8367 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8368 op1 = force_operand (op1, target);
8369 return REDUCE_BIT_FIELD (op1);
8370 }
8371
8372 else if (TREE_CODE (treeop1) == INTEGER_CST
8373 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8374 && TREE_CONSTANT (treeop0))
8375 {
8376 rtx constant_part;
8377 HOST_WIDE_INT wc;
8378 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8379
8380 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8381 (modifier == EXPAND_INITIALIZER
8382 ? EXPAND_INITIALIZER : EXPAND_SUM));
8383 if (! CONSTANT_P (op0))
8384 {
8385 op1 = expand_expr (treeop1, NULL_RTX,
8386 VOIDmode, modifier);
8387 /* Return a PLUS if modifier says it's OK. */
8388 if (modifier == EXPAND_SUM
8389 || modifier == EXPAND_INITIALIZER)
8390 return simplify_gen_binary (PLUS, mode, op0, op1);
8391 goto binop2;
8392 }
8393 /* Use wi::shwi to ensure that the constant is
8394 truncated according to the mode of OP0, then sign extended
8395 to a HOST_WIDE_INT. Using the constant directly can result
8396 in non-canonical RTL in a 64x32 cross compile. */
8397 wc = TREE_INT_CST_LOW (treeop1);
8398 constant_part
8399 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8400 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8401 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8402 op0 = force_operand (op0, target);
8403 return REDUCE_BIT_FIELD (op0);
8404 }
8405 }
8406
8407 /* Use TER to expand pointer addition of a negated value
8408 as pointer subtraction. */
8409 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8410 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8411 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8412 && TREE_CODE (treeop1) == SSA_NAME
8413 && TYPE_MODE (TREE_TYPE (treeop0))
8414 == TYPE_MODE (TREE_TYPE (treeop1)))
8415 {
8416 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8417 if (def)
8418 {
8419 treeop1 = gimple_assign_rhs1 (def);
8420 code = MINUS_EXPR;
8421 goto do_minus;
8422 }
8423 }
8424
8425 /* No sense saving up arithmetic to be done
8426 if it's all in the wrong mode to form part of an address.
8427 And force_operand won't know whether to sign-extend or
8428 zero-extend. */
8429 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8430 || mode != ptr_mode)
8431 {
8432 expand_operands (treeop0, treeop1,
8433 subtarget, &op0, &op1, EXPAND_NORMAL);
8434 if (op0 == const0_rtx)
8435 return op1;
8436 if (op1 == const0_rtx)
8437 return op0;
8438 goto binop2;
8439 }
8440
8441 expand_operands (treeop0, treeop1,
8442 subtarget, &op0, &op1, modifier);
8443 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8444
8445 case MINUS_EXPR:
8446 do_minus:
8447 /* For initializers, we are allowed to return a MINUS of two
8448 symbolic constants. Here we handle all cases when both operands
8449 are constant. */
8450 /* Handle difference of two symbolic constants,
8451 for the sake of an initializer. */
8452 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8453 && really_constant_p (treeop0)
8454 && really_constant_p (treeop1))
8455 {
8456 expand_operands (treeop0, treeop1,
8457 NULL_RTX, &op0, &op1, modifier);
8458
8459 /* If the last operand is a CONST_INT, use plus_constant of
8460 the negated constant. Else make the MINUS. */
8461 if (CONST_INT_P (op1))
8462 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8463 -INTVAL (op1)));
8464 else
8465 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8466 }
8467
8468 /* No sense saving up arithmetic to be done
8469 if it's all in the wrong mode to form part of an address.
8470 And force_operand won't know whether to sign-extend or
8471 zero-extend. */
8472 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8473 || mode != ptr_mode)
8474 goto binop;
8475
8476 expand_operands (treeop0, treeop1,
8477 subtarget, &op0, &op1, modifier);
8478
8479 /* Convert A - const to A + (-const). */
8480 if (CONST_INT_P (op1))
8481 {
8482 op1 = negate_rtx (mode, op1);
8483 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8484 }
8485
8486 goto binop2;
8487
8488 case WIDEN_MULT_PLUS_EXPR:
8489 case WIDEN_MULT_MINUS_EXPR:
8490 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8491 op2 = expand_normal (treeop2);
8492 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8493 target, unsignedp);
8494 return target;
8495
8496 case WIDEN_MULT_EXPR:
8497 /* If first operand is constant, swap them.
8498 Thus the following special case checks need only
8499 check the second operand. */
8500 if (TREE_CODE (treeop0) == INTEGER_CST)
8501 {
8502 tree t1 = treeop0;
8503 treeop0 = treeop1;
8504 treeop1 = t1;
8505 }
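/* A widening multiply computes a product in a mode wider than that of
its operands, e.g. an SImode product of two HImode values, so the
operands are expanded in their original (inner) mode and the widening is
left to the multiply itself. */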
8506
8507 /* First, check if we have a multiplication of one signed and one
8508 unsigned operand. */
8509 if (TREE_CODE (treeop1) != INTEGER_CST
8510 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8511 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8512 {
8513 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8514 this_optab = usmul_widen_optab;
8515 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8516 != CODE_FOR_nothing)
8517 {
8518 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8519 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8520 EXPAND_NORMAL);
8521 else
8522 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8523 EXPAND_NORMAL);
8524 /* op0 and op1 might still be constant, despite the above
8525 != INTEGER_CST check. Handle it. */
8526 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8527 {
8528 op0 = convert_modes (innermode, mode, op0, true);
8529 op1 = convert_modes (innermode, mode, op1, false);
8530 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8531 target, unsignedp));
8532 }
8533 goto binop3;
8534 }
8535 }
8536 /* Check for a multiplication with matching signedness. */
8537 else if ((TREE_CODE (treeop1) == INTEGER_CST
8538 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8539 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8540 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8541 {
8542 tree op0type = TREE_TYPE (treeop0);
8543 machine_mode innermode = TYPE_MODE (op0type);
8544 bool zextend_p = TYPE_UNSIGNED (op0type);
8545 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8546 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8547
8548 if (TREE_CODE (treeop0) != INTEGER_CST)
8549 {
8550 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8551 != CODE_FOR_nothing)
8552 {
8553 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8554 EXPAND_NORMAL);
8555 /* op0 and op1 might still be constant, despite the above
8556 != INTEGER_CST check. Handle it. */
8557 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8558 {
8559 widen_mult_const:
8560 op0 = convert_modes (innermode, mode, op0, zextend_p);
8561 op1
8562 = convert_modes (innermode, mode, op1,
8563 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8564 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8565 target,
8566 unsignedp));
8567 }
8568 temp = expand_widening_mult (mode, op0, op1, target,
8569 unsignedp, this_optab);
8570 return REDUCE_BIT_FIELD (temp);
8571 }
8572 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8573 != CODE_FOR_nothing
8574 && innermode == word_mode)
8575 {
8576 rtx htem, hipart;
8577 op0 = expand_normal (treeop0);
8578 if (TREE_CODE (treeop1) == INTEGER_CST)
8579 op1 = convert_modes (innermode, mode,
8580 expand_normal (treeop1),
8581 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8582 else
8583 op1 = expand_normal (treeop1);
8584 /* op0 and op1 might still be constant, despite the above
8585 != INTEGER_CST check. Handle it. */
8586 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8587 goto widen_mult_const;
8588 temp = expand_binop (mode, other_optab, op0, op1, target,
8589 unsignedp, OPTAB_LIB_WIDEN);
8590 hipart = gen_highpart (innermode, temp);
8591 htem = expand_mult_highpart_adjust (innermode, hipart,
8592 op0, op1, hipart,
8593 zextend_p);
8594 if (htem != hipart)
8595 emit_move_insn (hipart, htem);
8596 return REDUCE_BIT_FIELD (temp);
8597 }
8598 }
8599 }
8600 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8601 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8602 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8603 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8604
8605 case FMA_EXPR:
8606 {
8607 optab opt = fma_optab;
8608 gimple def0, def2;
8609
8610 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8611 call. */
8612 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8613 {
8614 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8615 tree call_expr;
8616
8617 gcc_assert (fn != NULL_TREE);
8618 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8619 return expand_builtin (call_expr, target, subtarget, mode, false);
8620 }
8621
8622 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8623 /* The multiplication is commutative - look at its 2nd operand
8624 if the first isn't fed by a negate. */
8625 if (!def0)
8626 {
8627 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8628 /* Swap operands if the 2nd operand is fed by a negate. */
8629 if (def0)
8630 {
8631 tree tem = treeop0;
8632 treeop0 = treeop1;
8633 treeop1 = tem;
8634 }
8635 }
8636 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8637
8638 op0 = op2 = NULL;
8639
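/* FMA_EXPR <a, b, c> computes a * b + c. When an operand is fed by a
negation, use the corresponding combined optab so the negation is
absorbed: (-a) * b + c uses fnma, a * b + (-c) uses fms, and
(-a) * b + (-c) uses fnms. */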
8640 if (def0 && def2
8641 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8642 {
8643 opt = fnms_optab;
8644 op0 = expand_normal (gimple_assign_rhs1 (def0));
8645 op2 = expand_normal (gimple_assign_rhs1 (def2));
8646 }
8647 else if (def0
8648 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8649 {
8650 opt = fnma_optab;
8651 op0 = expand_normal (gimple_assign_rhs1 (def0));
8652 }
8653 else if (def2
8654 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8655 {
8656 opt = fms_optab;
8657 op2 = expand_normal (gimple_assign_rhs1 (def2));
8658 }
8659
8660 if (op0 == NULL)
8661 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8662 if (op2 == NULL)
8663 op2 = expand_normal (treeop2);
8664 op1 = expand_normal (treeop1);
8665
8666 return expand_ternary_op (TYPE_MODE (type), opt,
8667 op0, op1, op2, target, 0);
8668 }
8669
8670 case MULT_EXPR:
8671 /* If this is a fixed-point operation, then we cannot use the code
8672 below because "expand_mult" doesn't support sat/no-sat fixed-point
8673 multiplications. */
8674 if (ALL_FIXED_POINT_MODE_P (mode))
8675 goto binop;
8676
8677 /* If first operand is constant, swap them.
8678 Thus the following special case checks need only
8679 check the second operand. */
8680 if (TREE_CODE (treeop0) == INTEGER_CST)
8681 {
8682 tree t1 = treeop0;
8683 treeop0 = treeop1;
8684 treeop1 = t1;
8685 }
8686
8687 /* Attempt to return something suitable for generating an
8688 indexed address, for machines that support that. */
8689
8690 if (modifier == EXPAND_SUM && mode == ptr_mode
8691 && tree_fits_shwi_p (treeop1))
8692 {
8693 tree exp1 = treeop1;
8694
8695 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8696 EXPAND_SUM);
8697
8698 if (!REG_P (op0))
8699 op0 = force_operand (op0, NULL_RTX);
8700 if (!REG_P (op0))
8701 op0 = copy_to_mode_reg (mode, op0);
8702
8703 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8704 gen_int_mode (tree_to_shwi (exp1),
8705 TYPE_MODE (TREE_TYPE (exp1)))));
8706 }
8707
8708 if (modifier == EXPAND_STACK_PARM)
8709 target = 0;
8710
8711 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8712 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8713
8714 case TRUNC_DIV_EXPR:
8715 case FLOOR_DIV_EXPR:
8716 case CEIL_DIV_EXPR:
8717 case ROUND_DIV_EXPR:
8718 case EXACT_DIV_EXPR:
8719 /* If this is a fixed-point operation, then we cannot use the code
8720 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8721 divisions. */
8722 if (ALL_FIXED_POINT_MODE_P (mode))
8723 goto binop;
8724
8725 if (modifier == EXPAND_STACK_PARM)
8726 target = 0;
8727 /* Possible optimization: compute the dividend with EXPAND_SUM
8728 then, if the divisor is constant, we can optimize the case
8729 where some terms of the dividend have coefficients divisible by it. */
8730 expand_operands (treeop0, treeop1,
8731 subtarget, &op0, &op1, EXPAND_NORMAL);
8732 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8733
8734 case RDIV_EXPR:
8735 goto binop;
8736
8737 case MULT_HIGHPART_EXPR:
8738 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8739 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8740 gcc_assert (temp);
8741 return temp;
8742
8743 case TRUNC_MOD_EXPR:
8744 case FLOOR_MOD_EXPR:
8745 case CEIL_MOD_EXPR:
8746 case ROUND_MOD_EXPR:
8747 if (modifier == EXPAND_STACK_PARM)
8748 target = 0;
8749 expand_operands (treeop0, treeop1,
8750 subtarget, &op0, &op1, EXPAND_NORMAL);
8751 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8752
8753 case FIXED_CONVERT_EXPR:
8754 op0 = expand_normal (treeop0);
8755 if (target == 0 || modifier == EXPAND_STACK_PARM)
8756 target = gen_reg_rtx (mode);
8757
8758 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8759 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8760 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8761 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8762 else
8763 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8764 return target;
8765
8766 case FIX_TRUNC_EXPR:
8767 op0 = expand_normal (treeop0);
8768 if (target == 0 || modifier == EXPAND_STACK_PARM)
8769 target = gen_reg_rtx (mode);
8770 expand_fix (target, op0, unsignedp);
8771 return target;
8772
8773 case FLOAT_EXPR:
8774 op0 = expand_normal (treeop0);
8775 if (target == 0 || modifier == EXPAND_STACK_PARM)
8776 target = gen_reg_rtx (mode);
8777 /* expand_float can't figure out what to do if FROM has VOIDmode.
8778 So give it the correct mode. With -O, cse will optimize this. */
8779 if (GET_MODE (op0) == VOIDmode)
8780 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8781 op0);
8782 expand_float (target, op0,
8783 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8784 return target;
8785
8786 case NEGATE_EXPR:
8787 op0 = expand_expr (treeop0, subtarget,
8788 VOIDmode, EXPAND_NORMAL);
8789 if (modifier == EXPAND_STACK_PARM)
8790 target = 0;
8791 temp = expand_unop (mode,
8792 optab_for_tree_code (NEGATE_EXPR, type,
8793 optab_default),
8794 op0, target, 0);
8795 gcc_assert (temp);
8796 return REDUCE_BIT_FIELD (temp);
8797
8798 case ABS_EXPR:
8799 op0 = expand_expr (treeop0, subtarget,
8800 VOIDmode, EXPAND_NORMAL);
8801 if (modifier == EXPAND_STACK_PARM)
8802 target = 0;
8803
8804 /* ABS_EXPR is not valid for complex arguments. */
8805 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8806 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8807
8808 /* Unsigned abs is simply the operand. Testing here means we don't
8809 risk generating incorrect code below. */
8810 if (TYPE_UNSIGNED (type))
8811 return op0;
8812
8813 return expand_abs (mode, op0, target, unsignedp,
8814 safe_from_p (target, treeop0, 1));
8815
8816 case MAX_EXPR:
8817 case MIN_EXPR:
8818 target = original_target;
8819 if (target == 0
8820 || modifier == EXPAND_STACK_PARM
8821 || (MEM_P (target) && MEM_VOLATILE_P (target))
8822 || GET_MODE (target) != mode
8823 || (REG_P (target)
8824 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8825 target = gen_reg_rtx (mode);
8826 expand_operands (treeop0, treeop1,
8827 target, &op0, &op1, EXPAND_NORMAL);
8828
8829 /* First try to do it with a special MIN or MAX instruction.
8830 If that does not win, use a conditional jump to select the proper
8831 value. */
8832 this_optab = optab_for_tree_code (code, type, optab_default);
8833 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8834 OPTAB_WIDEN);
8835 if (temp != 0)
8836 return temp;
8837
8838 /* At this point, a MEM target is no longer useful; we will get better
8839 code without it. */
8840
8841 if (! REG_P (target))
8842 target = gen_reg_rtx (mode);
8843
8844 /* If op1 was placed in target, swap op0 and op1. */
8845 if (target != op0 && target == op1)
8846 {
8847 temp = op0;
8848 op0 = op1;
8849 op1 = temp;
8850 }
8851
8852 /* We generate better code and avoid problems with op1 mentioning
8853 target by forcing op1 into a pseudo if it isn't a constant. */
8854 if (! CONSTANT_P (op1))
8855 op1 = force_reg (mode, op1);
8856
8857 {
8858 enum rtx_code comparison_code;
8859 rtx cmpop1 = op1;
8860
8861 if (code == MAX_EXPR)
8862 comparison_code = unsignedp ? GEU : GE;
8863 else
8864 comparison_code = unsignedp ? LEU : LE;
8865
8866 /* Canonicalize to comparisons against 0. */
8867 if (op1 == const1_rtx)
8868 {
8869 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8870 or (a != 0 ? a : 1) for unsigned.
8871 For MIN we are safe converting (a <= 1 ? a : 1)
8872 into (a <= 0 ? a : 1) */
8873 cmpop1 = const0_rtx;
8874 if (code == MAX_EXPR)
8875 comparison_code = unsignedp ? NE : GT;
8876 }
8877 if (op1 == constm1_rtx && !unsignedp)
8878 {
8879 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8880 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8881 cmpop1 = const0_rtx;
8882 if (code == MIN_EXPR)
8883 comparison_code = LT;
8884 }
8885 #ifdef HAVE_conditional_move
8886 /* Use a conditional move if possible. */
8887 if (can_conditionally_move_p (mode))
8888 {
8889 rtx insn;
8890
8891 start_sequence ();
8892
8893 /* Try to emit the conditional move. */
8894 insn = emit_conditional_move (target, comparison_code,
8895 op0, cmpop1, mode,
8896 op0, op1, mode,
8897 unsignedp);
8898
8899 /* If we could do the conditional move, emit the sequence,
8900 and return. */
8901 if (insn)
8902 {
8903 rtx_insn *seq = get_insns ();
8904 end_sequence ();
8905 emit_insn (seq);
8906 return target;
8907 }
8908
8909 /* Otherwise discard the sequence and fall back to code with
8910 branches. */
8911 end_sequence ();
8912 }
8913 #endif
8914 if (target != op0)
8915 emit_move_insn (target, op0);
8916
8917 temp = gen_label_rtx ();
8918 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8919 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8920 -1);
8921 }
8922 emit_move_insn (target, op1);
8923 emit_label (temp);
8924 return target;
8925
8926 case BIT_NOT_EXPR:
8927 op0 = expand_expr (treeop0, subtarget,
8928 VOIDmode, EXPAND_NORMAL);
8929 if (modifier == EXPAND_STACK_PARM)
8930 target = 0;
8931 /* In case we have to reduce the result to bit-field precision for an
8932 unsigned bit-field, expand this as an XOR with a proper constant
8933 instead. */
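/* For example (a sketch): for a 3-bit unsigned bit-field held in QImode,
   ~x is emitted as x ^ 0x7, so the bits above the field stay zero.  */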
8934 if (reduce_bit_field && TYPE_UNSIGNED (type))
8935 {
8936 wide_int mask = wi::mask (TYPE_PRECISION (type),
8937 false, GET_MODE_PRECISION (mode));
8938
8939 temp = expand_binop (mode, xor_optab, op0,
8940 immed_wide_int_const (mask, mode),
8941 target, 1, OPTAB_LIB_WIDEN);
8942 }
8943 else
8944 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8945 gcc_assert (temp);
8946 return temp;
8947
8948 /* ??? Can optimize bitwise operations with one arg constant.
8949 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8950 and (a bitwise1 b) bitwise2 b (etc)
8951 but that is probably not worthwhile. */
8952
8953 case BIT_AND_EXPR:
8954 case BIT_IOR_EXPR:
8955 case BIT_XOR_EXPR:
8956 goto binop;
8957
8958 case LROTATE_EXPR:
8959 case RROTATE_EXPR:
8960 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8961 || (GET_MODE_PRECISION (TYPE_MODE (type))
8962 == TYPE_PRECISION (type)));
8963 /* fall through */
8964
8965 case LSHIFT_EXPR:
8966 case RSHIFT_EXPR:
8967 /* If this is a fixed-point operation, then we cannot use the code
8968 below because "expand_shift" doesn't support sat/no-sat fixed-point
8969 shifts. */
8970 if (ALL_FIXED_POINT_MODE_P (mode))
8971 goto binop;
8972
8973 if (! safe_from_p (subtarget, treeop1, 1))
8974 subtarget = 0;
8975 if (modifier == EXPAND_STACK_PARM)
8976 target = 0;
8977 op0 = expand_expr (treeop0, subtarget,
8978 VOIDmode, EXPAND_NORMAL);
8979 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8980 unsignedp);
8981 if (code == LSHIFT_EXPR)
8982 temp = REDUCE_BIT_FIELD (temp);
8983 return temp;
8984
8985 /* Could determine the answer when only additive constants differ. Also,
8986 the addition of one can be handled by changing the condition. */
8987 case LT_EXPR:
8988 case LE_EXPR:
8989 case GT_EXPR:
8990 case GE_EXPR:
8991 case EQ_EXPR:
8992 case NE_EXPR:
8993 case UNORDERED_EXPR:
8994 case ORDERED_EXPR:
8995 case UNLT_EXPR:
8996 case UNLE_EXPR:
8997 case UNGT_EXPR:
8998 case UNGE_EXPR:
8999 case UNEQ_EXPR:
9000 case LTGT_EXPR:
9001 temp = do_store_flag (ops,
9002 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9003 tmode != VOIDmode ? tmode : mode);
9004 if (temp)
9005 return temp;
9006
9007 /* Use a compare and a jump for BLKmode comparisons, or for function
9008 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9009
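/* The fallback below emits, roughly (a sketch):
     target = 0; if (!(op0 <cmp> op1)) goto L; target = 1; L: ...
   with -1 in place of 1 for a signed 1-bit type.  */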
9010 if ((target == 0
9011 || modifier == EXPAND_STACK_PARM
9012 || ! safe_from_p (target, treeop0, 1)
9013 || ! safe_from_p (target, treeop1, 1)
9014 /* Make sure we don't have a hard reg (such as function's return
9015 value) live across basic blocks, if not optimizing. */
9016 || (!optimize && REG_P (target)
9017 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9018 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9019
9020 emit_move_insn (target, const0_rtx);
9021
9022 op1 = gen_label_rtx ();
9023 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9024
9025 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9026 emit_move_insn (target, constm1_rtx);
9027 else
9028 emit_move_insn (target, const1_rtx);
9029
9030 emit_label (op1);
9031 return target;
9032
9033 case COMPLEX_EXPR:
9034 /* Get the rtx code of the operands. */
9035 op0 = expand_normal (treeop0);
9036 op1 = expand_normal (treeop1);
9037
9038 if (!target)
9039 target = gen_reg_rtx (TYPE_MODE (type));
9040 else
9041 /* If target overlaps with op1, then either we need to force
9042 op1 into a pseudo (if target also overlaps with op0),
9043 or write the complex parts in reverse order. */
9044 switch (GET_CODE (target))
9045 {
9046 case CONCAT:
9047 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9048 {
9049 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9050 {
9051 complex_expr_force_op1:
9052 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9053 emit_move_insn (temp, op1);
9054 op1 = temp;
9055 break;
9056 }
9057 complex_expr_swap_order:
9058 /* Move the imaginary (op1) and real (op0) parts to their
9059 location. */
9060 write_complex_part (target, op1, true);
9061 write_complex_part (target, op0, false);
9062
9063 return target;
9064 }
9065 break;
9066 case MEM:
9067 temp = adjust_address_nv (target,
9068 GET_MODE_INNER (GET_MODE (target)), 0);
9069 if (reg_overlap_mentioned_p (temp, op1))
9070 {
9071 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9072 temp = adjust_address_nv (target, imode,
9073 GET_MODE_SIZE (imode));
9074 if (reg_overlap_mentioned_p (temp, op0))
9075 goto complex_expr_force_op1;
9076 goto complex_expr_swap_order;
9077 }
9078 break;
9079 default:
9080 if (reg_overlap_mentioned_p (target, op1))
9081 {
9082 if (reg_overlap_mentioned_p (target, op0))
9083 goto complex_expr_force_op1;
9084 goto complex_expr_swap_order;
9085 }
9086 break;
9087 }
9088
9089 /* Move the real (op0) and imaginary (op1) parts to their location. */
9090 write_complex_part (target, op0, false);
9091 write_complex_part (target, op1, true);
9092
9093 return target;
9094
9095 case WIDEN_SUM_EXPR:
9096 {
9097 tree oprnd0 = treeop0;
9098 tree oprnd1 = treeop1;
9099
9100 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9101 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9102 target, unsignedp);
9103 return target;
9104 }
9105
9106 case REDUC_MAX_EXPR:
9107 case REDUC_MIN_EXPR:
9108 case REDUC_PLUS_EXPR:
9109 {
9110 op0 = expand_normal (treeop0);
9111 this_optab = optab_for_tree_code (code, type, optab_default);
9112 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9113
9114 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9115 {
9116 struct expand_operand ops[2];
9117 enum insn_code icode = optab_handler (this_optab, vec_mode);
9118
9119 create_output_operand (&ops[0], target, mode);
9120 create_input_operand (&ops[1], op0, vec_mode);
9121 if (maybe_expand_insn (icode, 2, ops))
9122 {
9123 target = ops[0].value;
9124 if (GET_MODE (target) != mode)
9125 return gen_lowpart (tmode, target);
9126 return target;
9127 }
9128 }
9129 /* Fall back to optab with vector result, and then extract scalar. */
9130 this_optab = scalar_reduc_to_vector (this_optab, type);
9131 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9132 gcc_assert (temp);
9133 /* The tree code produces a scalar result, but (somewhat by convention)
9134 the optab produces a vector with the result in element 0 if
9135 little-endian, or element N-1 if big-endian. So pull the scalar
9136 result out of that element. */
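/* E.g. (a sketch): a REDUC_PLUS_EXPR on a V4SImode operand on a
   little-endian target leaves the sum in element 0, so the extraction
   below reads a 32-bit field at bit offset 0.  */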
9137 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9138 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9139 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9140 target, mode, mode);
9141 gcc_assert (temp);
9142 return temp;
9143 }
9144
9145 case VEC_RSHIFT_EXPR:
9146 {
9147 target = expand_vec_shift_expr (ops, target);
9148 return target;
9149 }
9150
9151 case VEC_UNPACK_HI_EXPR:
9152 case VEC_UNPACK_LO_EXPR:
9153 {
9154 op0 = expand_normal (treeop0);
9155 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9156 target, unsignedp);
9157 gcc_assert (temp);
9158 return temp;
9159 }
9160
9161 case VEC_UNPACK_FLOAT_HI_EXPR:
9162 case VEC_UNPACK_FLOAT_LO_EXPR:
9163 {
9164 op0 = expand_normal (treeop0);
9165 /* The signedness is determined from the input operand. */
9166 temp = expand_widen_pattern_expr
9167 (ops, op0, NULL_RTX, NULL_RTX,
9168 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9169
9170 gcc_assert (temp);
9171 return temp;
9172 }
9173
9174 case VEC_WIDEN_MULT_HI_EXPR:
9175 case VEC_WIDEN_MULT_LO_EXPR:
9176 case VEC_WIDEN_MULT_EVEN_EXPR:
9177 case VEC_WIDEN_MULT_ODD_EXPR:
9178 case VEC_WIDEN_LSHIFT_HI_EXPR:
9179 case VEC_WIDEN_LSHIFT_LO_EXPR:
9180 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9181 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9182 target, unsignedp);
9183 gcc_assert (target);
9184 return target;
9185
9186 case VEC_PACK_TRUNC_EXPR:
9187 case VEC_PACK_SAT_EXPR:
9188 case VEC_PACK_FIX_TRUNC_EXPR:
9189 mode = TYPE_MODE (TREE_TYPE (treeop0));
9190 goto binop;
9191
9192 case VEC_PERM_EXPR:
9193 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9194 op2 = expand_normal (treeop2);
9195
9196 /* Careful here: if the target doesn't support integral vector modes,
9197 a constant selection vector could wind up smooshed into a normal
9198 integral constant. */
9199 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9200 {
9201 tree sel_type = TREE_TYPE (treeop2);
9202 machine_mode vmode
9203 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9204 TYPE_VECTOR_SUBPARTS (sel_type));
9205 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9206 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9207 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9208 }
9209 else
9210 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9211
9212 temp = expand_vec_perm (mode, op0, op1, op2, target);
9213 gcc_assert (temp);
9214 return temp;
9215
9216 case DOT_PROD_EXPR:
9217 {
9218 tree oprnd0 = treeop0;
9219 tree oprnd1 = treeop1;
9220 tree oprnd2 = treeop2;
9221 rtx op2;
9222
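/* DOT_PROD_EXPR is, roughly, op2 + sum (op0[i] * op1[i]) with the
   products widened to the result type; it is expanded through the
   target's widening dot-product pattern.  */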
9223 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9224 op2 = expand_normal (oprnd2);
9225 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9226 target, unsignedp);
9227 return target;
9228 }
9229
9230 case SAD_EXPR:
9231 {
9232 tree oprnd0 = treeop0;
9233 tree oprnd1 = treeop1;
9234 tree oprnd2 = treeop2;
9235 rtx op2;
9236
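/* SAD_EXPR is, roughly, op2 + sum (abs (op0[i] - op1[i])), likewise
   expanded through a widening target pattern.  */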
9237 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9238 op2 = expand_normal (oprnd2);
9239 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9240 target, unsignedp);
9241 return target;
9242 }
9243
9244 case REALIGN_LOAD_EXPR:
9245 {
9246 tree oprnd0 = treeop0;
9247 tree oprnd1 = treeop1;
9248 tree oprnd2 = treeop2;
9249 rtx op2;
9250
9251 this_optab = optab_for_tree_code (code, type, optab_default);
9252 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9253 op2 = expand_normal (oprnd2);
9254 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9255 target, unsignedp);
9256 gcc_assert (temp);
9257 return temp;
9258 }
9259
9260 case COND_EXPR:
9261 /* A COND_EXPR with its type being VOID_TYPE represents a
9262 conditional jump and is handled in
9263 expand_gimple_cond_expr. */
9264 gcc_assert (!VOID_TYPE_P (type));
9265
9266 /* Note that COND_EXPRs whose type is a structure or union
9267 are required to be constructed to contain assignments of
9268 a temporary variable, so that we can evaluate them here
9269 for side effect only. If type is void, we must do likewise. */
9270
9271 gcc_assert (!TREE_ADDRESSABLE (type)
9272 && !ignore
9273 && TREE_TYPE (treeop1) != void_type_node
9274 && TREE_TYPE (treeop2) != void_type_node);
9275
9276 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9277 if (temp)
9278 return temp;
9279
9280 /* If we are not to produce a result, we have no target. Otherwise,
9281 if a target was specified use it; it will not be used as an
9282 intermediate target unless it is safe. If no target, use a
9283 temporary. */
9284
9285 if (modifier != EXPAND_STACK_PARM
9286 && original_target
9287 && safe_from_p (original_target, treeop0, 1)
9288 && GET_MODE (original_target) == mode
9289 && !MEM_P (original_target))
9290 temp = original_target;
9291 else
9292 temp = assign_temp (type, 0, 1);
9293
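/* The code below emits, roughly (a sketch):
     if (!treeop0) goto lab0;
     temp = treeop1;  goto lab1;
   lab0: temp = treeop2;
   lab1: ...
   with op0 and op1 serving as the two labels.  */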
9294 do_pending_stack_adjust ();
9295 NO_DEFER_POP;
9296 op0 = gen_label_rtx ();
9297 op1 = gen_label_rtx ();
9298 jumpifnot (treeop0, op0, -1);
9299 store_expr (treeop1, temp,
9300 modifier == EXPAND_STACK_PARM,
9301 false);
9302
9303 emit_jump_insn (gen_jump (op1));
9304 emit_barrier ();
9305 emit_label (op0);
9306 store_expr (treeop2, temp,
9307 modifier == EXPAND_STACK_PARM,
9308 false);
9309
9310 emit_label (op1);
9311 OK_DEFER_POP;
9312 return temp;
9313
9314 case VEC_COND_EXPR:
9315 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9316 return target;
9317
9318 default:
9319 gcc_unreachable ();
9320 }
9321
9322 /* Here to do an ordinary binary operator. */
9323 binop:
9324 expand_operands (treeop0, treeop1,
9325 subtarget, &op0, &op1, EXPAND_NORMAL);
9326 binop2:
9327 this_optab = optab_for_tree_code (code, type, optab_default);
9328 binop3:
9329 if (modifier == EXPAND_STACK_PARM)
9330 target = 0;
9331 temp = expand_binop (mode, this_optab, op0, op1, target,
9332 unsignedp, OPTAB_LIB_WIDEN);
9333 gcc_assert (temp);
9334 /* Bitwise operations do not need bitfield reduction as we expect their
9335 operands being properly truncated. */
9336 if (code == BIT_XOR_EXPR
9337 || code == BIT_AND_EXPR
9338 || code == BIT_IOR_EXPR)
9339 return temp;
9340 return REDUCE_BIT_FIELD (temp);
9341 }
9342 #undef REDUCE_BIT_FIELD
9343
9344
9345 /* Return TRUE if expression STMT is suitable for replacement.
9346 Never consider memory loads as replaceable, because those don't ever lead
9347 into constant expressions. */
9348
9349 static bool
9350 stmt_is_replaceable_p (gimple stmt)
9351 {
9352 if (ssa_is_replaceable_p (stmt))
9353 {
9354 /* Don't move around loads. */
9355 if (!gimple_assign_single_p (stmt)
9356 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9357 return true;
9358 }
9359 return false;
9360 }
9361
9362 rtx
9363 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9364 enum expand_modifier modifier, rtx *alt_rtl,
9365 bool inner_reference_p)
9366 {
9367 rtx op0, op1, temp, decl_rtl;
9368 tree type;
9369 int unsignedp;
9370 machine_mode mode;
9371 enum tree_code code = TREE_CODE (exp);
9372 rtx subtarget, original_target;
9373 int ignore;
9374 tree context;
9375 bool reduce_bit_field;
9376 location_t loc = EXPR_LOCATION (exp);
9377 struct separate_ops ops;
9378 tree treeop0, treeop1, treeop2;
9379 tree ssa_name = NULL_TREE;
9380 gimple g;
9381
9382 type = TREE_TYPE (exp);
9383 mode = TYPE_MODE (type);
9384 unsignedp = TYPE_UNSIGNED (type);
9385
9386 treeop0 = treeop1 = treeop2 = NULL_TREE;
9387 if (!VL_EXP_CLASS_P (exp))
9388 switch (TREE_CODE_LENGTH (code))
9389 {
9390 default:
9391 case 3: treeop2 = TREE_OPERAND (exp, 2);
9392 case 2: treeop1 = TREE_OPERAND (exp, 1);
9393 case 1: treeop0 = TREE_OPERAND (exp, 0);
9394 case 0: break;
9395 }
9396 ops.code = code;
9397 ops.type = type;
9398 ops.op0 = treeop0;
9399 ops.op1 = treeop1;
9400 ops.op2 = treeop2;
9401 ops.location = loc;
9402
9403 ignore = (target == const0_rtx
9404 || ((CONVERT_EXPR_CODE_P (code)
9405 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9406 && TREE_CODE (type) == VOID_TYPE));
9407
9408 /* An operation in what may be a bit-field type needs the
9409 result to be reduced to the precision of the bit-field type,
9410 which is narrower than that of the type's mode. */
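/* For instance, arithmetic on a 24-bit bit-field type whose mode is
   SImode must have its 32-bit result truncated back to 24 bits.  */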
9411 reduce_bit_field = (!ignore
9412 && INTEGRAL_TYPE_P (type)
9413 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9414
9415 /* If we are going to ignore this result, we need only do something
9416 if there is a side-effect somewhere in the expression. If there
9417 is, short-circuit the most common cases here. Note that we must
9418 not call expand_expr with anything but const0_rtx in case this
9419 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9420
9421 if (ignore)
9422 {
9423 if (! TREE_SIDE_EFFECTS (exp))
9424 return const0_rtx;
9425
9426 /* Ensure we reference a volatile object even if value is ignored, but
9427 don't do this if all we are doing is taking its address. */
9428 if (TREE_THIS_VOLATILE (exp)
9429 && TREE_CODE (exp) != FUNCTION_DECL
9430 && mode != VOIDmode && mode != BLKmode
9431 && modifier != EXPAND_CONST_ADDRESS)
9432 {
9433 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9434 if (MEM_P (temp))
9435 copy_to_reg (temp);
9436 return const0_rtx;
9437 }
9438
9439 if (TREE_CODE_CLASS (code) == tcc_unary
9440 || code == BIT_FIELD_REF
9441 || code == COMPONENT_REF
9442 || code == INDIRECT_REF)
9443 return expand_expr (treeop0, const0_rtx, VOIDmode,
9444 modifier);
9445
9446 else if (TREE_CODE_CLASS (code) == tcc_binary
9447 || TREE_CODE_CLASS (code) == tcc_comparison
9448 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9449 {
9450 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9451 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9452 return const0_rtx;
9453 }
9454
9455 target = 0;
9456 }
9457
9458 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9459 target = 0;
9460
9461 /* Use subtarget as the target for operand 0 of a binary operation. */
9462 subtarget = get_subtarget (target);
9463 original_target = target;
9464
9465 switch (code)
9466 {
9467 case LABEL_DECL:
9468 {
9469 tree function = decl_function_context (exp);
9470
9471 temp = label_rtx (exp);
9472 temp = gen_rtx_LABEL_REF (Pmode, temp);
9473
9474 if (function != current_function_decl
9475 && function != 0)
9476 LABEL_REF_NONLOCAL_P (temp) = 1;
9477
9478 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9479 return temp;
9480 }
9481
9482 case SSA_NAME:
9483 /* ??? ivopts calls the expander without any preparation from
9484 out-of-ssa. So fake instructions as if this were an access to the
9485 base variable. This unnecessarily allocates a pseudo; see whether we
9486 can reuse it if partition base vars have it set already. */
9487 if (!currently_expanding_to_rtl)
9488 {
9489 tree var = SSA_NAME_VAR (exp);
9490 if (var && DECL_RTL_SET_P (var))
9491 return DECL_RTL (var);
9492 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9493 LAST_VIRTUAL_REGISTER + 1);
9494 }
9495
9496 g = get_gimple_for_ssa_name (exp);
9497 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9498 if (g == NULL
9499 && modifier == EXPAND_INITIALIZER
9500 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9501 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9502 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9503 g = SSA_NAME_DEF_STMT (exp);
9504 if (g)
9505 {
9506 rtx r;
9507 ops.code = gimple_assign_rhs_code (g);
9508 switch (get_gimple_rhs_class (ops.code))
9509 {
9510 case GIMPLE_TERNARY_RHS:
9511 ops.op2 = gimple_assign_rhs3 (g);
9512 /* Fallthru */
9513 case GIMPLE_BINARY_RHS:
9514 ops.op1 = gimple_assign_rhs2 (g);
9515 /* Fallthru */
9516 case GIMPLE_UNARY_RHS:
9517 ops.op0 = gimple_assign_rhs1 (g);
9518 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9519 ops.location = gimple_location (g);
9520 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9521 break;
9522 case GIMPLE_SINGLE_RHS:
9523 {
9524 location_t saved_loc = curr_insn_location ();
9525 set_curr_insn_location (gimple_location (g));
9526 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9527 tmode, modifier, NULL, inner_reference_p);
9528 set_curr_insn_location (saved_loc);
9529 break;
9530 }
9531 default:
9532 gcc_unreachable ();
9533 }
9534 if (REG_P (r) && !REG_EXPR (r))
9535 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9536 return r;
9537 }
9538
9539 ssa_name = exp;
9540 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9541 exp = SSA_NAME_VAR (ssa_name);
9542 goto expand_decl_rtl;
9543
9544 case PARM_DECL:
9545 case VAR_DECL:
9546 /* If a static var's type was incomplete when the decl was written,
9547 but the type is complete now, lay out the decl now. */
9548 if (DECL_SIZE (exp) == 0
9549 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9550 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9551 layout_decl (exp, 0);
9552
9553 /* ... fall through ... */
9554
9555 case FUNCTION_DECL:
9556 case RESULT_DECL:
9557 decl_rtl = DECL_RTL (exp);
9558 expand_decl_rtl:
9559 gcc_assert (decl_rtl);
9560 decl_rtl = copy_rtx (decl_rtl);
9561 /* Record writes to register variables. */
9562 if (modifier == EXPAND_WRITE
9563 && REG_P (decl_rtl)
9564 && HARD_REGISTER_P (decl_rtl))
9565 add_to_hard_reg_set (&crtl->asm_clobbers,
9566 GET_MODE (decl_rtl), REGNO (decl_rtl));
9567
9568 /* Ensure the variable is marked as used even if it doesn't go through
9569 a parser. If it hasn't been used yet, write out an external
9570 definition. */
9571 TREE_USED (exp) = 1;
9572
9573 /* Show we haven't gotten RTL for this yet. */
9574 temp = 0;
9575
9576 /* Variables inherited from containing functions should have
9577 been lowered by this point. */
9578 context = decl_function_context (exp);
9579 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9580 || context == current_function_decl
9581 || TREE_STATIC (exp)
9582 || DECL_EXTERNAL (exp)
9583 /* ??? C++ creates functions that are not TREE_STATIC. */
9584 || TREE_CODE (exp) == FUNCTION_DECL);
9585
9586 /* This is the case of an array whose size is to be determined
9587 from its initializer, while the initializer is still being parsed.
9588 ??? We aren't parsing while expanding anymore. */
9589
9590 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9591 temp = validize_mem (decl_rtl);
9592
9593 /* If DECL_RTL is memory, we are in the normal case and the
9594 address is not valid, get the address into a register. */
9595
9596 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9597 {
9598 if (alt_rtl)
9599 *alt_rtl = decl_rtl;
9600 decl_rtl = use_anchored_address (decl_rtl);
9601 if (modifier != EXPAND_CONST_ADDRESS
9602 && modifier != EXPAND_SUM
9603 && !memory_address_addr_space_p (DECL_MODE (exp),
9604 XEXP (decl_rtl, 0),
9605 MEM_ADDR_SPACE (decl_rtl)))
9606 temp = replace_equiv_address (decl_rtl,
9607 copy_rtx (XEXP (decl_rtl, 0)));
9608 }
9609
9610 /* If we got something, return it. But first, set the alignment
9611 if the address is a register. */
9612 if (temp != 0)
9613 {
9614 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9615 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9616
9617 return temp;
9618 }
9619
9620 /* If the mode of DECL_RTL does not match that of the decl,
9621 there are two cases: we are dealing with a BLKmode value
9622 that is returned in a register, or we are dealing with
9623 a promoted value. In the latter case, return a SUBREG
9624 of the wanted mode, but mark it so that we know that it
9625 was already extended. */
9626 if (REG_P (decl_rtl)
9627 && DECL_MODE (exp) != BLKmode
9628 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9629 {
9630 machine_mode pmode;
9631
9632 /* Get the signedness to be used for this variable. Ensure we get
9633 the same mode we got when the variable was declared. */
9634 if (code == SSA_NAME
9635 && (g = SSA_NAME_DEF_STMT (ssa_name))
9636 && gimple_code (g) == GIMPLE_CALL
9637 && !gimple_call_internal_p (g))
9638 pmode = promote_function_mode (type, mode, &unsignedp,
9639 gimple_call_fntype (g),
9640 2);
9641 else
9642 pmode = promote_decl_mode (exp, &unsignedp);
9643 gcc_assert (GET_MODE (decl_rtl) == pmode);
9644
9645 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9646 SUBREG_PROMOTED_VAR_P (temp) = 1;
9647 SUBREG_PROMOTED_SET (temp, unsignedp);
9648 return temp;
9649 }
9650
9651 return decl_rtl;
9652
9653 case INTEGER_CST:
9654 /* Given that TYPE_PRECISION (type) is not always equal to
9655 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9656 the former to the latter according to the signedness of the
9657 type. */
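/* E.g. (a sketch): -1 of a 3-bit signed type extends to an all-ones
   CONST_INT in the type's mode, while 7 of the corresponding unsigned
   type stays 7.  */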
9658 temp = immed_wide_int_const (wide_int::from
9659 (exp,
9660 GET_MODE_PRECISION (TYPE_MODE (type)),
9661 TYPE_SIGN (type)),
9662 TYPE_MODE (type));
9663 return temp;
9664
9665 case VECTOR_CST:
9666 {
9667 tree tmp = NULL_TREE;
9668 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9669 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9670 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9671 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9672 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9673 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9674 return const_vector_from_tree (exp);
9675 if (GET_MODE_CLASS (mode) == MODE_INT)
9676 {
9677 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9678 if (type_for_mode)
9679 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9680 }
9681 if (!tmp)
9682 {
9683 vec<constructor_elt, va_gc> *v;
9684 unsigned i;
9685 vec_alloc (v, VECTOR_CST_NELTS (exp));
9686 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9687 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9688 tmp = build_constructor (type, v);
9689 }
9690 return expand_expr (tmp, ignore ? const0_rtx : target,
9691 tmode, modifier);
9692 }
9693
9694 case CONST_DECL:
9695 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9696
9697 case REAL_CST:
9698 /* If optimized, generate immediate CONST_DOUBLE
9699 which will be turned into memory by reload if necessary.
9700
9701 We used to force a register so that loop.c could see it. But
9702 this does not allow gen_* patterns to perform optimizations with
9703 the constants. It also produces two insns in cases like "x = 1.0;".
9704 On most machines, floating-point constants are not permitted in
9705 many insns, so we'd end up copying it to a register in any case.
9706
9707 Now, we do the copying in expand_binop, if appropriate. */
9708 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9709 TYPE_MODE (TREE_TYPE (exp)));
9710
9711 case FIXED_CST:
9712 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9713 TYPE_MODE (TREE_TYPE (exp)));
9714
9715 case COMPLEX_CST:
9716 /* Handle evaluating a complex constant in a CONCAT target. */
9717 if (original_target && GET_CODE (original_target) == CONCAT)
9718 {
9719 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9720 rtx rtarg, itarg;
9721
9722 rtarg = XEXP (original_target, 0);
9723 itarg = XEXP (original_target, 1);
9724
9725 /* Move the real and imaginary parts separately. */
9726 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9727 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9728
9729 if (op0 != rtarg)
9730 emit_move_insn (rtarg, op0);
9731 if (op1 != itarg)
9732 emit_move_insn (itarg, op1);
9733
9734 return original_target;
9735 }
9736
9737 /* ... fall through ... */
9738
9739 case STRING_CST:
9740 temp = expand_expr_constant (exp, 1, modifier);
9741
9742 /* temp contains a constant address.
9743 On RISC machines where a constant address isn't valid,
9744 make some insns to get that address into a register. */
9745 if (modifier != EXPAND_CONST_ADDRESS
9746 && modifier != EXPAND_INITIALIZER
9747 && modifier != EXPAND_SUM
9748 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9749 MEM_ADDR_SPACE (temp)))
9750 return replace_equiv_address (temp,
9751 copy_rtx (XEXP (temp, 0)));
9752 return temp;
9753
9754 case SAVE_EXPR:
9755 {
9756 tree val = treeop0;
9757 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9758 inner_reference_p);
9759
9760 if (!SAVE_EXPR_RESOLVED_P (exp))
9761 {
9762 /* We can indeed still hit this case, typically via builtin
9763 expanders calling save_expr immediately before expanding
9764 something. Assume this means that we only have to deal
9765 with non-BLKmode values. */
9766 gcc_assert (GET_MODE (ret) != BLKmode);
9767
9768 val = build_decl (curr_insn_location (),
9769 VAR_DECL, NULL, TREE_TYPE (exp));
9770 DECL_ARTIFICIAL (val) = 1;
9771 DECL_IGNORED_P (val) = 1;
9772 treeop0 = val;
9773 TREE_OPERAND (exp, 0) = treeop0;
9774 SAVE_EXPR_RESOLVED_P (exp) = 1;
9775
9776 if (!CONSTANT_P (ret))
9777 ret = copy_to_reg (ret);
9778 SET_DECL_RTL (val, ret);
9779 }
9780
9781 return ret;
9782 }
9783
9784
9785 case CONSTRUCTOR:
9786 /* If we don't need the result, just ensure we evaluate any
9787 subexpressions. */
9788 if (ignore)
9789 {
9790 unsigned HOST_WIDE_INT idx;
9791 tree value;
9792
9793 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9794 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9795
9796 return const0_rtx;
9797 }
9798
9799 return expand_constructor (exp, target, modifier, false);
9800
9801 case TARGET_MEM_REF:
9802 {
9803 addr_space_t as
9804 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9805 enum insn_code icode;
9806 unsigned int align;
9807
9808 op0 = addr_for_mem_ref (exp, as, true);
9809 op0 = memory_address_addr_space (mode, op0, as);
9810 temp = gen_rtx_MEM (mode, op0);
9811 set_mem_attributes (temp, exp, 0);
9812 set_mem_addr_space (temp, as);
9813 align = get_object_alignment (exp);
9814 if (modifier != EXPAND_WRITE
9815 && modifier != EXPAND_MEMORY
9816 && mode != BLKmode
9817 && align < GET_MODE_ALIGNMENT (mode)
9818 /* If the target does not have special handling for unaligned
9819 loads of this mode, then it can use regular moves for them. */
9820 && ((icode = optab_handler (movmisalign_optab, mode))
9821 != CODE_FOR_nothing))
9822 {
9823 struct expand_operand ops[2];
9824
9825 /* We've already validated the memory, and we're creating a
9826 new pseudo destination. The predicates really can't fail,
9827 nor can the generator. */
9828 create_output_operand (&ops[0], NULL_RTX, mode);
9829 create_fixed_operand (&ops[1], temp);
9830 expand_insn (icode, 2, ops);
9831 temp = ops[0].value;
9832 }
9833 return temp;
9834 }
9835
9836 case MEM_REF:
9837 {
9838 addr_space_t as
9839 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9840 machine_mode address_mode;
9841 tree base = TREE_OPERAND (exp, 0);
9842 gimple def_stmt;
9843 enum insn_code icode;
9844 unsigned align;
9845 /* Handle expansion of non-aliased memory with non-BLKmode. That
9846 might end up in a register. */
9847 if (mem_ref_refers_to_non_mem_p (exp))
9848 {
9849 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9850 base = TREE_OPERAND (base, 0);
9851 if (offset == 0
9852 && tree_fits_uhwi_p (TYPE_SIZE (type))
9853 && (GET_MODE_BITSIZE (DECL_MODE (base))
9854 == tree_to_uhwi (TYPE_SIZE (type))))
9855 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9856 target, tmode, modifier);
9857 if (TYPE_MODE (type) == BLKmode)
9858 {
9859 temp = assign_stack_temp (DECL_MODE (base),
9860 GET_MODE_SIZE (DECL_MODE (base)));
9861 store_expr (base, temp, 0, false);
9862 temp = adjust_address (temp, BLKmode, offset);
9863 set_mem_size (temp, int_size_in_bytes (type));
9864 return temp;
9865 }
9866 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9867 bitsize_int (offset * BITS_PER_UNIT));
9868 return expand_expr (exp, target, tmode, modifier);
9869 }
9870 address_mode = targetm.addr_space.address_mode (as);
9871 base = TREE_OPERAND (exp, 0);
9872 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9873 {
9874 tree mask = gimple_assign_rhs2 (def_stmt);
9875 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9876 gimple_assign_rhs1 (def_stmt), mask);
9877 TREE_OPERAND (exp, 0) = base;
9878 }
9879 align = get_object_alignment (exp);
9880 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9881 op0 = memory_address_addr_space (mode, op0, as);
9882 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9883 {
9884 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9885 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9886 op0 = memory_address_addr_space (mode, op0, as);
9887 }
9888 temp = gen_rtx_MEM (mode, op0);
9889 set_mem_attributes (temp, exp, 0);
9890 set_mem_addr_space (temp, as);
9891 if (TREE_THIS_VOLATILE (exp))
9892 MEM_VOLATILE_P (temp) = 1;
9893 if (modifier != EXPAND_WRITE
9894 && modifier != EXPAND_MEMORY
9895 && !inner_reference_p
9896 && mode != BLKmode
9897 && align < GET_MODE_ALIGNMENT (mode))
9898 {
9899 if ((icode = optab_handler (movmisalign_optab, mode))
9900 != CODE_FOR_nothing)
9901 {
9902 struct expand_operand ops[2];
9903
9904 /* We've already validated the memory, and we're creating a
9905 new pseudo destination. The predicates really can't fail,
9906 nor can the generator. */
9907 create_output_operand (&ops[0], NULL_RTX, mode);
9908 create_fixed_operand (&ops[1], temp);
9909 expand_insn (icode, 2, ops);
9910 temp = ops[0].value;
9911 }
9912 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9913 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9914 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9915 (modifier == EXPAND_STACK_PARM
9916 ? NULL_RTX : target),
9917 mode, mode);
9918 }
9919 return temp;
9920 }
9921
9922 case ARRAY_REF:
9923
9924 {
9925 tree array = treeop0;
9926 tree index = treeop1;
9927 tree init;
9928
9929 /* Fold an expression like: "foo"[2].
9930 This is not done in fold so it won't happen inside &.
9931 Don't fold if this is for wide characters since it's too
9932 difficult to do correctly and this is a very rare case. */
9933
9934 if (modifier != EXPAND_CONST_ADDRESS
9935 && modifier != EXPAND_INITIALIZER
9936 && modifier != EXPAND_MEMORY)
9937 {
9938 tree t = fold_read_from_constant_string (exp);
9939
9940 if (t)
9941 return expand_expr (t, target, tmode, modifier);
9942 }
9943
9944 /* If this is a constant index into a constant array,
9945 just get the value from the array. Handle both the cases when
9946 we have an explicit constructor and when our operand is a variable
9947 that was declared const. */
9948
9949 if (modifier != EXPAND_CONST_ADDRESS
9950 && modifier != EXPAND_INITIALIZER
9951 && modifier != EXPAND_MEMORY
9952 && TREE_CODE (array) == CONSTRUCTOR
9953 && ! TREE_SIDE_EFFECTS (array)
9954 && TREE_CODE (index) == INTEGER_CST)
9955 {
9956 unsigned HOST_WIDE_INT ix;
9957 tree field, value;
9958
9959 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9960 field, value)
9961 if (tree_int_cst_equal (field, index))
9962 {
9963 if (!TREE_SIDE_EFFECTS (value))
9964 return expand_expr (fold (value), target, tmode, modifier);
9965 break;
9966 }
9967 }
9968
9969 else if (optimize >= 1
9970 && modifier != EXPAND_CONST_ADDRESS
9971 && modifier != EXPAND_INITIALIZER
9972 && modifier != EXPAND_MEMORY
9973 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9974 && TREE_CODE (index) == INTEGER_CST
9975 && (TREE_CODE (array) == VAR_DECL
9976 || TREE_CODE (array) == CONST_DECL)
9977 && (init = ctor_for_folding (array)) != error_mark_node)
9978 {
9979 if (init == NULL_TREE)
9980 {
9981 tree value = build_zero_cst (type);
9982 if (TREE_CODE (value) == CONSTRUCTOR)
9983 {
9984 /* If VALUE is a CONSTRUCTOR, this optimization is only
9985 useful if this doesn't store the CONSTRUCTOR into
9986 memory. If it does, it is more efficient to just
9987 load the data from the array directly. */
9988 rtx ret = expand_constructor (value, target,
9989 modifier, true);
9990 if (ret == NULL_RTX)
9991 value = NULL_TREE;
9992 }
9993
9994 if (value)
9995 return expand_expr (value, target, tmode, modifier);
9996 }
9997 else if (TREE_CODE (init) == CONSTRUCTOR)
9998 {
9999 unsigned HOST_WIDE_INT ix;
10000 tree field, value;
10001
10002 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10003 field, value)
10004 if (tree_int_cst_equal (field, index))
10005 {
10006 if (TREE_SIDE_EFFECTS (value))
10007 break;
10008
10009 if (TREE_CODE (value) == CONSTRUCTOR)
10010 {
10011 /* If VALUE is a CONSTRUCTOR, this
10012 optimization is only useful if
10013 this doesn't store the CONSTRUCTOR
10014 into memory. If it does, it is more
10015 efficient to just load the data from
10016 the array directly. */
10017 rtx ret = expand_constructor (value, target,
10018 modifier, true);
10019 if (ret == NULL_RTX)
10020 break;
10021 }
10022
10023 return
10024 expand_expr (fold (value), target, tmode, modifier);
10025 }
10026 }
10027 else if (TREE_CODE (init) == STRING_CST)
10028 {
10029 tree low_bound = array_ref_low_bound (exp);
10030 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10031
10032 /* Optimize the special case of a zero lower bound.
10033
10034 We convert the lower bound to sizetype to avoid problems
10035 with constant folding. E.g. suppose the lower bound is
10036 1 and its mode is QI. Without the conversion
10037 (ARRAY + (INDEX - (unsigned char)1))
10038 becomes
10039 (ARRAY + (-(unsigned char)1) + INDEX)
10040 which becomes
10041 (ARRAY + 255 + INDEX). Oops! */
10042 if (!integer_zerop (low_bound))
10043 index1 = size_diffop_loc (loc, index1,
10044 fold_convert_loc (loc, sizetype,
10045 low_bound));
10046
10047 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10048 {
10049 tree type = TREE_TYPE (TREE_TYPE (init));
10050 machine_mode mode = TYPE_MODE (type);
10051
10052 if (GET_MODE_CLASS (mode) == MODE_INT
10053 && GET_MODE_SIZE (mode) == 1)
10054 return gen_int_mode (TREE_STRING_POINTER (init)
10055 [TREE_INT_CST_LOW (index1)],
10056 mode);
10057 }
10058 }
10059 }
10060 }
10061 goto normal_inner_ref;
10062
10063 case COMPONENT_REF:
10064 /* If the operand is a CONSTRUCTOR, we can just extract the
10065 appropriate field if it is present. */
10066 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10067 {
10068 unsigned HOST_WIDE_INT idx;
10069 tree field, value;
10070
10071 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10072 idx, field, value)
10073 if (field == treeop1
10074 /* We can normally use the value of the field in the
10075 CONSTRUCTOR. However, if this is a bitfield in
10076 an integral mode that we can fit in a HOST_WIDE_INT,
10077 we must mask only the number of bits in the bitfield,
10078 since this is done implicitly by the constructor. If
10079 the bitfield does not meet either of those conditions,
10080 we can't do this optimization. */
10081 && (! DECL_BIT_FIELD (field)
10082 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10083 && (GET_MODE_PRECISION (DECL_MODE (field))
10084 <= HOST_BITS_PER_WIDE_INT))))
10085 {
10086 if (DECL_BIT_FIELD (field)
10087 && modifier == EXPAND_STACK_PARM)
10088 target = 0;
10089 op0 = expand_expr (value, target, tmode, modifier);
10090 if (DECL_BIT_FIELD (field))
10091 {
10092 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10093 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10094
10095 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10096 {
10097 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10098 imode);
10099 op0 = expand_and (imode, op0, op1, target);
10100 }
10101 else
10102 {
10103 int count = GET_MODE_PRECISION (imode) - bitsize;
10104
10105 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10106 target, 0);
10107 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10108 target, 0);
10109 }
10110 }
10111
10112 return op0;
10113 }
10114 }
10115 goto normal_inner_ref;
10116
10117 case BIT_FIELD_REF:
10118 case ARRAY_RANGE_REF:
10119 normal_inner_ref:
10120 {
10121 machine_mode mode1, mode2;
10122 HOST_WIDE_INT bitsize, bitpos;
10123 tree offset;
10124 int volatilep = 0, must_force_mem;
10125 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10126 &mode1, &unsignedp, &volatilep, true);
10127 rtx orig_op0, memloc;
10128 bool mem_attrs_from_type = false;
10129
10130 /* If we got back the original object, something is wrong. Perhaps
10131 we are evaluating an expression too early. In any event, don't
10132 infinitely recurse. */
10133 gcc_assert (tem != exp);
10134
10135 /* If TEM's type is a union of variable size, pass TARGET to the inner
10136 computation, since it will need a temporary and TARGET is known
10137 to be suitable. This occurs in unchecked conversion in Ada. */
10138 orig_op0 = op0
10139 = expand_expr_real (tem,
10140 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10141 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10142 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10143 != INTEGER_CST)
10144 && modifier != EXPAND_STACK_PARM
10145 ? target : NULL_RTX),
10146 VOIDmode,
10147 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10148 NULL, true);
10149
10150 /* If the field has a mode, we want to access it in the
10151 field's mode, not the computed mode.
10152 If a MEM has VOIDmode (external with incomplete type),
10153 use BLKmode for it instead. */
10154 if (MEM_P (op0))
10155 {
10156 if (mode1 != VOIDmode)
10157 op0 = adjust_address (op0, mode1, 0);
10158 else if (GET_MODE (op0) == VOIDmode)
10159 op0 = adjust_address (op0, BLKmode, 0);
10160 }
10161
10162 mode2
10163 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10164
10165 /* If we have either an offset, a BLKmode result, or a reference
10166 outside the underlying object, we must force it to memory.
10167 Such a case can occur in Ada if we have unchecked conversion
10168 of an expression from a scalar type to an aggregate type or
10169 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10170 passed a partially uninitialized object or a view-conversion
10171 to a larger size. */
10172 must_force_mem = (offset
10173 || mode1 == BLKmode
10174 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10175
10176 /* Handle CONCAT first. */
10177 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10178 {
10179 if (bitpos == 0
10180 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10181 return op0;
10182 if (bitpos == 0
10183 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10184 && bitsize)
10185 {
10186 op0 = XEXP (op0, 0);
10187 mode2 = GET_MODE (op0);
10188 }
10189 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10190 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10191 && bitpos
10192 && bitsize)
10193 {
10194 op0 = XEXP (op0, 1);
10195 bitpos = 0;
10196 mode2 = GET_MODE (op0);
10197 }
10198 else
10199 /* Otherwise force into memory. */
10200 must_force_mem = 1;
10201 }
10202
10203 /* If this is a constant, put it in a register if it is a legitimate
10204 constant and we don't need a memory reference. */
10205 if (CONSTANT_P (op0)
10206 && mode2 != BLKmode
10207 && targetm.legitimate_constant_p (mode2, op0)
10208 && !must_force_mem)
10209 op0 = force_reg (mode2, op0);
10210
10211 /* Otherwise, if this is a constant, try to force it to the constant
10212 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10213 is a legitimate constant. */
10214 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10215 op0 = validize_mem (memloc);
10216
10217 /* Otherwise, if this is a constant, or if the object is not in memory
10218 and needs to be, put it there. */
10219 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10220 {
10221 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10222 emit_move_insn (memloc, op0);
10223 op0 = memloc;
10224 mem_attrs_from_type = true;
10225 }
10226
10227 if (offset)
10228 {
10229 machine_mode address_mode;
10230 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10231 EXPAND_SUM);
10232
10233 gcc_assert (MEM_P (op0));
10234
10235 address_mode = get_address_mode (op0);
10236 if (GET_MODE (offset_rtx) != address_mode)
10237 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10238
10239 /* See the comment in expand_assignment for the rationale. */
10240 if (mode1 != VOIDmode
10241 && bitpos != 0
10242 && bitsize > 0
10243 && (bitpos % bitsize) == 0
10244 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10245 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10246 {
10247 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10248 bitpos = 0;
10249 }
10250
10251 op0 = offset_address (op0, offset_rtx,
10252 highest_pow2_factor (offset));
10253 }
10254
10255 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10256 record its alignment as BIGGEST_ALIGNMENT. */
10257 if (MEM_P (op0) && bitpos == 0 && offset != 0
10258 && is_aligning_offset (offset, tem))
10259 set_mem_align (op0, BIGGEST_ALIGNMENT);
10260
10261 /* Don't forget about volatility even if this is a bitfield. */
10262 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10263 {
10264 if (op0 == orig_op0)
10265 op0 = copy_rtx (op0);
10266
10267 MEM_VOLATILE_P (op0) = 1;
10268 }
10269
10270 /* In cases where an aligned union has an unaligned object
10271 as a field, we might be extracting a BLKmode value from
10272 an integer-mode (e.g., SImode) object. Handle this case
10273 by doing the extract into an object as wide as the field
10274 (which we know to be the width of a basic mode), then
10275 storing into memory, and changing the mode to BLKmode. */
10276 if (mode1 == VOIDmode
10277 || REG_P (op0) || GET_CODE (op0) == SUBREG
10278 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10279 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10280 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10281 && modifier != EXPAND_CONST_ADDRESS
10282 && modifier != EXPAND_INITIALIZER
10283 && modifier != EXPAND_MEMORY)
10284 /* If the bitfield is volatile and the bitsize
10285 is narrower than the access size of the bitfield,
10286 we need to extract bitfields from the access. */
10287 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10288 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10289 && mode1 != BLKmode
10290 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10291 /* If the field isn't aligned enough to fetch as a memref,
10292 fetch it as a bit field. */
10293 || (mode1 != BLKmode
10294 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10295 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10296 || (MEM_P (op0)
10297 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10298 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10299 && modifier != EXPAND_MEMORY
10300 && ((modifier == EXPAND_CONST_ADDRESS
10301 || modifier == EXPAND_INITIALIZER)
10302 ? STRICT_ALIGNMENT
10303 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10304 || (bitpos % BITS_PER_UNIT != 0)))
10305 /* If the type and the field are a constant size and the
10306 size of the type isn't the same size as the bitfield,
10307 we must use bitfield operations. */
10308 || (bitsize >= 0
10309 && TYPE_SIZE (TREE_TYPE (exp))
10310 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10311 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10312 bitsize)))
10313 {
10314 machine_mode ext_mode = mode;
10315
10316 if (ext_mode == BLKmode
10317 && ! (target != 0 && MEM_P (op0)
10318 && MEM_P (target)
10319 && bitpos % BITS_PER_UNIT == 0))
10320 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10321
10322 if (ext_mode == BLKmode)
10323 {
10324 if (target == 0)
10325 target = assign_temp (type, 1, 1);
10326
10327 /* ??? Unlike the similar test a few lines below, this one is
10328 very likely obsolete. */
10329 if (bitsize == 0)
10330 return target;
10331
10332 /* In this case, BITPOS must start at a byte boundary and
10333 TARGET, if specified, must be a MEM. */
10334 gcc_assert (MEM_P (op0)
10335 && (!target || MEM_P (target))
10336 && !(bitpos % BITS_PER_UNIT));
10337
10338 emit_block_move (target,
10339 adjust_address (op0, VOIDmode,
10340 bitpos / BITS_PER_UNIT),
10341 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10342 / BITS_PER_UNIT),
10343 (modifier == EXPAND_STACK_PARM
10344 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10345
10346 return target;
10347 }
10348
10349 /* If we have nothing to extract, the result will be 0 for targets
10350 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10351 return 0 for the sake of consistency, as reading a zero-sized
10352 bitfield is valid in Ada and the value is fully specified. */
10353 if (bitsize == 0)
10354 return const0_rtx;
10355
10356 op0 = validize_mem (op0);
10357
10358 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10359 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10360
10361 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10362 (modifier == EXPAND_STACK_PARM
10363 ? NULL_RTX : target),
10364 ext_mode, ext_mode);
10365
10366 /* If the result is a record type and BITSIZE is narrower than
10367 the mode of OP0, an integral mode, and this is a big endian
10368 machine, we must put the field into the high-order bits. */
10369 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10370 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10371 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10372 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10373 GET_MODE_BITSIZE (GET_MODE (op0))
10374 - bitsize, op0, 1);
10375
10376 /* If the result type is BLKmode, store the data into a temporary
10377 of the appropriate type, but with the mode corresponding to the
10378 mode for the data we have (op0's mode). */
10379 if (mode == BLKmode)
10380 {
10381 rtx new_rtx
10382 = assign_stack_temp_for_type (ext_mode,
10383 GET_MODE_BITSIZE (ext_mode),
10384 type);
10385 emit_move_insn (new_rtx, op0);
10386 op0 = copy_rtx (new_rtx);
10387 PUT_MODE (op0, BLKmode);
10388 }
10389
10390 return op0;
10391 }
10392
10393 /* If the result is BLKmode, use that to access the object
10394 now as well. */
10395 if (mode == BLKmode)
10396 mode1 = BLKmode;
10397
10398 /* Get a reference to just this component. */
10399 if (modifier == EXPAND_CONST_ADDRESS
10400 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10401 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10402 else
10403 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10404
10405 if (op0 == orig_op0)
10406 op0 = copy_rtx (op0);
10407
10408 /* If op0 is a temporary because of forcing to memory, pass only the
10409 type to set_mem_attributes so that the original expression is never
10410 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10411 if (mem_attrs_from_type)
10412 set_mem_attributes (op0, type, 0);
10413 else
10414 set_mem_attributes (op0, exp, 0);
10415
10416 if (REG_P (XEXP (op0, 0)))
10417 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10418
10419 MEM_VOLATILE_P (op0) |= volatilep;
10420 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10421 || modifier == EXPAND_CONST_ADDRESS
10422 || modifier == EXPAND_INITIALIZER)
10423 return op0;
10424
10425 if (target == 0)
10426 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10427
10428 convert_move (target, op0, unsignedp);
10429 return target;
10430 }
10431
10432 case OBJ_TYPE_REF:
10433 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10434
10435 case CALL_EXPR:
10436 /* All valid uses of __builtin_va_arg_pack () are removed during
10437 inlining. */
10438 if (CALL_EXPR_VA_ARG_PACK (exp))
10439 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10440 {
10441 tree fndecl = get_callee_fndecl (exp), attr;
10442
10443 if (fndecl
10444 && (attr = lookup_attribute ("error",
10445 DECL_ATTRIBUTES (fndecl))) != NULL)
10446 error ("%Kcall to %qs declared with attribute error: %s",
10447 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10448 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10449 if (fndecl
10450 && (attr = lookup_attribute ("warning",
10451 DECL_ATTRIBUTES (fndecl))) != NULL)
10452 warning_at (tree_nonartificial_location (exp),
10453 0, "%Kcall to %qs declared with attribute warning: %s",
10454 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10455 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10456
10457 /* Check for a built-in function. */
10458 if (fndecl && DECL_BUILT_IN (fndecl))
10459 {
10460 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10461 return expand_builtin (exp, target, subtarget, tmode, ignore);
10462 }
10463 }
10464 return expand_call (exp, target, ignore);
10465
10466 case VIEW_CONVERT_EXPR:
10467 op0 = NULL_RTX;
10468
10469 /* If we are converting to BLKmode, try to avoid an intermediate
10470 temporary by fetching an inner memory reference. */
10471 if (mode == BLKmode
10472 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10473 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10474 && handled_component_p (treeop0))
10475 {
10476 machine_mode mode1;
10477 HOST_WIDE_INT bitsize, bitpos;
10478 tree offset;
10479 int unsignedp;
10480 int volatilep = 0;
10481 tree tem
10482 = get_inner_reference (treeop0, &bitsize, &bitpos,
10483 &offset, &mode1, &unsignedp, &volatilep,
10484 true);
10485 rtx orig_op0;
10486
10487 /* ??? We should work harder and deal with non-zero offsets. */
10488 if (!offset
10489 && (bitpos % BITS_PER_UNIT) == 0
10490 && bitsize >= 0
10491 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10492 {
10493 /* See the normal_inner_ref case for the rationale. */
10494 orig_op0
10495 = expand_expr_real (tem,
10496 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10497 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10498 != INTEGER_CST)
10499 && modifier != EXPAND_STACK_PARM
10500 ? target : NULL_RTX),
10501 VOIDmode,
10502 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10503 NULL, true);
10504
10505 if (MEM_P (orig_op0))
10506 {
10507 op0 = orig_op0;
10508
10509 /* Get a reference to just this component. */
10510 if (modifier == EXPAND_CONST_ADDRESS
10511 || modifier == EXPAND_SUM
10512 || modifier == EXPAND_INITIALIZER)
10513 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10514 else
10515 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10516
10517 if (op0 == orig_op0)
10518 op0 = copy_rtx (op0);
10519
10520 set_mem_attributes (op0, treeop0, 0);
10521 if (REG_P (XEXP (op0, 0)))
10522 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10523
10524 MEM_VOLATILE_P (op0) |= volatilep;
10525 }
10526 }
10527 }
10528
10529 if (!op0)
10530 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10531 NULL, inner_reference_p);
10532
10533 /* If the input and output modes are both the same, we are done. */
10534 if (mode == GET_MODE (op0))
10535 ;
10536 /* If neither mode is BLKmode, and both modes have the same precision,
10537 then we can use gen_lowpart. */
10538 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10539 && (GET_MODE_PRECISION (mode)
10540 == GET_MODE_PRECISION (GET_MODE (op0)))
10541 && !COMPLEX_MODE_P (GET_MODE (op0)))
10542 {
10543 if (GET_CODE (op0) == SUBREG)
10544 op0 = force_reg (GET_MODE (op0), op0);
10545 temp = gen_lowpart_common (mode, op0);
10546 if (temp)
10547 op0 = temp;
10548 else
10549 {
10550 if (!REG_P (op0) && !MEM_P (op0))
10551 op0 = force_reg (GET_MODE (op0), op0);
10552 op0 = gen_lowpart (mode, op0);
10553 }
10554 }
10555 /* If both types are integral, convert from one mode to the other. */
10556 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10557 op0 = convert_modes (mode, GET_MODE (op0), op0,
10558 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10559 /* If the output type is a bit-field type, do an extraction. */
10560 else if (reduce_bit_field)
10561 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10562 TYPE_UNSIGNED (type), NULL_RTX,
10563 mode, mode);
10564 /* As a last resort, spill op0 to memory, and reload it in a
10565 different mode. */
10566 else if (!MEM_P (op0))
10567 {
10568 /* If the operand is not a MEM, force it into memory. Since we
10569 are going to be changing the mode of the MEM, don't call
10570 force_const_mem for constants because we don't allow pool
10571 constants to change mode. */
10572 tree inner_type = TREE_TYPE (treeop0);
10573
10574 gcc_assert (!TREE_ADDRESSABLE (exp));
10575
10576 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10577 target
10578 = assign_stack_temp_for_type
10579 (TYPE_MODE (inner_type),
10580 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10581
10582 emit_move_insn (target, op0);
10583 op0 = target;
10584 }
10585
10586 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10587 output type is such that the operand is known to be aligned, indicate
10588 that it is. Otherwise, we need only be concerned about alignment for
10589 non-BLKmode results. */
10590 if (MEM_P (op0))
10591 {
10592 enum insn_code icode;
10593
10594 if (TYPE_ALIGN_OK (type))
10595 {
10596 /* ??? Copying the MEM without substantially changing it might
10597 run afoul of the code handling volatile memory references in
10598 store_expr, which assumes that TARGET is returned unmodified
10599 if it has been used. */
10600 op0 = copy_rtx (op0);
10601 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10602 }
10603 else if (modifier != EXPAND_WRITE
10604 && modifier != EXPAND_MEMORY
10605 && !inner_reference_p
10606 && mode != BLKmode
10607 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10608 {
10609 /* If the target has special handling for unaligned loads
10610 of this mode, then use it. */
10611 if ((icode = optab_handler (movmisalign_optab, mode))
10612 != CODE_FOR_nothing)
10613 {
10614 rtx reg, insn;
10615
10616 op0 = adjust_address (op0, mode, 0);
10617 /* We've already validated the memory, and we're creating a
10618 new pseudo destination. The predicates really can't
10619 fail. */
10620 reg = gen_reg_rtx (mode);
10621
10622 /* Nor can the insn generator. */
10623 insn = GEN_FCN (icode) (reg, op0);
10624 emit_insn (insn);
10625 return reg;
10626 }
10627 else if (STRICT_ALIGNMENT)
10628 {
10629 tree inner_type = TREE_TYPE (treeop0);
10630 HOST_WIDE_INT temp_size
10631 = MAX (int_size_in_bytes (inner_type),
10632 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10633 rtx new_rtx
10634 = assign_stack_temp_for_type (mode, temp_size, type);
10635 rtx new_with_op0_mode
10636 = adjust_address (new_rtx, GET_MODE (op0), 0);
10637
10638 gcc_assert (!TREE_ADDRESSABLE (exp));
10639
10640 if (GET_MODE (op0) == BLKmode)
10641 emit_block_move (new_with_op0_mode, op0,
10642 GEN_INT (GET_MODE_SIZE (mode)),
10643 (modifier == EXPAND_STACK_PARM
10644 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10645 else
10646 emit_move_insn (new_with_op0_mode, op0);
10647
10648 op0 = new_rtx;
10649 }
10650 }
10651
10652 op0 = adjust_address (op0, mode, 0);
10653 }
10654
10655 return op0;
10656
10657 case MODIFY_EXPR:
10658 {
10659 tree lhs = treeop0;
10660 tree rhs = treeop1;
10661 gcc_assert (ignore);
10662
10663 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10664 of size 1. In this case, unless we need the result of the
10665 assignment, we can do this more efficiently with a
10666 test followed by an assignment, if necessary.
10667
10668 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10669 things change so we do, this code should be enhanced to
10670 support it. */
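	/* A sketch of the kind of source this matches (hypothetical types):

	     struct s { unsigned a : 1; unsigned b : 1; } x;
	     x.a |= x.b;

	   expands to a conditional jump on x.b and, only when x.b is set,
	   a store of the constant 1 into x.a, instead of a read-modify-write
	   of x.a.  */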
10671 if (TREE_CODE (lhs) == COMPONENT_REF
10672 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10673 || TREE_CODE (rhs) == BIT_AND_EXPR)
10674 && TREE_OPERAND (rhs, 0) == lhs
10675 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10676 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10677 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10678 {
10679 rtx_code_label *label = gen_label_rtx ();
10680 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10681 do_jump (TREE_OPERAND (rhs, 1),
10682 value ? label : 0,
10683 value ? 0 : label, -1);
10684 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10685 false);
10686 do_pending_stack_adjust ();
10687 emit_label (label);
10688 return const0_rtx;
10689 }
10690
10691 expand_assignment (lhs, rhs, false);
10692 return const0_rtx;
10693 }
10694
10695 case ADDR_EXPR:
10696 return expand_expr_addr_expr (exp, target, tmode, modifier);
10697
10698 case REALPART_EXPR:
10699 op0 = expand_normal (treeop0);
10700 return read_complex_part (op0, false);
10701
10702 case IMAGPART_EXPR:
10703 op0 = expand_normal (treeop0);
10704 return read_complex_part (op0, true);
10705
10706 case RETURN_EXPR:
10707 case LABEL_EXPR:
10708 case GOTO_EXPR:
10709 case SWITCH_EXPR:
10710 case ASM_EXPR:
10711 /* Expanded in cfgexpand.c. */
10712 gcc_unreachable ();
10713
10714 case TRY_CATCH_EXPR:
10715 case CATCH_EXPR:
10716 case EH_FILTER_EXPR:
10717 case TRY_FINALLY_EXPR:
10718 /* Lowered by tree-eh.c. */
10719 gcc_unreachable ();
10720
10721 case WITH_CLEANUP_EXPR:
10722 case CLEANUP_POINT_EXPR:
10723 case TARGET_EXPR:
10724 case CASE_LABEL_EXPR:
10725 case VA_ARG_EXPR:
10726 case BIND_EXPR:
10727 case INIT_EXPR:
10728 case CONJ_EXPR:
10729 case COMPOUND_EXPR:
10730 case PREINCREMENT_EXPR:
10731 case PREDECREMENT_EXPR:
10732 case POSTINCREMENT_EXPR:
10733 case POSTDECREMENT_EXPR:
10734 case LOOP_EXPR:
10735 case EXIT_EXPR:
10736 case COMPOUND_LITERAL_EXPR:
10737 /* Lowered by gimplify.c. */
10738 gcc_unreachable ();
10739
10740 case FDESC_EXPR:
10741 /* Function descriptors are not valid except as
10742 initialization constants, and should not be expanded. */
10743 gcc_unreachable ();
10744
10745 case WITH_SIZE_EXPR:
10746 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10747 have pulled out the size to use in whatever context it needed. */
10748 return expand_expr_real (treeop0, original_target, tmode,
10749 modifier, alt_rtl, inner_reference_p);
10750
10751 default:
10752 return expand_expr_real_2 (&ops, target, tmode, modifier);
10753 }
10754 }
10755 \f
10756 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10757 signedness of TYPE), possibly returning the result in TARGET. */
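/* A rough worked example (hypothetical widths): reducing an SImode value to
   a signed 3-bit type shifts left by 29 and then arithmetically right by 29,
   so 5 (binary 101) becomes -3; for an unsigned 3-bit type the value is
   simply masked with 7.  */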
10758 static rtx
10759 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10760 {
10761 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10762 if (target && GET_MODE (target) != GET_MODE (exp))
10763 target = 0;
10764 /* For constant values, reduce using build_int_cst_type. */
10765 if (CONST_INT_P (exp))
10766 {
10767 HOST_WIDE_INT value = INTVAL (exp);
10768 tree t = build_int_cst_type (type, value);
10769 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10770 }
10771 else if (TYPE_UNSIGNED (type))
10772 {
10773 machine_mode mode = GET_MODE (exp);
10774 rtx mask = immed_wide_int_const
10775 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10776 return expand_and (mode, exp, mask, target);
10777 }
10778 else
10779 {
10780 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10781 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10782 exp, count, target, 0);
10783 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10784 exp, count, target, 0);
10785 }
10786 }
10787 \f
10788 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10789 when applied to the address of EXP produces an address known to be
10790 aligned more than BIGGEST_ALIGNMENT. */
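/* A sketch of the pattern recognized (constants are illustrative): an offset
   of the tree form

     (- (sizetype) &exp) & (64 - 1)

   rounds &exp + offset up to the next 64-byte boundary, assuming 64 exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT on the target.  */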
10791
10792 static int
10793 is_aligning_offset (const_tree offset, const_tree exp)
10794 {
10795 /* Strip off any conversions. */
10796 while (CONVERT_EXPR_P (offset))
10797 offset = TREE_OPERAND (offset, 0);
10798
10799 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10800 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
10801 if (TREE_CODE (offset) != BIT_AND_EXPR
10802 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10803 || compare_tree_int (TREE_OPERAND (offset, 1),
10804 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10805 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10806 return 0;
10807
10808 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10809 It must be NEGATE_EXPR. Then strip any more conversions. */
10810 offset = TREE_OPERAND (offset, 0);
10811 while (CONVERT_EXPR_P (offset))
10812 offset = TREE_OPERAND (offset, 0);
10813
10814 if (TREE_CODE (offset) != NEGATE_EXPR)
10815 return 0;
10816
10817 offset = TREE_OPERAND (offset, 0);
10818 while (CONVERT_EXPR_P (offset))
10819 offset = TREE_OPERAND (offset, 0);
10820
10821 /* This must now be the address of EXP. */
10822 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10823 }
10824 \f
10825 /* Return the tree node if ARG corresponds to a string constant, or zero
10826 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10827 in bytes within the string that ARG is accessing. The type of the
10828 offset will be `sizetype'. */
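/* For instance (a hypothetical call), for an ARG of the form &"hello"[2] or
   "hello" + 2 this returns the STRING_CST "hello" and sets *PTR_OFFSET to 2;
   a VAR_DECL whose initializer is a string literal is handled the same way,
   with the initializer returned instead.  */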
10829
10830 tree
10831 string_constant (tree arg, tree *ptr_offset)
10832 {
10833 tree array, offset, lower_bound;
10834 STRIP_NOPS (arg);
10835
10836 if (TREE_CODE (arg) == ADDR_EXPR)
10837 {
10838 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10839 {
10840 *ptr_offset = size_zero_node;
10841 return TREE_OPERAND (arg, 0);
10842 }
10843 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10844 {
10845 array = TREE_OPERAND (arg, 0);
10846 offset = size_zero_node;
10847 }
10848 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10849 {
10850 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10851 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10852 if (TREE_CODE (array) != STRING_CST
10853 && TREE_CODE (array) != VAR_DECL)
10854 return 0;
10855
10856 /* Check if the array has a nonzero lower bound. */
10857 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10858 if (!integer_zerop (lower_bound))
10859 {
10860 /* If the offset and lower bound aren't both constants, return 0. */
10861 if (TREE_CODE (lower_bound) != INTEGER_CST)
10862 return 0;
10863 if (TREE_CODE (offset) != INTEGER_CST)
10864 return 0;
10865 /* Adjust offset by the lower bound. */
10866 offset = size_diffop (fold_convert (sizetype, offset),
10867 fold_convert (sizetype, lower_bound));
10868 }
10869 }
10870 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10871 {
10872 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10873 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10874 if (TREE_CODE (array) != ADDR_EXPR)
10875 return 0;
10876 array = TREE_OPERAND (array, 0);
10877 if (TREE_CODE (array) != STRING_CST
10878 && TREE_CODE (array) != VAR_DECL)
10879 return 0;
10880 }
10881 else
10882 return 0;
10883 }
10884 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10885 {
10886 tree arg0 = TREE_OPERAND (arg, 0);
10887 tree arg1 = TREE_OPERAND (arg, 1);
10888
10889 STRIP_NOPS (arg0);
10890 STRIP_NOPS (arg1);
10891
10892 if (TREE_CODE (arg0) == ADDR_EXPR
10893 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10894 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10895 {
10896 array = TREE_OPERAND (arg0, 0);
10897 offset = arg1;
10898 }
10899 else if (TREE_CODE (arg1) == ADDR_EXPR
10900 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10901 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10902 {
10903 array = TREE_OPERAND (arg1, 0);
10904 offset = arg0;
10905 }
10906 else
10907 return 0;
10908 }
10909 else
10910 return 0;
10911
10912 if (TREE_CODE (array) == STRING_CST)
10913 {
10914 *ptr_offset = fold_convert (sizetype, offset);
10915 return array;
10916 }
10917 else if (TREE_CODE (array) == VAR_DECL
10918 || TREE_CODE (array) == CONST_DECL)
10919 {
10920 int length;
10921 tree init = ctor_for_folding (array);
10922
10923 /* Variables initialized to string literals can be handled too. */
10924 if (init == error_mark_node
10925 || !init
10926 || TREE_CODE (init) != STRING_CST)
10927 return 0;
10928
10929 /* Avoid const char foo[4] = "abcde"; */
10930 if (DECL_SIZE_UNIT (array) == NULL_TREE
10931 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10932 || (length = TREE_STRING_LENGTH (init)) <= 0
10933 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10934 return 0;
10935
10936 /* If the variable is bigger than the string literal, OFFSET must be
10937 constant and within the bounds of the string literal. */
10938 offset = fold_convert (sizetype, offset);
10939 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10940 && (! tree_fits_uhwi_p (offset)
10941 || compare_tree_int (offset, length) >= 0))
10942 return 0;
10943
10944 *ptr_offset = offset;
10945 return init;
10946 }
10947
10948 return 0;
10949 }
10950 \f
10951 /* Generate code to calculate OPS, an exploded expression,
10952 using a store-flag instruction, and return an rtx for the result.
10953 OPS reflects a comparison.
10954
10955 If TARGET is nonzero, store the result there if convenient.
10956
10957 Return zero if there is no suitable set-flag instruction
10958 available on this machine.
10959
10960 Once expand_expr has been called on the arguments of the comparison,
10961 we are committed to doing the store flag, since it is not safe to
10962 re-evaluate the expression. We emit the store-flag insn by calling
10963 emit_store_flag, but only expand the arguments if we have a reason
10964 to believe that emit_store_flag will be successful. If we think that
10965 it will, but it isn't, we have to simulate the store-flag with a
10966 set/jump/set sequence. */
10967
10968 static rtx
10969 do_store_flag (sepops ops, rtx target, machine_mode mode)
10970 {
10971 enum rtx_code code;
10972 tree arg0, arg1, type;
10973 tree tem;
10974 machine_mode operand_mode;
10975 int unsignedp;
10976 rtx op0, op1;
10977 rtx subtarget = target;
10978 location_t loc = ops->location;
10979
10980 arg0 = ops->op0;
10981 arg1 = ops->op1;
10982
10983 /* Don't crash if the comparison was erroneous. */
10984 if (arg0 == error_mark_node || arg1 == error_mark_node)
10985 return const0_rtx;
10986
10987 type = TREE_TYPE (arg0);
10988 operand_mode = TYPE_MODE (type);
10989 unsignedp = TYPE_UNSIGNED (type);
10990
10991 /* We won't bother with BLKmode store-flag operations because it would mean
10992 passing a lot of information to emit_store_flag. */
10993 if (operand_mode == BLKmode)
10994 return 0;
10995
10996 /* We won't bother with store-flag operations involving function pointers
10997 when function pointers must be canonicalized before comparisons. */
10998 #ifdef HAVE_canonicalize_funcptr_for_compare
10999 if (HAVE_canonicalize_funcptr_for_compare
11000 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11001 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11002 == FUNCTION_TYPE))
11003 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11004 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11005 == FUNCTION_TYPE))))
11006 return 0;
11007 #endif
11008
11009 STRIP_NOPS (arg0);
11010 STRIP_NOPS (arg1);
11011
11012 /* For vector typed comparisons emit code to generate the desired
11013 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11014 expander for this. */
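  /* E.g. (a sketch using the GNU vector extension), for
       typedef int v4si __attribute__ ((vector_size (16)));
     comparing two v4si values lane-wise yields a v4si mask whose elements
     are -1 where the comparison holds and 0 where it does not.  */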
11015 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11016 {
11017 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11018 tree if_true = constant_boolean_node (true, ops->type);
11019 tree if_false = constant_boolean_node (false, ops->type);
11020 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11021 }
11022
11023 /* Get the rtx comparison code to use. We know that EXP is a comparison
11024 operation of some type. Some comparisons against 1 and -1 can be
11025 converted to comparisons with zero. Do so here so that the tests
11026 below will be aware that we have a comparison with zero. These
11027 tests will not catch constants in the first operand, but constants
11028 are rarely passed as the first operand. */
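  /* For instance: x < 1 becomes x <= 0, x >= 1 becomes x > 0, and for signed
     operands x <= -1 becomes x < 0 and x > -1 becomes x >= 0, matching the
     cases in the switch below.  */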
11029
11030 switch (ops->code)
11031 {
11032 case EQ_EXPR:
11033 code = EQ;
11034 break;
11035 case NE_EXPR:
11036 code = NE;
11037 break;
11038 case LT_EXPR:
11039 if (integer_onep (arg1))
11040 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11041 else
11042 code = unsignedp ? LTU : LT;
11043 break;
11044 case LE_EXPR:
11045 if (! unsignedp && integer_all_onesp (arg1))
11046 arg1 = integer_zero_node, code = LT;
11047 else
11048 code = unsignedp ? LEU : LE;
11049 break;
11050 case GT_EXPR:
11051 if (! unsignedp && integer_all_onesp (arg1))
11052 arg1 = integer_zero_node, code = GE;
11053 else
11054 code = unsignedp ? GTU : GT;
11055 break;
11056 case GE_EXPR:
11057 if (integer_onep (arg1))
11058 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11059 else
11060 code = unsignedp ? GEU : GE;
11061 break;
11062
11063 case UNORDERED_EXPR:
11064 code = UNORDERED;
11065 break;
11066 case ORDERED_EXPR:
11067 code = ORDERED;
11068 break;
11069 case UNLT_EXPR:
11070 code = UNLT;
11071 break;
11072 case UNLE_EXPR:
11073 code = UNLE;
11074 break;
11075 case UNGT_EXPR:
11076 code = UNGT;
11077 break;
11078 case UNGE_EXPR:
11079 code = UNGE;
11080 break;
11081 case UNEQ_EXPR:
11082 code = UNEQ;
11083 break;
11084 case LTGT_EXPR:
11085 code = LTGT;
11086 break;
11087
11088 default:
11089 gcc_unreachable ();
11090 }
11091
11092 /* Put a constant second. */
11093 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11094 || TREE_CODE (arg0) == FIXED_CST)
11095 {
11096 tem = arg0; arg0 = arg1; arg1 = tem;
11097 code = swap_condition (code);
11098 }
11099
11100 /* If this is an equality or inequality test of a single bit, we can
11101 do this by shifting the bit being tested to the low-order bit and
11102 masking the result with the constant 1. If the condition was EQ,
11103 we xor it with 1. This does not require an scc insn and is faster
11104 than an scc insn even if we have it.
11105
11106 The code to make this transformation was moved into fold_single_bit_test,
11107 so we just call into the folder and expand its result. */
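  /* Roughly (an illustrative source-level view): (x & 8) != 0 becomes
     (x >> 3) & 1, and (x & 8) == 0 becomes ((x >> 3) & 1) ^ 1.  */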
11108
11109 if ((code == NE || code == EQ)
11110 && integer_zerop (arg1)
11111 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11112 {
11113 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11114 if (srcstmt
11115 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11116 {
11117 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11118 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11119 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11120 gimple_assign_rhs1 (srcstmt),
11121 gimple_assign_rhs2 (srcstmt));
11122 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11123 if (temp)
11124 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11125 }
11126 }
11127
11128 if (! get_subtarget (target)
11129 || GET_MODE (subtarget) != operand_mode)
11130 subtarget = 0;
11131
11132 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11133
11134 if (target == 0)
11135 target = gen_reg_rtx (mode);
11136
11137 /* Try a cstore if possible. */
11138 return emit_store_flag_force (target, code, op0, op1,
11139 operand_mode, unsignedp,
11140 (TYPE_PRECISION (ops->type) == 1
11141 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11142 }
11143 \f
11144
11145 /* Stubs in case we haven't got a casesi insn. */
11146 #ifndef HAVE_casesi
11147 # define HAVE_casesi 0
11148 # define gen_casesi(a, b, c, d, e) (0)
11149 # define CODE_FOR_casesi CODE_FOR_nothing
11150 #endif
11151
11152 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11153 0 otherwise (i.e. if there is no casesi instruction).
11154
11155 DEFAULT_PROBABILITY is the probability of jumping to the default
11156 label. */
11157 int
11158 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11159 rtx table_label, rtx default_label, rtx fallback_label,
11160 int default_probability)
11161 {
11162 struct expand_operand ops[5];
11163 machine_mode index_mode = SImode;
11164 rtx op1, op2, index;
11165
11166 if (! HAVE_casesi)
11167 return 0;
11168
11169 /* Convert the index to SImode. */
11170 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11171 {
11172 machine_mode omode = TYPE_MODE (index_type);
11173 rtx rangertx = expand_normal (range);
11174
11175 /* We must handle the endpoints in the original mode. */
11176 index_expr = build2 (MINUS_EXPR, index_type,
11177 index_expr, minval);
11178 minval = integer_zero_node;
11179 index = expand_normal (index_expr);
11180 if (default_label)
11181 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11182 omode, 1, default_label,
11183 default_probability);
11184 /* Now we can safely truncate. */
11185 index = convert_to_mode (index_mode, index, 0);
11186 }
11187 else
11188 {
11189 if (TYPE_MODE (index_type) != index_mode)
11190 {
11191 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11192 index_expr = fold_convert (index_type, index_expr);
11193 }
11194
11195 index = expand_normal (index_expr);
11196 }
11197
11198 do_pending_stack_adjust ();
11199
11200 op1 = expand_normal (minval);
11201 op2 = expand_normal (range);
11202
11203 create_input_operand (&ops[0], index, index_mode);
11204 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11205 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11206 create_fixed_operand (&ops[3], table_label);
11207 create_fixed_operand (&ops[4], (default_label
11208 ? default_label
11209 : fallback_label));
11210 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11211 return 1;
11212 }
11213
11214 /* Attempt to generate a tablejump instruction; same concept. */
11215 #ifndef HAVE_tablejump
11216 #define HAVE_tablejump 0
11217 #define gen_tablejump(x, y) (0)
11218 #endif
11219
11220 /* Subroutine of the next function.
11221
11222 INDEX is the value being switched on, with the lowest value
11223 in the table already subtracted.
11224 MODE is its expected mode (needed if INDEX is constant).
11225 RANGE is the length of the jump table.
11226 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11227
11228 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11229 index value is out of range.
11230 DEFAULT_PROBABILITY is the probability of jumping to
11231 the default label. */
11232
11233 static void
11234 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11235 rtx default_label, int default_probability)
11236 {
11237 rtx temp, vector;
11238
11239 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11240 cfun->cfg->max_jumptable_ents = INTVAL (range);
11241
11242 /* Do an unsigned comparison (in the proper mode) between the index
11243 expression and the value which represents the length of the range.
11244 Since we just finished subtracting the lower bound of the range
11245 from the index expression, this comparison allows us to simultaneously
11246 check that the original index expression value is both greater than
11247 or equal to the minimum value of the range and less than or equal to
11248 the maximum value of the range. */
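  /* In C terms (a sketch), after INDEX has had the low bound subtracted,
     the single test

       (unsigned) index > (unsigned) (high - low)

     rejects both original values below the low bound (they wrap around to
     large unsigned values) and values above the high bound.  */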
11249
11250 if (default_label)
11251 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11252 default_label, default_probability);
11253
11254
11255 /* If index is in range, it must fit in Pmode.
11256 Convert to Pmode so we can index with it. */
11257 if (mode != Pmode)
11258 index = convert_to_mode (Pmode, index, 1);
11259
11260 /* Don't let a MEM slip through, because then INDEX that comes
11261 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11262 and break_out_memory_refs will go to work on it and mess it up. */
11263 #ifdef PIC_CASE_VECTOR_ADDRESS
11264 if (flag_pic && !REG_P (index))
11265 index = copy_to_mode_reg (Pmode, index);
11266 #endif
11267
11268 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11269 GET_MODE_SIZE, because this indicates how large insns are. The other
11270 uses should all be Pmode, because they are addresses. This code
11271 could fail if addresses and insns are not the same size. */
11272 index = simplify_gen_binary (MULT, Pmode, index,
11273 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11274 Pmode));
11275 index = simplify_gen_binary (PLUS, Pmode, index,
11276 gen_rtx_LABEL_REF (Pmode, table_label));
11277
11278 #ifdef PIC_CASE_VECTOR_ADDRESS
11279 if (flag_pic)
11280 index = PIC_CASE_VECTOR_ADDRESS (index);
11281 else
11282 #endif
11283 index = memory_address (CASE_VECTOR_MODE, index);
11284 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11285 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11286 convert_move (temp, vector, 0);
11287
11288 emit_jump_insn (gen_tablejump (temp, table_label));
11289
11290 /* If we are generating PIC code or if the table is PC-relative, the
11291 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11292 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11293 emit_barrier ();
11294 }
11295
11296 int
11297 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11298 rtx table_label, rtx default_label, int default_probability)
11299 {
11300 rtx index;
11301
11302 if (! HAVE_tablejump)
11303 return 0;
11304
11305 index_expr = fold_build2 (MINUS_EXPR, index_type,
11306 fold_convert (index_type, index_expr),
11307 fold_convert (index_type, minval));
11308 index = expand_normal (index_expr);
11309 do_pending_stack_adjust ();
11310
11311 do_tablejump (index, TYPE_MODE (index_type),
11312 convert_modes (TYPE_MODE (index_type),
11313 TYPE_MODE (TREE_TYPE (range)),
11314 expand_normal (range),
11315 TYPE_UNSIGNED (TREE_TYPE (range))),
11316 table_label, default_label, default_probability);
11317 return 1;
11318 }
11319
11320 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11321 static rtx
11322 const_vector_from_tree (tree exp)
11323 {
11324 rtvec v;
11325 unsigned i;
11326 int units;
11327 tree elt;
11328 machine_mode inner, mode;
11329
11330 mode = TYPE_MODE (TREE_TYPE (exp));
11331
11332 if (initializer_zerop (exp))
11333 return CONST0_RTX (mode);
11334
11335 units = GET_MODE_NUNITS (mode);
11336 inner = GET_MODE_INNER (mode);
11337
11338 v = rtvec_alloc (units);
11339
11340 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11341 {
11342 elt = VECTOR_CST_ELT (exp, i);
11343
11344 if (TREE_CODE (elt) == REAL_CST)
11345 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11346 inner);
11347 else if (TREE_CODE (elt) == FIXED_CST)
11348 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11349 inner);
11350 else
11351 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11352 }
11353
11354 return gen_rtx_CONST_VECTOR (mode, v);
11355 }
11356
11357 /* Build a decl for a personality function given a language prefix. */
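/* For example (assuming the usual front-end prefixes), LANG "gxx" with
   DWARF2 or target unwinding yields the familiar __gxx_personality_v0,
   while SJLJ unwinding would yield __gxx_personality_sj0.  */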
11358
11359 tree
11360 build_personality_function (const char *lang)
11361 {
11362 const char *unwind_and_version;
11363 tree decl, type;
11364 char *name;
11365
11366 switch (targetm_common.except_unwind_info (&global_options))
11367 {
11368 case UI_NONE:
11369 return NULL;
11370 case UI_SJLJ:
11371 unwind_and_version = "_sj0";
11372 break;
11373 case UI_DWARF2:
11374 case UI_TARGET:
11375 unwind_and_version = "_v0";
11376 break;
11377 case UI_SEH:
11378 unwind_and_version = "_seh0";
11379 break;
11380 default:
11381 gcc_unreachable ();
11382 }
11383
11384 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11385
11386 type = build_function_type_list (integer_type_node, integer_type_node,
11387 long_long_unsigned_type_node,
11388 ptr_type_node, ptr_type_node, NULL_TREE);
11389 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11390 get_identifier (name), type);
11391 DECL_ARTIFICIAL (decl) = 1;
11392 DECL_EXTERNAL (decl) = 1;
11393 TREE_PUBLIC (decl) = 1;
11394
11395 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11396 are the flags assigned by targetm.encode_section_info. */
11397 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11398
11399 return decl;
11400 }
11401
11402 /* Extracts the personality function of DECL and returns the corresponding
11403 libfunc. */
11404
11405 rtx
11406 get_personality_function (tree decl)
11407 {
11408 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11409 enum eh_personality_kind pk;
11410
11411 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11412 if (pk == eh_personality_none)
11413 return NULL;
11414
11415 if (!personality
11416 && pk == eh_personality_any)
11417 personality = lang_hooks.eh_personality ();
11418
11419 if (pk == eh_personality_lang)
11420 gcc_assert (personality != NULL_TREE);
11421
11422 return XEXP (DECL_RTL (personality), 0);
11423 }
11424
11425 #include "gt-expr.h"