gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70 #include "builtins.h"
71 #include "tree-ssa.h"
72
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80
81
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
89
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces_d
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
106
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
109
110 struct store_by_pieces_d
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
122
123 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
124 struct move_by_pieces_d *);
125 static bool block_move_libcall_safe_for_call_parm (void);
126 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
127 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
128 unsigned HOST_WIDE_INT);
129 static tree emit_block_move_libcall_fn (int);
130 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
131 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
132 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
133 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
134 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
135 struct store_by_pieces_d *);
136 static tree clear_storage_libcall_fn (int);
137 static rtx compress_float_constant (rtx, rtx);
138 static rtx get_subtarget (rtx);
139 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
140 HOST_WIDE_INT, enum machine_mode,
141 tree, int, alias_set_type);
142 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
143 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
144 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
145 enum machine_mode, tree, alias_set_type, bool);
146
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
148
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (sepops, rtx, enum machine_mode);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
160
161 /* This macro is used to determine whether move_by_pieces should be called
162 to perform a structure copy. */
163 #ifndef MOVE_BY_PIECES_P
164 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
165 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
166 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
167 #endif
168
169 /* This macro is used to determine whether clear_by_pieces should be
170 called to clear storage. */
171 #ifndef CLEAR_BY_PIECES_P
172 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
173 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
174 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
175 #endif
176
177 /* This macro is used to determine whether store_by_pieces should be
178 called to "memset" storage with byte values other than zero. */
179 #ifndef SET_BY_PIECES_P
180 #define SET_BY_PIECES_P(SIZE, ALIGN) \
181 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
182 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
183 #endif
184
185 /* This macro is used to determine whether store_by_pieces should be
186 called to "memcpy" storage when the source is a constant string. */
187 #ifndef STORE_BY_PIECES_P
188 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
189 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
190 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
191 #endif
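
/* Illustrative sketch, not part of the original file: how the *_BY_PIECES_P
   heuristics above are typically consulted.  A caller counts the piecewise
   move insns and compares them against the target's ratio before choosing
   between inline moves and a library call.  The helper name below is
   hypothetical.  */
#if 0
static bool
sketch_prefer_move_by_pieces (unsigned HOST_WIDE_INT size, unsigned int align)
{
  /* MOVE_BY_PIECES_P expands to a move_by_pieces_ninsns count compared
     against MOVE_RATIO for the current speed/size trade-off.  */
  return MOVE_BY_PIECES_P (size, align) != 0;
}
#endif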
192 \f
193 /* This is run to set up which modes can be used
194 directly in memory and to initialize the block move optab. It is run
195 at the beginning of compilation and when the target is reinitialized. */
196
197 void
198 init_expr_target (void)
199 {
200 rtx insn, pat;
201 enum machine_mode mode;
202 int num_clobbers;
203 rtx mem, mem1;
204 rtx reg;
205
206 /* Try indexing by frame ptr and try by stack ptr.
207 It is known that on the Convex the stack ptr isn't a valid index.
208 With luck, one or the other is valid on any machine. */
209 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
210 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
211
212 /* A scratch register we can modify in-place below to avoid
213 useless RTL allocations. */
214 reg = gen_rtx_REG (VOIDmode, -1);
215
216 insn = rtx_alloc (INSN);
217 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
218 PATTERN (insn) = pat;
219
220 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
221 mode = (enum machine_mode) ((int) mode + 1))
222 {
223 int regno;
224
225 direct_load[(int) mode] = direct_store[(int) mode] = 0;
226 PUT_MODE (mem, mode);
227 PUT_MODE (mem1, mode);
228 PUT_MODE (reg, mode);
229
230 /* See if there is some register that can be used in this mode and
231 directly loaded or stored from memory. */
232
233 if (mode != VOIDmode && mode != BLKmode)
234 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
235 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
236 regno++)
237 {
238 if (! HARD_REGNO_MODE_OK (regno, mode))
239 continue;
240
241 SET_REGNO (reg, regno);
242
243 SET_SRC (pat) = mem;
244 SET_DEST (pat) = reg;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_load[(int) mode] = 1;
247
248 SET_SRC (pat) = mem1;
249 SET_DEST (pat) = reg;
250 if (recog (pat, insn, &num_clobbers) >= 0)
251 direct_load[(int) mode] = 1;
252
253 SET_SRC (pat) = reg;
254 SET_DEST (pat) = mem;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_store[(int) mode] = 1;
257
258 SET_SRC (pat) = reg;
259 SET_DEST (pat) = mem1;
260 if (recog (pat, insn, &num_clobbers) >= 0)
261 direct_store[(int) mode] = 1;
262 }
263 }
264
265 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
266
267 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
268 mode = GET_MODE_WIDER_MODE (mode))
269 {
270 enum machine_mode srcmode;
271 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
272 srcmode = GET_MODE_WIDER_MODE (srcmode))
273 {
274 enum insn_code ic;
275
276 ic = can_extend_p (mode, srcmode, 0);
277 if (ic == CODE_FOR_nothing)
278 continue;
279
280 PUT_MODE (mem, srcmode);
281
282 if (insn_operand_matches (ic, 1, mem))
283 float_extend_from_mem[mode][srcmode] = true;
284 }
285 }
286 }
287
288 /* This is run at the start of compiling a function. */
289
290 void
291 init_expr (void)
292 {
293 memset (&crtl->expr, 0, sizeof (crtl->expr));
294 }
295 \f
296 /* Copy data from FROM to TO, where the machine modes are not the same.
297 Both modes may be integer, or both may be floating, or both may be
298 fixed-point.
299 UNSIGNEDP should be nonzero if FROM is an unsigned type.
300 This causes zero-extension instead of sign-extension. */
301
302 void
303 convert_move (rtx to, rtx from, int unsignedp)
304 {
305 enum machine_mode to_mode = GET_MODE (to);
306 enum machine_mode from_mode = GET_MODE (from);
307 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
308 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
309 enum insn_code code;
310 rtx libcall;
311
312 /* rtx code for making an equivalent value. */
313 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
314 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
315
316
317 gcc_assert (to_real == from_real);
318 gcc_assert (to_mode != BLKmode);
319 gcc_assert (from_mode != BLKmode);
320
321 /* If the source and destination are already the same, then there's
322 nothing to do. */
323 if (to == from)
324 return;
325
326 /* If FROM is a SUBREG that indicates that we have already done at least
327 the required extension, strip it. We don't handle such SUBREGs as
328 TO here. */
329
330 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
331 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
332 >= GET_MODE_PRECISION (to_mode))
333 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
334 from = gen_lowpart (to_mode, from), from_mode = to_mode;
335
336 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
337
338 if (to_mode == from_mode
339 || (from_mode == VOIDmode && CONSTANT_P (from)))
340 {
341 emit_move_insn (to, from);
342 return;
343 }
344
345 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
346 {
347 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
348
349 if (VECTOR_MODE_P (to_mode))
350 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
351 else
352 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
353
354 emit_move_insn (to, from);
355 return;
356 }
357
358 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
359 {
360 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
361 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
362 return;
363 }
364
365 if (to_real)
366 {
367 rtx value, insns;
368 convert_optab tab;
369
370 gcc_assert ((GET_MODE_PRECISION (from_mode)
371 != GET_MODE_PRECISION (to_mode))
372 || (DECIMAL_FLOAT_MODE_P (from_mode)
373 != DECIMAL_FLOAT_MODE_P (to_mode)));
374
375 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
376 /* Conversion between decimal float and binary float, same size. */
377 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
378 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
379 tab = sext_optab;
380 else
381 tab = trunc_optab;
382
383 /* Try converting directly if the insn is supported. */
384
385 code = convert_optab_handler (tab, to_mode, from_mode);
386 if (code != CODE_FOR_nothing)
387 {
388 emit_unop_insn (code, to, from,
389 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
390 return;
391 }
392
393 /* Otherwise use a libcall. */
394 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
395
396 /* Is this conversion implemented yet? */
397 gcc_assert (libcall);
398
399 start_sequence ();
400 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
401 1, from, from_mode);
402 insns = get_insns ();
403 end_sequence ();
404 emit_libcall_block (insns, to, value,
405 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
406 from)
407 : gen_rtx_FLOAT_EXTEND (to_mode, from));
408 return;
409 }
410
411 /* Handle pointer conversion. */ /* SPEE 900220. */
412 /* Targets are expected to provide conversion insns between PxImode and
413 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
414 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
415 {
416 enum machine_mode full_mode
417 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
418
419 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
420 != CODE_FOR_nothing);
421
422 if (full_mode != from_mode)
423 from = convert_to_mode (full_mode, from, unsignedp);
424 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
425 to, from, UNKNOWN);
426 return;
427 }
428 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
429 {
430 rtx new_from;
431 enum machine_mode full_mode
432 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
433 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
434 enum insn_code icode;
435
436 icode = convert_optab_handler (ctab, full_mode, from_mode);
437 gcc_assert (icode != CODE_FOR_nothing);
438
439 if (to_mode == full_mode)
440 {
441 emit_unop_insn (icode, to, from, UNKNOWN);
442 return;
443 }
444
445 new_from = gen_reg_rtx (full_mode);
446 emit_unop_insn (icode, new_from, from, UNKNOWN);
447
448 /* else proceed to integer conversions below. */
449 from_mode = full_mode;
450 from = new_from;
451 }
452
453 /* Make sure both are fixed-point modes or both are not. */
454 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
455 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
456 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
457 {
458 /* If we widen from_mode to to_mode and they are in the same class,
459 we won't saturate the result.
460 Otherwise, always saturate the result to play safe. */
461 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
462 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
463 expand_fixed_convert (to, from, 0, 0);
464 else
465 expand_fixed_convert (to, from, 0, 1);
466 return;
467 }
468
469 /* Now both modes are integers. */
470
471 /* Handle expanding beyond a word. */
472 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
473 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
474 {
475 rtx insns;
476 rtx lowpart;
477 rtx fill_value;
478 rtx lowfrom;
479 int i;
480 enum machine_mode lowpart_mode;
481 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
482
483 /* Try converting directly if the insn is supported. */
484 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
485 != CODE_FOR_nothing)
486 {
487 /* If FROM is a SUBREG, put it into a register. Do this
488 so that we always generate the same set of insns for
489 better cse'ing; if an intermediate assignment occurred,
490 we won't be doing the operation directly on the SUBREG. */
491 if (optimize > 0 && GET_CODE (from) == SUBREG)
492 from = force_reg (from_mode, from);
493 emit_unop_insn (code, to, from, equiv_code);
494 return;
495 }
496 /* Next, try converting via full word. */
497 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
498 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
499 != CODE_FOR_nothing))
500 {
501 rtx word_to = gen_reg_rtx (word_mode);
502 if (REG_P (to))
503 {
504 if (reg_overlap_mentioned_p (to, from))
505 from = force_reg (from_mode, from);
506 emit_clobber (to);
507 }
508 convert_move (word_to, from, unsignedp);
509 emit_unop_insn (code, to, word_to, equiv_code);
510 return;
511 }
512
513 /* No special multiword conversion insn; do it by hand. */
514 start_sequence ();
515
516 /* Since we will turn this into a no conflict block, we must ensure that
517 the source does not overlap the target, so force it into an isolated
518 register when it might. Likewise for any MEM input, since the
519 conversion sequence might require several references to it and we
520 must ensure we're getting the same value every time. */
521
522 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
523 from = force_reg (from_mode, from);
524
525 /* Get a copy of FROM widened to a word, if necessary. */
526 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
527 lowpart_mode = word_mode;
528 else
529 lowpart_mode = from_mode;
530
531 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
532
533 lowpart = gen_lowpart (lowpart_mode, to);
534 emit_move_insn (lowpart, lowfrom);
535
536 /* Compute the value to put in each remaining word. */
537 if (unsignedp)
538 fill_value = const0_rtx;
539 else
540 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
541 LT, lowfrom, const0_rtx,
542 lowpart_mode, 0, -1);
543
544 /* Fill the remaining words. */
545 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
546 {
547 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
548 rtx subword = operand_subword (to, index, 1, to_mode);
549
550 gcc_assert (subword);
551
552 if (fill_value != subword)
553 emit_move_insn (subword, fill_value);
554 }
555
556 insns = get_insns ();
557 end_sequence ();
558
559 emit_insn (insns);
560 return;
561 }
562
563 /* Truncating multi-word to a word or less. */
564 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
565 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
566 {
567 if (!((MEM_P (from)
568 && ! MEM_VOLATILE_P (from)
569 && direct_load[(int) to_mode]
570 && ! mode_dependent_address_p (XEXP (from, 0),
571 MEM_ADDR_SPACE (from)))
572 || REG_P (from)
573 || GET_CODE (from) == SUBREG))
574 from = force_reg (from_mode, from);
575 convert_move (to, gen_lowpart (word_mode, from), 0);
576 return;
577 }
578
579 /* Now follow all the conversions between integers
580 no more than a word long. */
581
582 /* For truncation, usually we can just refer to FROM in a narrower mode. */
583 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
584 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
585 {
586 if (!((MEM_P (from)
587 && ! MEM_VOLATILE_P (from)
588 && direct_load[(int) to_mode]
589 && ! mode_dependent_address_p (XEXP (from, 0),
590 MEM_ADDR_SPACE (from)))
591 || REG_P (from)
592 || GET_CODE (from) == SUBREG))
593 from = force_reg (from_mode, from);
594 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
595 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
596 from = copy_to_reg (from);
597 emit_move_insn (to, gen_lowpart (to_mode, from));
598 return;
599 }
600
601 /* Handle extension. */
602 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
603 {
604 /* Convert directly if that works. */
605 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
606 != CODE_FOR_nothing)
607 {
608 emit_unop_insn (code, to, from, equiv_code);
609 return;
610 }
611 else
612 {
613 enum machine_mode intermediate;
614 rtx tmp;
615 int shift_amount;
616
617 /* Search for a mode to convert via. */
618 for (intermediate = from_mode; intermediate != VOIDmode;
619 intermediate = GET_MODE_WIDER_MODE (intermediate))
620 if (((can_extend_p (to_mode, intermediate, unsignedp)
621 != CODE_FOR_nothing)
622 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
623 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
624 && (can_extend_p (intermediate, from_mode, unsignedp)
625 != CODE_FOR_nothing))
626 {
627 convert_move (to, convert_to_mode (intermediate, from,
628 unsignedp), unsignedp);
629 return;
630 }
631
632 /* No suitable intermediate mode.
633 Generate what we need with shifts. */
634 shift_amount = (GET_MODE_PRECISION (to_mode)
635 - GET_MODE_PRECISION (from_mode));
636 from = gen_lowpart (to_mode, force_reg (from_mode, from));
637 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
638 to, unsignedp);
639 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
640 to, unsignedp);
641 if (tmp != to)
642 emit_move_insn (to, tmp);
643 return;
644 }
645 }
646
647 /* Support special truncate insns for certain modes. */
648 if (convert_optab_handler (trunc_optab, to_mode,
649 from_mode) != CODE_FOR_nothing)
650 {
651 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
652 to, from, UNKNOWN);
653 return;
654 }
655
656 /* Handle truncation of volatile memrefs, and so on;
657 the things that couldn't be truncated directly,
658 and for which there was no special instruction.
659
660 ??? Code above formerly short-circuited this, for most integer
661 mode pairs, with a force_reg in from_mode followed by a recursive
662 call to this routine. Appears always to have been wrong. */
663 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
664 {
665 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
666 emit_move_insn (to, temp);
667 return;
668 }
669
670 /* Mode combination is not recognized. */
671 gcc_unreachable ();
672 }
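
/* Illustrative sketch, not part of the original file: convert_move in its
   simplest form, truncating a DFmode value into an SFmode destination.
   Both pseudos here are hypothetical.  */
#if 0
static void
sketch_truncate_df_to_sf (void)
{
  rtx src = gen_reg_rtx (DFmode);
  rtx dst = gen_reg_rtx (SFmode);
  /* UNSIGNEDP only matters for integer extensions; it is not consulted
     on the floating-point path above.  */
  convert_move (dst, src, 0);
}
#endif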
673
674 /* Return an rtx for a value that would result
675 from converting X to mode MODE.
676 Both X and MODE may be floating, or both integer.
677 UNSIGNEDP is nonzero if X is an unsigned value.
678 This can be done by referring to a part of X in place
679 or by copying to a new temporary with conversion. */
680
681 rtx
682 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
683 {
684 return convert_modes (mode, VOIDmode, x, unsignedp);
685 }
686
687 /* Return an rtx for a value that would result
688 from converting X from mode OLDMODE to mode MODE.
689 Both modes may be floating, or both integer.
690 UNSIGNEDP is nonzero if X is an unsigned value.
691
692 This can be done by referring to a part of X in place
693 or by copying to a new temporary with conversion.
694
695 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
696
697 rtx
698 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
699 {
700 rtx temp;
701
702 /* If FROM is a SUBREG that indicates that we have already done at least
703 the required extension, strip it. */
704
705 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
706 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
707 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
708 x = gen_lowpart (mode, SUBREG_REG (x));
709
710 if (GET_MODE (x) != VOIDmode)
711 oldmode = GET_MODE (x);
712
713 if (mode == oldmode)
714 return x;
715
716 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
717 {
718 /* If the caller did not tell us the old mode, then there is not
719 much to do with respect to canonicalization. We have to
720 assume that all the bits are significant. */
721 if (GET_MODE_CLASS (oldmode) != MODE_INT)
722 oldmode = MAX_MODE_INT;
723 wide_int w = wide_int::from (std::make_pair (x, oldmode),
724 GET_MODE_PRECISION (mode),
725 unsignedp ? UNSIGNED : SIGNED);
726 return immed_wide_int_const (w, mode);
727 }
728
729 /* We can do this with a gen_lowpart if both desired and current modes
730 are integer, and this is either a constant integer, a register, or a
731 non-volatile MEM. */
732 if (GET_MODE_CLASS (mode) == MODE_INT
733 && GET_MODE_CLASS (oldmode) == MODE_INT
734 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
735 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
736 || (REG_P (x)
737 && (!HARD_REGISTER_P (x)
738 || HARD_REGNO_MODE_OK (REGNO (x), mode))
739 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
740
741 return gen_lowpart (mode, x);
742
743 /* Converting an integer constant into a vector mode is always equivalent
744 to a subreg operation. */
745 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
746 {
747 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
748 return simplify_gen_subreg (mode, x, oldmode, 0);
749 }
750
751 temp = gen_reg_rtx (mode);
752 convert_move (temp, x, unsignedp);
753 return temp;
754 }
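
/* Illustrative sketch, not part of the original file: a typical call to
   convert_modes when a caller needs a value widened before use.  QI_REG
   is a hypothetical QImode rtx supplied by the caller.  */
#if 0
static rtx
sketch_widen_qi_to_si (rtx qi_reg)
{
  /* Zero-extend into SImode; convert_modes may reuse a lowpart or emit a
     real extension via convert_move, as decided above.  */
  return convert_modes (SImode, QImode, qi_reg, /*unsignedp=*/1);
}
#endif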
755 \f
756 /* Return the largest alignment we can use for doing a move (or store)
757 of MAX_PIECES. ALIGN is the largest alignment we could use. */
758
759 static unsigned int
760 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
761 {
762 enum machine_mode tmode;
763
764 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
765 if (align >= GET_MODE_ALIGNMENT (tmode))
766 align = GET_MODE_ALIGNMENT (tmode);
767 else
768 {
769 enum machine_mode tmode, xmode;
770
771 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
772 tmode != VOIDmode;
773 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
774 if (GET_MODE_SIZE (tmode) > max_pieces
775 || SLOW_UNALIGNED_ACCESS (tmode, align))
776 break;
777
778 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
779 }
780
781 return align;
782 }
783
784 /* Return the widest integer mode narrower than SIZE bytes. If no such
785 mode can be found, return VOIDmode. */
786
787 static enum machine_mode
788 widest_int_mode_for_size (unsigned int size)
789 {
790 enum machine_mode tmode, mode = VOIDmode;
791
792 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
793 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
794 if (GET_MODE_SIZE (tmode) < size)
795 mode = tmode;
796
797 return mode;
798 }
799
800 /* STORE_MAX_PIECES is the number of bytes at a time that we can
801 store efficiently. Due to internal GCC limitations, this is
802 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
803 for an immediate constant. */
804
805 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
806
807 /* Determine whether the LEN bytes can be moved by using several move
808 instructions. Return nonzero if a call to move_by_pieces should
809 succeed. */
810
811 int
812 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
813 unsigned int align ATTRIBUTE_UNUSED)
814 {
815 return MOVE_BY_PIECES_P (len, align);
816 }
817
818 /* Generate several move instructions to copy LEN bytes from block FROM to
819 block TO. (These are MEM rtx's with BLKmode).
820
821 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
822 used to push FROM to the stack.
823
824 ALIGN is maximum stack alignment we can assume.
825
826 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
827 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
828 stpcpy. */
829
830 rtx
831 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
832 unsigned int align, int endp)
833 {
834 struct move_by_pieces_d data;
835 enum machine_mode to_addr_mode;
836 enum machine_mode from_addr_mode = get_address_mode (from);
837 rtx to_addr, from_addr = XEXP (from, 0);
838 unsigned int max_size = MOVE_MAX_PIECES + 1;
839 enum insn_code icode;
840
841 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
842
843 data.offset = 0;
844 data.from_addr = from_addr;
845 if (to)
846 {
847 to_addr_mode = get_address_mode (to);
848 to_addr = XEXP (to, 0);
849 data.to = to;
850 data.autinc_to
851 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
852 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
853 data.reverse
854 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
855 }
856 else
857 {
858 to_addr_mode = VOIDmode;
859 to_addr = NULL_RTX;
860 data.to = NULL_RTX;
861 data.autinc_to = 1;
862 #ifdef STACK_GROWS_DOWNWARD
863 data.reverse = 1;
864 #else
865 data.reverse = 0;
866 #endif
867 }
868 data.to_addr = to_addr;
869 data.from = from;
870 data.autinc_from
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
874
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
878 data.len = len;
879
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
885 {
886 /* Find the mode of the largest move...
887 MODE might not be used depending on the definitions of the
888 USE_* macros below. */
889 enum machine_mode mode ATTRIBUTE_UNUSED
890 = widest_int_mode_for_size (max_size);
891
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
893 {
894 data.from_addr = copy_to_mode_reg (from_addr_mode,
895 plus_constant (from_addr_mode,
896 from_addr, len));
897 data.autinc_from = 1;
898 data.explicit_inc_from = -1;
899 }
900 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 {
902 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
903 data.autinc_from = 1;
904 data.explicit_inc_from = 1;
905 }
906 if (!data.autinc_from && CONSTANT_P (from_addr))
907 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
908 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 {
910 data.to_addr = copy_to_mode_reg (to_addr_mode,
911 plus_constant (to_addr_mode,
912 to_addr, len));
913 data.autinc_to = 1;
914 data.explicit_inc_to = -1;
915 }
916 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
917 {
918 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
919 data.autinc_to = 1;
920 data.explicit_inc_to = 1;
921 }
922 if (!data.autinc_to && CONSTANT_P (to_addr))
923 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
924 }
925
926 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
927
928 /* First move what we can in the largest integer mode, then go to
929 successively smaller modes. */
930
931 while (max_size > 1 && data.len > 0)
932 {
933 enum machine_mode mode = widest_int_mode_for_size (max_size);
934
935 if (mode == VOIDmode)
936 break;
937
938 icode = optab_handler (mov_optab, mode);
939 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
940 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
941
942 max_size = GET_MODE_SIZE (mode);
943 }
944
945 /* The code above should have handled everything. */
946 gcc_assert (!data.len);
947
948 if (endp)
949 {
950 rtx to1;
951
952 gcc_assert (!data.reverse);
953 if (data.autinc_to)
954 {
955 if (endp == 2)
956 {
957 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
958 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
959 else
960 data.to_addr = copy_to_mode_reg (to_addr_mode,
961 plus_constant (to_addr_mode,
962 data.to_addr,
963 -1));
964 }
965 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
966 data.offset);
967 }
968 else
969 {
970 if (endp == 2)
971 --data.offset;
972 to1 = adjust_address (data.to, QImode, data.offset);
973 }
974 return to1;
975 }
976 else
977 return data.to;
978 }
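
/* Illustrative sketch, not part of the original file: a guarded use of
   move_by_pieces for a small constant-sized copy.  DST and SRC are assumed
   to be BLKmode MEMs whose alignment is already recorded.  */
#if 0
static void
sketch_small_copy (rtx dst, rtx src, unsigned HOST_WIDE_INT nbytes)
{
  unsigned int align = MIN (MEM_ALIGN (dst), MEM_ALIGN (src));
  if (can_move_by_pieces (nbytes, align))
    /* ENDP == 0: the mempcpy/stpcpy-style return value is not needed.  */
    move_by_pieces (dst, src, nbytes, align, 0);
  else
    emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif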
979
980 /* Return number of insns required to move L bytes by pieces.
981 ALIGN (in bits) is maximum alignment we can assume. */
982
983 unsigned HOST_WIDE_INT
984 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
985 unsigned int max_size)
986 {
987 unsigned HOST_WIDE_INT n_insns = 0;
988
989 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
990
991 while (max_size > 1 && l > 0)
992 {
993 enum machine_mode mode;
994 enum insn_code icode;
995
996 mode = widest_int_mode_for_size (max_size);
997
998 if (mode == VOIDmode)
999 break;
1000
1001 icode = optab_handler (mov_optab, mode);
1002 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1003 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1004
1005 max_size = GET_MODE_SIZE (mode);
1006 }
1007
1008 gcc_assert (!l);
1009 return n_insns;
1010 }
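
/* Illustrative worked example, not part of the original file: on a target
   with 4-byte words, MOVE_MAX_PIECES == 4 and fully aligned operands,
   copying 7 bytes costs one SImode, one HImode and one QImode move, so
   move_by_pieces_ninsns (7, 32, MOVE_MAX_PIECES + 1) returns 3.  */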
1011
1012 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1013 with move instructions for mode MODE. GENFUN is the gen_... function
1014 to make a move insn for that mode. DATA has all the other info. */
1015
1016 static void
1017 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1018 struct move_by_pieces_d *data)
1019 {
1020 unsigned int size = GET_MODE_SIZE (mode);
1021 rtx to1 = NULL_RTX, from1;
1022
1023 while (data->len >= size)
1024 {
1025 if (data->reverse)
1026 data->offset -= size;
1027
1028 if (data->to)
1029 {
1030 if (data->autinc_to)
1031 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1032 data->offset);
1033 else
1034 to1 = adjust_address (data->to, mode, data->offset);
1035 }
1036
1037 if (data->autinc_from)
1038 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1039 data->offset);
1040 else
1041 from1 = adjust_address (data->from, mode, data->offset);
1042
1043 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1044 emit_insn (gen_add2_insn (data->to_addr,
1045 gen_int_mode (-(HOST_WIDE_INT) size,
1046 GET_MODE (data->to_addr))));
1047 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1048 emit_insn (gen_add2_insn (data->from_addr,
1049 gen_int_mode (-(HOST_WIDE_INT) size,
1050 GET_MODE (data->from_addr))));
1051
1052 if (data->to)
1053 emit_insn ((*genfun) (to1, from1));
1054 else
1055 {
1056 #ifdef PUSH_ROUNDING
1057 emit_single_push_insn (mode, from1, NULL);
1058 #else
1059 gcc_unreachable ();
1060 #endif
1061 }
1062
1063 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1064 emit_insn (gen_add2_insn (data->to_addr,
1065 gen_int_mode (size,
1066 GET_MODE (data->to_addr))));
1067 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1068 emit_insn (gen_add2_insn (data->from_addr,
1069 gen_int_mode (size,
1070 GET_MODE (data->from_addr))));
1071
1072 if (! data->reverse)
1073 data->offset += size;
1074
1075 data->len -= size;
1076 }
1077 }
1078 \f
1079 /* Emit code to move a block Y to a block X. This may be done with
1080 string-move instructions, with multiple scalar move instructions,
1081 or with a library call.
1082
1083 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1084 SIZE is an rtx that says how long they are.
1085 ALIGN is the maximum alignment we can assume they have.
1086 METHOD describes what kind of copy this is, and what mechanisms may be used.
1087 MIN_SIZE is the minimal size of the block to move.
1088 MAX_SIZE is the maximal size of the block to move; if it cannot be
1089 represented in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1090
1091 Return the address of the new block, if memcpy is called and returns it,
1092 0 otherwise. */
1093
1094 rtx
1095 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1096 unsigned int expected_align, HOST_WIDE_INT expected_size,
1097 unsigned HOST_WIDE_INT min_size,
1098 unsigned HOST_WIDE_INT max_size,
1099 unsigned HOST_WIDE_INT probable_max_size)
1100 {
1101 bool may_use_call;
1102 rtx retval = 0;
1103 unsigned int align;
1104
1105 gcc_assert (size);
1106 if (CONST_INT_P (size)
1107 && INTVAL (size) == 0)
1108 return 0;
1109
1110 switch (method)
1111 {
1112 case BLOCK_OP_NORMAL:
1113 case BLOCK_OP_TAILCALL:
1114 may_use_call = true;
1115 break;
1116
1117 case BLOCK_OP_CALL_PARM:
1118 may_use_call = block_move_libcall_safe_for_call_parm ();
1119
1120 /* Make inhibit_defer_pop nonzero around the library call
1121 to force it to pop the arguments right away. */
1122 NO_DEFER_POP;
1123 break;
1124
1125 case BLOCK_OP_NO_LIBCALL:
1126 may_use_call = false;
1127 break;
1128
1129 default:
1130 gcc_unreachable ();
1131 }
1132
1133 gcc_assert (MEM_P (x) && MEM_P (y));
1134 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1135 gcc_assert (align >= BITS_PER_UNIT);
1136
1137 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1138 block copy is more efficient for other large modes, e.g. DCmode. */
1139 x = adjust_address (x, BLKmode, 0);
1140 y = adjust_address (y, BLKmode, 0);
1141
1142 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1143 can be incorrect is coming from __builtin_memcpy. */
1144 if (CONST_INT_P (size))
1145 {
1146 x = shallow_copy_rtx (x);
1147 y = shallow_copy_rtx (y);
1148 set_mem_size (x, INTVAL (size));
1149 set_mem_size (y, INTVAL (size));
1150 }
1151
1152 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1153 move_by_pieces (x, y, INTVAL (size), align, 0);
1154 else if (emit_block_move_via_movmem (x, y, size, align,
1155 expected_align, expected_size,
1156 min_size, max_size, probable_max_size))
1157 ;
1158 else if (may_use_call
1159 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1160 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1161 {
1162 /* Since x and y are passed to a libcall, mark the corresponding
1163 tree EXPR as addressable. */
1164 tree y_expr = MEM_EXPR (y);
1165 tree x_expr = MEM_EXPR (x);
1166 if (y_expr)
1167 mark_addressable (y_expr);
1168 if (x_expr)
1169 mark_addressable (x_expr);
1170 retval = emit_block_move_via_libcall (x, y, size,
1171 method == BLOCK_OP_TAILCALL);
1172 }
1173
1174 else
1175 emit_block_move_via_loop (x, y, size, align);
1176
1177 if (method == BLOCK_OP_CALL_PARM)
1178 OK_DEFER_POP;
1179
1180 return retval;
1181 }
1182
1183 rtx
1184 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1185 {
1186 unsigned HOST_WIDE_INT max, min = 0;
1187 if (GET_CODE (size) == CONST_INT)
1188 min = max = UINTVAL (size);
1189 else
1190 max = GET_MODE_MASK (GET_MODE (size));
1191 return emit_block_move_hints (x, y, size, method, 0, -1,
1192 min, max, max);
1193 }
1194
1195 /* A subroutine of emit_block_move. Returns true if calling the
1196 block move libcall will not clobber any parameters which may have
1197 already been placed on the stack. */
1198
1199 static bool
1200 block_move_libcall_safe_for_call_parm (void)
1201 {
1202 #if defined (REG_PARM_STACK_SPACE)
1203 tree fn;
1204 #endif
1205
1206 /* If arguments are pushed on the stack, then they're safe. */
1207 if (PUSH_ARGS)
1208 return true;
1209
1210 /* If registers go on the stack anyway, any argument is sure to clobber
1211 an outgoing argument. */
1212 #if defined (REG_PARM_STACK_SPACE)
1213 fn = emit_block_move_libcall_fn (false);
1214 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1215 depend on its argument. */
1216 (void) fn;
1217 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1218 && REG_PARM_STACK_SPACE (fn) != 0)
1219 return false;
1220 #endif
1221
1222 /* If any argument goes in memory, then it might clobber an outgoing
1223 argument. */
1224 {
1225 CUMULATIVE_ARGS args_so_far_v;
1226 cumulative_args_t args_so_far;
1227 tree fn, arg;
1228
1229 fn = emit_block_move_libcall_fn (false);
1230 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1231 args_so_far = pack_cumulative_args (&args_so_far_v);
1232
1233 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1234 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1235 {
1236 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1237 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1238 NULL_TREE, true);
1239 if (!tmp || !REG_P (tmp))
1240 return false;
1241 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1242 return false;
1243 targetm.calls.function_arg_advance (args_so_far, mode,
1244 NULL_TREE, true);
1245 }
1246 }
1247 return true;
1248 }
1249
1250 /* A subroutine of emit_block_move. Expand a movmem pattern;
1251 return true if successful. */
1252
1253 static bool
1254 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1255 unsigned int expected_align, HOST_WIDE_INT expected_size,
1256 unsigned HOST_WIDE_INT min_size,
1257 unsigned HOST_WIDE_INT max_size,
1258 unsigned HOST_WIDE_INT probable_max_size)
1259 {
1260 int save_volatile_ok = volatile_ok;
1261 enum machine_mode mode;
1262
1263 if (expected_align < align)
1264 expected_align = align;
1265 if (expected_size != -1)
1266 {
1267 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1268 expected_size = probable_max_size;
1269 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1270 expected_size = min_size;
1271 }
1272
1273 /* Since this is a move insn, we don't care about volatility. */
1274 volatile_ok = 1;
1275
1276 /* Try the most limited insn first, because there's no point
1277 including more than one in the machine description unless
1278 the more limited one has some advantage. */
1279
1280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1281 mode = GET_MODE_WIDER_MODE (mode))
1282 {
1283 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1284
1285 if (code != CODE_FOR_nothing
1286 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1287 here because if SIZE is less than the mode mask, as it is
1288 returned by the macro, it will definitely be less than the
1289 actual mode mask. Since SIZE is within the Pmode address
1290 space, we limit MODE to Pmode. */
1291 && ((CONST_INT_P (size)
1292 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1293 <= (GET_MODE_MASK (mode) >> 1)))
1294 || max_size <= (GET_MODE_MASK (mode) >> 1)
1295 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1296 {
1297 struct expand_operand ops[9];
1298 unsigned int nops;
1299
1300 /* ??? When called via emit_block_move_for_call, it'd be
1301 nice if there were some way to inform the backend, so
1302 that it doesn't fail the expansion because it thinks
1303 emitting the libcall would be more efficient. */
1304 nops = insn_data[(int) code].n_generator_args;
1305 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1306
1307 create_fixed_operand (&ops[0], x);
1308 create_fixed_operand (&ops[1], y);
1309 /* The check above guarantees that this size conversion is valid. */
1310 create_convert_operand_to (&ops[2], size, mode, true);
1311 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1312 if (nops >= 6)
1313 {
1314 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1315 create_integer_operand (&ops[5], expected_size);
1316 }
1317 if (nops >= 8)
1318 {
1319 create_integer_operand (&ops[6], min_size);
1320 /* If we cannot represent the maximal size,
1321 make the parameter NULL. */
1322 if ((HOST_WIDE_INT) max_size != -1)
1323 create_integer_operand (&ops[7], max_size);
1324 else
1325 create_fixed_operand (&ops[7], NULL);
1326 }
1327 if (nops == 9)
1328 {
1329 /* If we cannot represent the maximal size,
1330 make the parameter NULL. */
1331 if ((HOST_WIDE_INT) probable_max_size != -1)
1332 create_integer_operand (&ops[8], probable_max_size);
1333 else
1334 create_fixed_operand (&ops[8], NULL);
1335 }
1336 if (maybe_expand_insn (code, nops, ops))
1337 {
1338 volatile_ok = save_volatile_ok;
1339 return true;
1340 }
1341 }
1342 }
1343
1344 volatile_ok = save_volatile_ok;
1345 return false;
1346 }
1347
1348 /* A subroutine of emit_block_move. Expand a call to memcpy.
1349 Return the return value from memcpy, 0 otherwise. */
1350
1351 rtx
1352 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1353 {
1354 rtx dst_addr, src_addr;
1355 tree call_expr, fn, src_tree, dst_tree, size_tree;
1356 enum machine_mode size_mode;
1357 rtx retval;
1358
1359 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1360 pseudos. We can then place those new pseudos into a VAR_DECL and
1361 use them later. */
1362
1363 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1364 src_addr = copy_addr_to_reg (XEXP (src, 0));
1365
1366 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1367 src_addr = convert_memory_address (ptr_mode, src_addr);
1368
1369 dst_tree = make_tree (ptr_type_node, dst_addr);
1370 src_tree = make_tree (ptr_type_node, src_addr);
1371
1372 size_mode = TYPE_MODE (sizetype);
1373
1374 size = convert_to_mode (size_mode, size, 1);
1375 size = copy_to_mode_reg (size_mode, size);
1376
1377 /* It is incorrect to use the libcall calling conventions to call
1378 memcpy in this context. This could be a user call to memcpy and
1379 the user may wish to examine the return value from memcpy. For
1380 targets where libcalls and normal calls have different conventions
1381 for returning pointers, we could end up generating incorrect code. */
1382
1383 size_tree = make_tree (sizetype, size);
1384
1385 fn = emit_block_move_libcall_fn (true);
1386 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1387 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1388
1389 retval = expand_normal (call_expr);
1390
1391 return retval;
1392 }
1393
1394 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1395 for the function we use for block copies. */
1396
1397 static GTY(()) tree block_move_fn;
1398
1399 void
1400 init_block_move_fn (const char *asmspec)
1401 {
1402 if (!block_move_fn)
1403 {
1404 tree args, fn, attrs, attr_args;
1405
1406 fn = get_identifier ("memcpy");
1407 args = build_function_type_list (ptr_type_node, ptr_type_node,
1408 const_ptr_type_node, sizetype,
1409 NULL_TREE);
1410
1411 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1412 DECL_EXTERNAL (fn) = 1;
1413 TREE_PUBLIC (fn) = 1;
1414 DECL_ARTIFICIAL (fn) = 1;
1415 TREE_NOTHROW (fn) = 1;
1416 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1417 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1418
1419 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1420 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1421
1422 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1423
1424 block_move_fn = fn;
1425 }
1426
1427 if (asmspec)
1428 set_user_assembler_name (block_move_fn, asmspec);
1429 }
1430
1431 static tree
1432 emit_block_move_libcall_fn (int for_call)
1433 {
1434 static bool emitted_extern;
1435
1436 if (!block_move_fn)
1437 init_block_move_fn (NULL);
1438
1439 if (for_call && !emitted_extern)
1440 {
1441 emitted_extern = true;
1442 make_decl_rtl (block_move_fn);
1443 }
1444
1445 return block_move_fn;
1446 }
1447
1448 /* A subroutine of emit_block_move. Copy the data via an explicit
1449 loop. This is used only when libcalls are forbidden. */
1450 /* ??? It'd be nice to copy in hunks larger than QImode. */
1451
1452 static void
1453 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1454 unsigned int align ATTRIBUTE_UNUSED)
1455 {
1456 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1457 enum machine_mode x_addr_mode = get_address_mode (x);
1458 enum machine_mode y_addr_mode = get_address_mode (y);
1459 enum machine_mode iter_mode;
1460
1461 iter_mode = GET_MODE (size);
1462 if (iter_mode == VOIDmode)
1463 iter_mode = word_mode;
1464
1465 top_label = gen_label_rtx ();
1466 cmp_label = gen_label_rtx ();
1467 iter = gen_reg_rtx (iter_mode);
1468
1469 emit_move_insn (iter, const0_rtx);
1470
1471 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1472 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1473 do_pending_stack_adjust ();
1474
1475 emit_jump (cmp_label);
1476 emit_label (top_label);
1477
1478 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1479 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1480
1481 if (x_addr_mode != y_addr_mode)
1482 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1483 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1484
1485 x = change_address (x, QImode, x_addr);
1486 y = change_address (y, QImode, y_addr);
1487
1488 emit_move_insn (x, y);
1489
1490 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1491 true, OPTAB_LIB_WIDEN);
1492 if (tmp != iter)
1493 emit_move_insn (iter, tmp);
1494
1495 emit_label (cmp_label);
1496
1497 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1498 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1499 }
1500 \f
1501 /* Copy all or part of a value X into registers starting at REGNO.
1502 The number of registers to be filled is NREGS. */
1503
1504 void
1505 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1506 {
1507 int i;
1508 #ifdef HAVE_load_multiple
1509 rtx pat;
1510 rtx last;
1511 #endif
1512
1513 if (nregs == 0)
1514 return;
1515
1516 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1517 x = validize_mem (force_const_mem (mode, x));
1518
1519 /* See if the machine can do this with a load multiple insn. */
1520 #ifdef HAVE_load_multiple
1521 if (HAVE_load_multiple)
1522 {
1523 last = get_last_insn ();
1524 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1525 GEN_INT (nregs));
1526 if (pat)
1527 {
1528 emit_insn (pat);
1529 return;
1530 }
1531 else
1532 delete_insns_since (last);
1533 }
1534 #endif
1535
1536 for (i = 0; i < nregs; i++)
1537 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1538 operand_subword_force (x, i, mode));
1539 }
1540
1541 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1542 The number of registers to be filled is NREGS. */
1543
1544 void
1545 move_block_from_reg (int regno, rtx x, int nregs)
1546 {
1547 int i;
1548
1549 if (nregs == 0)
1550 return;
1551
1552 /* See if the machine can do this with a store multiple insn. */
1553 #ifdef HAVE_store_multiple
1554 if (HAVE_store_multiple)
1555 {
1556 rtx last = get_last_insn ();
1557 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1558 GEN_INT (nregs));
1559 if (pat)
1560 {
1561 emit_insn (pat);
1562 return;
1563 }
1564 else
1565 delete_insns_since (last);
1566 }
1567 #endif
1568
1569 for (i = 0; i < nregs; i++)
1570 {
1571 rtx tem = operand_subword (x, i, 1, BLKmode);
1572
1573 gcc_assert (tem);
1574
1575 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1576 }
1577 }
1578
1579 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1580 ORIG, where ORIG is a non-consecutive group of registers represented by
1581 a PARALLEL. The clone is identical to the original except in that the
1582 original set of registers is replaced by a new set of pseudo registers.
1583 The new set has the same modes as the original set. */
1584
1585 rtx
1586 gen_group_rtx (rtx orig)
1587 {
1588 int i, length;
1589 rtx *tmps;
1590
1591 gcc_assert (GET_CODE (orig) == PARALLEL);
1592
1593 length = XVECLEN (orig, 0);
1594 tmps = XALLOCAVEC (rtx, length);
1595
1596 /* Skip a NULL entry in first slot. */
1597 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1598
1599 if (i)
1600 tmps[0] = 0;
1601
1602 for (; i < length; i++)
1603 {
1604 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1605 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1606
1607 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1608 }
1609
1610 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1611 }
1612
1613 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1614 except that values are placed in TMPS[i], and must later be moved
1615 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1616
1617 static void
1618 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1619 {
1620 rtx src;
1621 int start, i;
1622 enum machine_mode m = GET_MODE (orig_src);
1623
1624 gcc_assert (GET_CODE (dst) == PARALLEL);
1625
1626 if (m != VOIDmode
1627 && !SCALAR_INT_MODE_P (m)
1628 && !MEM_P (orig_src)
1629 && GET_CODE (orig_src) != CONCAT)
1630 {
1631 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1632 if (imode == BLKmode)
1633 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1634 else
1635 src = gen_reg_rtx (imode);
1636 if (imode != BLKmode)
1637 src = gen_lowpart (GET_MODE (orig_src), src);
1638 emit_move_insn (src, orig_src);
1639 /* ...and back again. */
1640 if (imode != BLKmode)
1641 src = gen_lowpart (imode, src);
1642 emit_group_load_1 (tmps, dst, src, type, ssize);
1643 return;
1644 }
1645
1646 /* Check for a NULL entry, used to indicate that the parameter goes
1647 both on the stack and in registers. */
1648 if (XEXP (XVECEXP (dst, 0, 0), 0))
1649 start = 0;
1650 else
1651 start = 1;
1652
1653 /* Process the pieces. */
1654 for (i = start; i < XVECLEN (dst, 0); i++)
1655 {
1656 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1657 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1658 unsigned int bytelen = GET_MODE_SIZE (mode);
1659 int shift = 0;
1660
1661 /* Handle trailing fragments that run over the size of the struct. */
1662 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1663 {
1664 /* Arrange to shift the fragment to where it belongs.
1665 extract_bit_field loads to the lsb of the reg. */
1666 if (
1667 #ifdef BLOCK_REG_PADDING
1668 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1669 == (BYTES_BIG_ENDIAN ? upward : downward)
1670 #else
1671 BYTES_BIG_ENDIAN
1672 #endif
1673 )
1674 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1675 bytelen = ssize - bytepos;
1676 gcc_assert (bytelen > 0);
1677 }
1678
1679 /* If we won't be loading directly from memory, protect the real source
1680 from strange tricks we might play; but make sure that the source can
1681 be loaded directly into the destination. */
1682 src = orig_src;
1683 if (!MEM_P (orig_src)
1684 && (!CONSTANT_P (orig_src)
1685 || (GET_MODE (orig_src) != mode
1686 && GET_MODE (orig_src) != VOIDmode)))
1687 {
1688 if (GET_MODE (orig_src) == VOIDmode)
1689 src = gen_reg_rtx (mode);
1690 else
1691 src = gen_reg_rtx (GET_MODE (orig_src));
1692
1693 emit_move_insn (src, orig_src);
1694 }
1695
1696 /* Optimize the access just a bit. */
1697 if (MEM_P (src)
1698 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1699 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1700 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1701 && bytelen == GET_MODE_SIZE (mode))
1702 {
1703 tmps[i] = gen_reg_rtx (mode);
1704 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1705 }
1706 else if (COMPLEX_MODE_P (mode)
1707 && GET_MODE (src) == mode
1708 && bytelen == GET_MODE_SIZE (mode))
1709 /* Let emit_move_complex do the bulk of the work. */
1710 tmps[i] = src;
1711 else if (GET_CODE (src) == CONCAT)
1712 {
1713 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1714 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1715
1716 if ((bytepos == 0 && bytelen == slen0)
1717 || (bytepos != 0 && bytepos + bytelen <= slen))
1718 {
1719 /* The following assumes that the concatenated objects all
1720 have the same size. In this case, a simple calculation
1721 can be used to determine the object and the bit field
1722 to be extracted. */
1723 tmps[i] = XEXP (src, bytepos / slen0);
1724 if (! CONSTANT_P (tmps[i])
1725 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1726 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1727 (bytepos % slen0) * BITS_PER_UNIT,
1728 1, NULL_RTX, mode, mode);
1729 }
1730 else
1731 {
1732 rtx mem;
1733
1734 gcc_assert (!bytepos);
1735 mem = assign_stack_temp (GET_MODE (src), slen);
1736 emit_move_insn (mem, src);
1737 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1738 0, 1, NULL_RTX, mode, mode);
1739 }
1740 }
1741 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1742 SIMD register, which is currently broken. Until we get GCC
1743 to emit proper RTL for these cases, let's dump to memory. */
1744 else if (VECTOR_MODE_P (GET_MODE (dst))
1745 && REG_P (src))
1746 {
1747 int slen = GET_MODE_SIZE (GET_MODE (src));
1748 rtx mem;
1749
1750 mem = assign_stack_temp (GET_MODE (src), slen);
1751 emit_move_insn (mem, src);
1752 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1753 }
1754 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1755 && XVECLEN (dst, 0) > 1)
1756 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1757 else if (CONSTANT_P (src))
1758 {
1759 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1760
1761 if (len == ssize)
1762 tmps[i] = src;
1763 else
1764 {
1765 rtx first, second;
1766
1767 /* TODO: const_wide_int can have sizes other than this... */
1768 gcc_assert (2 * len == ssize);
1769 split_double (src, &first, &second);
1770 if (i)
1771 tmps[i] = second;
1772 else
1773 tmps[i] = first;
1774 }
1775 }
1776 else if (REG_P (src) && GET_MODE (src) == mode)
1777 tmps[i] = src;
1778 else
1779 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1780 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1781 mode, mode);
1782
1783 if (shift)
1784 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1785 shift, tmps[i], 0);
1786 }
1787 }
1788
1789 /* Emit code to move a block SRC of type TYPE to a block DST,
1790 where DST is non-consecutive registers represented by a PARALLEL.
1791 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1792 if not known. */
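/* For illustration only (registers, modes and offsets depend on the target
   ABI): a 16-byte block held in two 8-byte registers might be described by
   a PARALLEL of the rough shape

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   where the second operand of each EXPR_LIST gives the byte offset of that
   register's piece within the block.  */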
1793
1794 void
1795 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1796 {
1797 rtx *tmps;
1798 int i;
1799
1800 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1801 emit_group_load_1 (tmps, dst, src, type, ssize);
1802
1803 /* Copy the extracted pieces into the proper (probable) hard regs. */
1804 for (i = 0; i < XVECLEN (dst, 0); i++)
1805 {
1806 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1807 if (d == NULL)
1808 continue;
1809 emit_move_insn (d, tmps[i]);
1810 }
1811 }
1812
1813 /* Similar, but load SRC into new pseudos in a format that looks like
1814 PARALLEL. This can later be fed to emit_group_move to get things
1815 in the right place. */
1816
1817 rtx
1818 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1819 {
1820 rtvec vec;
1821 int i;
1822
1823 vec = rtvec_alloc (XVECLEN (parallel, 0));
1824 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1825
1826 /* Convert the vector to look just like the original PARALLEL, except
1827 with the computed values. */
1828 for (i = 0; i < XVECLEN (parallel, 0); i++)
1829 {
1830 rtx e = XVECEXP (parallel, 0, i);
1831 rtx d = XEXP (e, 0);
1832
1833 if (d)
1834 {
1835 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1836 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1837 }
1838 RTVEC_ELT (vec, i) = e;
1839 }
1840
1841 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1842 }
1843
1844 /* Emit code to move a block SRC to block DST, where SRC and DST are
1845 non-consecutive groups of registers, each represented by a PARALLEL. */
1846
1847 void
1848 emit_group_move (rtx dst, rtx src)
1849 {
1850 int i;
1851
1852 gcc_assert (GET_CODE (src) == PARALLEL
1853 && GET_CODE (dst) == PARALLEL
1854 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1855
1856 /* Skip first entry if NULL. */
1857 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1858 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1859 XEXP (XVECEXP (src, 0, i), 0));
1860 }
1861
1862 /* Move a group of registers represented by a PARALLEL into pseudos. */
1863
1864 rtx
1865 emit_group_move_into_temps (rtx src)
1866 {
1867 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1868 int i;
1869
1870 for (i = 0; i < XVECLEN (src, 0); i++)
1871 {
1872 rtx e = XVECEXP (src, 0, i);
1873 rtx d = XEXP (e, 0);
1874
1875 if (d)
1876 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1877 RTVEC_ELT (vec, i) = e;
1878 }
1879
1880 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1881 }
1882
1883 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1884 where SRC is non-consecutive registers represented by a PARALLEL.
1885 SSIZE represents the total size of block ORIG_DST, or -1 if not
1886 known. */
1887
1888 void
1889 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1890 {
1891 rtx *tmps, dst;
1892 int start, finish, i;
1893 enum machine_mode m = GET_MODE (orig_dst);
1894
1895 gcc_assert (GET_CODE (src) == PARALLEL);
1896
1897 if (!SCALAR_INT_MODE_P (m)
1898 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1899 {
1900 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1901 if (imode == BLKmode)
1902 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1903 else
1904 dst = gen_reg_rtx (imode);
1905 emit_group_store (dst, src, type, ssize);
1906 if (imode != BLKmode)
1907 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1908 emit_move_insn (orig_dst, dst);
1909 return;
1910 }
1911
1912 /* Check for a NULL entry, used to indicate that the parameter goes
1913 both on the stack and in registers. */
1914 if (XEXP (XVECEXP (src, 0, 0), 0))
1915 start = 0;
1916 else
1917 start = 1;
1918 finish = XVECLEN (src, 0);
1919
1920 tmps = XALLOCAVEC (rtx, finish);
1921
1922 /* Copy the (probable) hard regs into pseudos. */
1923 for (i = start; i < finish; i++)
1924 {
1925 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1926 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1927 {
1928 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1929 emit_move_insn (tmps[i], reg);
1930 }
1931 else
1932 tmps[i] = reg;
1933 }
1934
1935 /* If we won't be storing directly into memory, protect the real destination
1936 from strange tricks we might play. */
1937 dst = orig_dst;
1938 if (GET_CODE (dst) == PARALLEL)
1939 {
1940 rtx temp;
1941
1942 /* We can get a PARALLEL dst if there is a conditional expression in
1943 a return statement. In that case, the dst and src are the same,
1944 so no action is necessary. */
1945 if (rtx_equal_p (dst, src))
1946 return;
1947
1948 /* It is unclear if we can ever reach here, but we may as well handle
1949 it. Allocate a temporary, and split this into a store/load to/from
1950 the temporary. */
1951 temp = assign_stack_temp (GET_MODE (dst), ssize);
1952 emit_group_store (temp, src, type, ssize);
1953 emit_group_load (dst, temp, type, ssize);
1954 return;
1955 }
1956 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1957 {
1958 enum machine_mode outer = GET_MODE (dst);
1959 enum machine_mode inner;
1960 HOST_WIDE_INT bytepos;
1961 bool done = false;
1962 rtx temp;
1963
1964 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1965 dst = gen_reg_rtx (outer);
1966
1967 /* Make life a bit easier for combine. */
1968 /* If the first element of the vector is the low part
1969 of the destination mode, use a paradoxical subreg to
1970 initialize the destination. */
1971 if (start < finish)
1972 {
1973 inner = GET_MODE (tmps[start]);
1974 bytepos = subreg_lowpart_offset (inner, outer);
1975 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1976 {
1977 temp = simplify_gen_subreg (outer, tmps[start],
1978 inner, 0);
1979 if (temp)
1980 {
1981 emit_move_insn (dst, temp);
1982 done = true;
1983 start++;
1984 }
1985 }
1986 }
1987
1988 /* If the first element wasn't the low part, try the last. */
1989 if (!done
1990 && start < finish - 1)
1991 {
1992 inner = GET_MODE (tmps[finish - 1]);
1993 bytepos = subreg_lowpart_offset (inner, outer);
1994 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1995 {
1996 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1997 inner, 0);
1998 if (temp)
1999 {
2000 emit_move_insn (dst, temp);
2001 done = true;
2002 finish--;
2003 }
2004 }
2005 }
2006
2007 /* Otherwise, simply initialize the result to zero. */
2008 if (!done)
2009 emit_move_insn (dst, CONST0_RTX (outer));
2010 }
2011
2012 /* Process the pieces. */
2013 for (i = start; i < finish; i++)
2014 {
2015 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2016 enum machine_mode mode = GET_MODE (tmps[i]);
2017 unsigned int bytelen = GET_MODE_SIZE (mode);
2018 unsigned int adj_bytelen;
2019 rtx dest = dst;
2020
2021 /* Handle trailing fragments that run over the size of the struct. */
2022 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2023 adj_bytelen = ssize - bytepos;
2024 else
2025 adj_bytelen = bytelen;
2026
2027 if (GET_CODE (dst) == CONCAT)
2028 {
2029 if (bytepos + adj_bytelen
2030 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2031 dest = XEXP (dst, 0);
2032 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2033 {
2034 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2035 dest = XEXP (dst, 1);
2036 }
2037 else
2038 {
2039 enum machine_mode dest_mode = GET_MODE (dest);
2040 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2041
2042 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2043
2044 if (GET_MODE_ALIGNMENT (dest_mode)
2045 >= GET_MODE_ALIGNMENT (tmp_mode))
2046 {
2047 dest = assign_stack_temp (dest_mode,
2048 GET_MODE_SIZE (dest_mode));
2049 emit_move_insn (adjust_address (dest,
2050 tmp_mode,
2051 bytepos),
2052 tmps[i]);
2053 dst = dest;
2054 }
2055 else
2056 {
2057 dest = assign_stack_temp (tmp_mode,
2058 GET_MODE_SIZE (tmp_mode));
2059 emit_move_insn (dest, tmps[i]);
2060 dst = adjust_address (dest, dest_mode, bytepos);
2061 }
2062 break;
2063 }
2064 }
2065
2066 /* Handle trailing fragments that run over the size of the struct. */
2067 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2068 {
2069 /* store_bit_field always takes its value from the lsb.
2070 Move the fragment to the lsb if it's not already there. */
2071 if (
2072 #ifdef BLOCK_REG_PADDING
2073 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2074 == (BYTES_BIG_ENDIAN ? upward : downward)
2075 #else
2076 BYTES_BIG_ENDIAN
2077 #endif
2078 )
2079 {
2080 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2081 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2082 shift, tmps[i], 0);
2083 }
2084
2085 /* Make sure not to write past the end of the struct. */
2086 store_bit_field (dest,
2087 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2088 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2089 VOIDmode, tmps[i]);
2090 }
2091
2092 /* Optimize the access just a bit. */
2093 else if (MEM_P (dest)
2094 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2095 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2096 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2097 && bytelen == GET_MODE_SIZE (mode))
2098 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2099
2100 else
2101 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2102 0, 0, mode, tmps[i]);
2103 }
2104
2105 /* Copy from the pseudo into the (probable) hard reg. */
2106 if (orig_dst != dst)
2107 emit_move_insn (orig_dst, dst);
2108 }
2109
2110 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2111 of the value stored in X. */
2112
2113 rtx
2114 maybe_emit_group_store (rtx x, tree type)
2115 {
2116 enum machine_mode mode = TYPE_MODE (type);
2117 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2118 if (GET_CODE (x) == PARALLEL)
2119 {
2120 rtx result = gen_reg_rtx (mode);
2121 emit_group_store (result, x, type, int_size_in_bytes (type));
2122 return result;
2123 }
2124 return x;
2125 }
2126
2127 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2128
2129 This is used on targets that return BLKmode values in registers. */
2130
2131 void
2132 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2133 {
2134 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2135 rtx src = NULL, dst = NULL;
2136 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2137 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2138 enum machine_mode mode = GET_MODE (srcreg);
2139 enum machine_mode tmode = GET_MODE (target);
2140 enum machine_mode copy_mode;
2141
2142 /* BLKmode registers created in the back-end shouldn't have survived. */
2143 gcc_assert (mode != BLKmode);
2144
2145 /* If the structure doesn't take up a whole number of words, see whether
2146 SRCREG is padded on the left or on the right. If it's on the left,
2147 set PADDING_CORRECTION to the number of bits to skip.
2148
2149 In most ABIs, the structure will be returned at the least significant end of
2150 the register, which translates to right padding on little-endian
2151 targets and left padding on big-endian targets. The opposite
2152 holds if the structure is returned at the most significant
2153 end of the register. */
2154 if (bytes % UNITS_PER_WORD != 0
2155 && (targetm.calls.return_in_msb (type)
2156 ? !BYTES_BIG_ENDIAN
2157 : BYTES_BIG_ENDIAN))
2158 padding_correction
2159 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
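    /* Illustrative arithmetic, assuming a 32-bit-word target: for a 6-byte
       structure, bytes % UNITS_PER_WORD == 2, so padding_correction
       == 32 - 2 * 8 == 16 bits of left padding to skip.  */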
2160
2161 /* We can use a single move if we have an exact mode for the size. */
2162 else if (MEM_P (target)
2163 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2164 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2165 && bytes == GET_MODE_SIZE (mode))
2166 {
2167 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2168 return;
2169 }
2170
2171 /* And if we additionally have the same mode for a register. */
2172 else if (REG_P (target)
2173 && GET_MODE (target) == mode
2174 && bytes == GET_MODE_SIZE (mode))
2175 {
2176 emit_move_insn (target, srcreg);
2177 return;
2178 }
2179
2180 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2181 into a new pseudo which is a full word. */
2182 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2183 {
2184 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2185 mode = word_mode;
2186 }
2187
2188 /* Copy the structure BITSIZE bits at a time. If the target lives in
2189 memory, take care of not reading/writing past its end by selecting
2190 a copy mode suited to BITSIZE. This should always be possible given
2191 how it is computed.
2192
2193 If the target lives in register, make sure not to select a copy mode
2194 larger than the mode of the register.
2195
2196 We could probably emit more efficient code for machines which do not use
2197 strict alignment, but it doesn't seem worth the effort at the current
2198 time. */
2199
2200 copy_mode = word_mode;
2201 if (MEM_P (target))
2202 {
2203 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2204 if (mem_mode != BLKmode)
2205 copy_mode = mem_mode;
2206 }
2207 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2208 copy_mode = tmode;
2209
2210 for (bitpos = 0, xbitpos = padding_correction;
2211 bitpos < bytes * BITS_PER_UNIT;
2212 bitpos += bitsize, xbitpos += bitsize)
2213 {
2214 /* We need a new source operand each time xbitpos is on a
2215 word boundary and when xbitpos == padding_correction
2216 (the first time through). */
2217 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2218 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2219
2220 /* We need a new destination operand each time bitpos is on
2221 a word boundary. */
2222 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2223 dst = target;
2224 else if (bitpos % BITS_PER_WORD == 0)
2225 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2226
2227 /* Use xbitpos for the source extraction (right justified) and
2228 bitpos for the destination store (left justified). */
2229 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2230 extract_bit_field (src, bitsize,
2231 xbitpos % BITS_PER_WORD, 1,
2232 NULL_RTX, copy_mode, copy_mode));
2233 }
2234 }
2235
2236 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2237 register if it contains any data, otherwise return null.
2238
2239 This is used on targets that return BLKmode values in registers. */
2240
2241 rtx
2242 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2243 {
2244 int i, n_regs;
2245 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2246 unsigned int bitsize;
2247 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2248 enum machine_mode dst_mode;
2249
2250 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2251
2252 x = expand_normal (src);
2253
2254 bytes = int_size_in_bytes (TREE_TYPE (src));
2255 if (bytes == 0)
2256 return NULL_RTX;
2257
2258 /* If the structure doesn't take up a whole number of words, see
2259 whether the register value should be padded on the left or on
2260 the right. Set PADDING_CORRECTION to the number of padding
2261 bits needed on the left side.
2262
2263 In most ABIs, the structure will be returned at the least significant end of
2264 the register, which translates to right padding on little-endian
2265 targets and left padding on big-endian targets. The opposite
2266 holds if the structure is returned at the most significant
2267 end of the register. */
2268 if (bytes % UNITS_PER_WORD != 0
2269 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2270 ? !BYTES_BIG_ENDIAN
2271 : BYTES_BIG_ENDIAN))
2272 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2273 * BITS_PER_UNIT));
2274
2275 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2276 dst_words = XALLOCAVEC (rtx, n_regs);
2277 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2278
2279 /* Copy the structure BITSIZE bits at a time. */
2280 for (bitpos = 0, xbitpos = padding_correction;
2281 bitpos < bytes * BITS_PER_UNIT;
2282 bitpos += bitsize, xbitpos += bitsize)
2283 {
2284 /* We need a new destination pseudo each time xbitpos is
2285 on a word boundary and when xbitpos == padding_correction
2286 (the first time through). */
2287 if (xbitpos % BITS_PER_WORD == 0
2288 || xbitpos == padding_correction)
2289 {
2290 /* Generate an appropriate register. */
2291 dst_word = gen_reg_rtx (word_mode);
2292 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2293
2294 /* Clear the destination before we move anything into it. */
2295 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2296 }
2297
2298 /* We need a new source operand each time bitpos is on a word
2299 boundary. */
2300 if (bitpos % BITS_PER_WORD == 0)
2301 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2302
2303 /* Use bitpos for the source extraction (left justified) and
2304 xbitpos for the destination store (right justified). */
2305 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2306 0, 0, word_mode,
2307 extract_bit_field (src_word, bitsize,
2308 bitpos % BITS_PER_WORD, 1,
2309 NULL_RTX, word_mode, word_mode));
2310 }
2311
2312 if (mode == BLKmode)
2313 {
2314 /* Find the smallest integer mode large enough to hold the
2315 entire structure. */
2316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 mode != VOIDmode;
2318 mode = GET_MODE_WIDER_MODE (mode))
2319 /* Have we found a large enough mode? */
2320 if (GET_MODE_SIZE (mode) >= bytes)
2321 break;
2322
2323 /* A suitable mode should have been found. */
2324 gcc_assert (mode != VOIDmode);
2325 }
2326
2327 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2328 dst_mode = word_mode;
2329 else
2330 dst_mode = mode;
2331 dst = gen_reg_rtx (dst_mode);
2332
2333 for (i = 0; i < n_regs; i++)
2334 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2335
2336 if (mode != dst_mode)
2337 dst = gen_lowpart (mode, dst);
2338
2339 return dst;
2340 }
2341
2342 /* Add a USE expression for REG to the (possibly empty) list pointed
2343 to by CALL_FUSAGE. REG must denote a hard register. */
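/* A sketch of the node this builds: the new list head is
   (expr_list:MODE (use (reg R)) <previous CALL_FUSAGE>), i.e. the USE is
   simply prepended to the existing fusage chain.  */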
2344
2345 void
2346 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2347 {
2348 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2349
2350 *call_fusage
2351 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2352 }
2353
2354 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2355 to by CALL_FUSAGE. REG must denote a hard register. */
2356
2357 void
2358 clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2359 {
2360 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2361
2362 *call_fusage
2363 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2364 }
2365
2366 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2367 starting at REGNO. All of these registers must be hard registers. */
2368
2369 void
2370 use_regs (rtx *call_fusage, int regno, int nregs)
2371 {
2372 int i;
2373
2374 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2375
2376 for (i = 0; i < nregs; i++)
2377 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2378 }
2379
2380 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2381 PARALLEL REGS. This is for calls that pass values in multiple
2382 non-contiguous locations. The Irix 6 ABI has examples of this. */
2383
2384 void
2385 use_group_regs (rtx *call_fusage, rtx regs)
2386 {
2387 int i;
2388
2389 for (i = 0; i < XVECLEN (regs, 0); i++)
2390 {
2391 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2392
2393 /* A NULL entry means the parameter goes both on the stack and in
2394 registers. This can also be a MEM for targets that pass values
2395 partially on the stack and partially in registers. */
2396 if (reg != 0 && REG_P (reg))
2397 use_reg (call_fusage, reg);
2398 }
2399 }
2400
2401 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2402 assignment and the code of the expression on the RHS is CODE. Return
2403 NULL otherwise. */
2404
2405 static gimple
2406 get_def_for_expr (tree name, enum tree_code code)
2407 {
2408 gimple def_stmt;
2409
2410 if (TREE_CODE (name) != SSA_NAME)
2411 return NULL;
2412
2413 def_stmt = get_gimple_for_ssa_name (name);
2414 if (!def_stmt
2415 || gimple_assign_rhs_code (def_stmt) != code)
2416 return NULL;
2417
2418 return def_stmt;
2419 }
2420
2421 #ifdef HAVE_conditional_move
2422 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2423 assignment and the class of the expression on the RHS is CLASS. Return
2424 NULL otherwise. */
2425
2426 static gimple
2427 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2428 {
2429 gimple def_stmt;
2430
2431 if (TREE_CODE (name) != SSA_NAME)
2432 return NULL;
2433
2434 def_stmt = get_gimple_for_ssa_name (name);
2435 if (!def_stmt
2436 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2437 return NULL;
2438
2439 return def_stmt;
2440 }
2441 #endif
2442 \f
2443
2444 /* Determine whether the LEN bytes generated by CONSTFUN can be
2445 stored to memory using several move instructions. CONSTFUNDATA is
2446 a pointer which will be passed as argument in every CONSTFUN call.
2447 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2448 a memset operation and false if it's a copy of a constant string.
2449 Return nonzero if a call to store_by_pieces should succeed. */
2450
2451 int
2452 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2453 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2454 void *constfundata, unsigned int align, bool memsetp)
2455 {
2456 unsigned HOST_WIDE_INT l;
2457 unsigned int max_size;
2458 HOST_WIDE_INT offset = 0;
2459 enum machine_mode mode;
2460 enum insn_code icode;
2461 int reverse;
2462 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2463 rtx cst ATTRIBUTE_UNUSED;
2464
2465 if (len == 0)
2466 return 1;
2467
2468 if (! (memsetp
2469 ? SET_BY_PIECES_P (len, align)
2470 : STORE_BY_PIECES_P (len, align)))
2471 return 0;
2472
2473 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2474
2475 /* We would first store what we can in the largest integer mode, then go to
2476 successively smaller modes. */
2477
2478 for (reverse = 0;
2479 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2480 reverse++)
2481 {
2482 l = len;
2483 max_size = STORE_MAX_PIECES + 1;
2484 while (max_size > 1 && l > 0)
2485 {
2486 mode = widest_int_mode_for_size (max_size);
2487
2488 if (mode == VOIDmode)
2489 break;
2490
2491 icode = optab_handler (mov_optab, mode);
2492 if (icode != CODE_FOR_nothing
2493 && align >= GET_MODE_ALIGNMENT (mode))
2494 {
2495 unsigned int size = GET_MODE_SIZE (mode);
2496
2497 while (l >= size)
2498 {
2499 if (reverse)
2500 offset -= size;
2501
2502 cst = (*constfun) (constfundata, offset, mode);
2503 if (!targetm.legitimate_constant_p (mode, cst))
2504 return 0;
2505
2506 if (!reverse)
2507 offset += size;
2508
2509 l -= size;
2510 }
2511 }
2512
2513 max_size = GET_MODE_SIZE (mode);
2514 }
2515
2516 /* The code above should have handled everything. */
2517 gcc_assert (!l);
2518 }
2519
2520 return 1;
2521 }
2522
2523 /* Generate several move instructions to store LEN bytes generated by
2524 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2525 pointer which will be passed as argument in every CONSTFUN call.
2526 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2527 a memset operation and false if it's a copy of a constant string.
2528 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2529 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2530 stpcpy. */
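/* For example (illustrative): a mempcpy-style expansion passes ENDP == 1 to
   get back the address just past the last byte written, whereas an
   stpcpy-style expansion passes ENDP == 2 to get the address of the final
   byte written (the NUL).  */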
2531
2532 rtx
2533 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2534 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2535 void *constfundata, unsigned int align, bool memsetp, int endp)
2536 {
2537 enum machine_mode to_addr_mode = get_address_mode (to);
2538 struct store_by_pieces_d data;
2539
2540 if (len == 0)
2541 {
2542 gcc_assert (endp != 2);
2543 return to;
2544 }
2545
2546 gcc_assert (memsetp
2547 ? SET_BY_PIECES_P (len, align)
2548 : STORE_BY_PIECES_P (len, align));
2549 data.constfun = constfun;
2550 data.constfundata = constfundata;
2551 data.len = len;
2552 data.to = to;
2553 store_by_pieces_1 (&data, align);
2554 if (endp)
2555 {
2556 rtx to1;
2557
2558 gcc_assert (!data.reverse);
2559 if (data.autinc_to)
2560 {
2561 if (endp == 2)
2562 {
2563 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2564 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2565 else
2566 data.to_addr = copy_to_mode_reg (to_addr_mode,
2567 plus_constant (to_addr_mode,
2568 data.to_addr,
2569 -1));
2570 }
2571 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2572 data.offset);
2573 }
2574 else
2575 {
2576 if (endp == 2)
2577 --data.offset;
2578 to1 = adjust_address (data.to, QImode, data.offset);
2579 }
2580 return to1;
2581 }
2582 else
2583 return data.to;
2584 }
2585
2586 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2587 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2588
2589 static void
2590 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2591 {
2592 struct store_by_pieces_d data;
2593
2594 if (len == 0)
2595 return;
2596
2597 data.constfun = clear_by_pieces_1;
2598 data.constfundata = NULL;
2599 data.len = len;
2600 data.to = to;
2601 store_by_pieces_1 (&data, align);
2602 }
2603
2604 /* Callback routine for clear_by_pieces.
2605 Return const0_rtx unconditionally. */
2606
2607 static rtx
2608 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2609 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2610 enum machine_mode mode ATTRIBUTE_UNUSED)
2611 {
2612 return const0_rtx;
2613 }
2614
2615 /* Subroutine of clear_by_pieces and store_by_pieces.
2616 Generate several move instructions to store LEN bytes of block TO. (A MEM
2617 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2618
2619 static void
2620 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2621 unsigned int align ATTRIBUTE_UNUSED)
2622 {
2623 enum machine_mode to_addr_mode = get_address_mode (data->to);
2624 rtx to_addr = XEXP (data->to, 0);
2625 unsigned int max_size = STORE_MAX_PIECES + 1;
2626 enum insn_code icode;
2627
2628 data->offset = 0;
2629 data->to_addr = to_addr;
2630 data->autinc_to
2631 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2632 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2633
2634 data->explicit_inc_to = 0;
2635 data->reverse
2636 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2637 if (data->reverse)
2638 data->offset = data->len;
2639
2640 /* If storing requires more than two move insns,
2641 copy addresses to registers (to make displacements shorter)
2642 and use post-increment if available. */
2643 if (!data->autinc_to
2644 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2645 {
2646 /* Determine the main mode we'll be using.
2647 MODE might not be used depending on the definitions of the
2648 USE_* macros below. */
2649 enum machine_mode mode ATTRIBUTE_UNUSED
2650 = widest_int_mode_for_size (max_size);
2651
2652 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2653 {
2654 data->to_addr = copy_to_mode_reg (to_addr_mode,
2655 plus_constant (to_addr_mode,
2656 to_addr,
2657 data->len));
2658 data->autinc_to = 1;
2659 data->explicit_inc_to = -1;
2660 }
2661
2662 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2663 && ! data->autinc_to)
2664 {
2665 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2666 data->autinc_to = 1;
2667 data->explicit_inc_to = 1;
2668 }
2669
2670 if ( !data->autinc_to && CONSTANT_P (to_addr))
2671 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2672 }
2673
2674 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2675
2676 /* First store what we can in the largest integer mode, then go to
2677 successively smaller modes. */
2678
2679 while (max_size > 1 && data->len > 0)
2680 {
2681 enum machine_mode mode = widest_int_mode_for_size (max_size);
2682
2683 if (mode == VOIDmode)
2684 break;
2685
2686 icode = optab_handler (mov_optab, mode);
2687 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2688 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2689
2690 max_size = GET_MODE_SIZE (mode);
2691 }
2692
2693 /* The code above should have handled everything. */
2694 gcc_assert (!data->len);
2695 }
2696
2697 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2698 with move instructions for mode MODE. GENFUN is the gen_... function
2699 to make a move insn for that mode. DATA has all the other info. */
2700
2701 static void
2702 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2703 struct store_by_pieces_d *data)
2704 {
2705 unsigned int size = GET_MODE_SIZE (mode);
2706 rtx to1, cst;
2707
2708 while (data->len >= size)
2709 {
2710 if (data->reverse)
2711 data->offset -= size;
2712
2713 if (data->autinc_to)
2714 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2715 data->offset);
2716 else
2717 to1 = adjust_address (data->to, mode, data->offset);
2718
2719 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2720 emit_insn (gen_add2_insn (data->to_addr,
2721 gen_int_mode (-(HOST_WIDE_INT) size,
2722 GET_MODE (data->to_addr))));
2723
2724 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2725 emit_insn ((*genfun) (to1, cst));
2726
2727 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2728 emit_insn (gen_add2_insn (data->to_addr,
2729 gen_int_mode (size,
2730 GET_MODE (data->to_addr))));
2731
2732 if (! data->reverse)
2733 data->offset += size;
2734
2735 data->len -= size;
2736 }
2737 }
2738 \f
2739 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2740 its length in bytes. */
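/* Roughly, the strategy below is: a single zero move when SIZE exactly
   matches a non-BLKmode OBJECT, clear_by_pieces for small constant sizes,
   the target's setmem pattern if it applies, and finally a memset libcall
   for the generic address space.  */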
2741
2742 rtx
2743 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2744 unsigned int expected_align, HOST_WIDE_INT expected_size,
2745 unsigned HOST_WIDE_INT min_size,
2746 unsigned HOST_WIDE_INT max_size,
2747 unsigned HOST_WIDE_INT probable_max_size)
2748 {
2749 enum machine_mode mode = GET_MODE (object);
2750 unsigned int align;
2751
2752 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2753
2754 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2755 just move a zero. Otherwise, do this a piece at a time. */
2756 if (mode != BLKmode
2757 && CONST_INT_P (size)
2758 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2759 {
2760 rtx zero = CONST0_RTX (mode);
2761 if (zero != NULL)
2762 {
2763 emit_move_insn (object, zero);
2764 return NULL;
2765 }
2766
2767 if (COMPLEX_MODE_P (mode))
2768 {
2769 zero = CONST0_RTX (GET_MODE_INNER (mode));
2770 if (zero != NULL)
2771 {
2772 write_complex_part (object, zero, 0);
2773 write_complex_part (object, zero, 1);
2774 return NULL;
2775 }
2776 }
2777 }
2778
2779 if (size == const0_rtx)
2780 return NULL;
2781
2782 align = MEM_ALIGN (object);
2783
2784 if (CONST_INT_P (size)
2785 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2786 clear_by_pieces (object, INTVAL (size), align);
2787 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2788 expected_align, expected_size,
2789 min_size, max_size, probable_max_size))
2790 ;
2791 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2792 return set_storage_via_libcall (object, size, const0_rtx,
2793 method == BLOCK_OP_TAILCALL);
2794 else
2795 gcc_unreachable ();
2796
2797 return NULL;
2798 }
2799
2800 rtx
2801 clear_storage (rtx object, rtx size, enum block_op_methods method)
2802 {
2803 unsigned HOST_WIDE_INT max, min = 0;
2804 if (GET_CODE (size) == CONST_INT)
2805 min = max = UINTVAL (size);
2806 else
2807 max = GET_MODE_MASK (GET_MODE (size));
2808 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2809 }
2810
2811
2812 /* A subroutine of clear_storage. Expand a call to memset.
2813 Return the return value of memset, 0 otherwise. */
2814
2815 rtx
2816 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2817 {
2818 tree call_expr, fn, object_tree, size_tree, val_tree;
2819 enum machine_mode size_mode;
2820 rtx retval;
2821
2822 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2823 place those pseudos into a VAR_DECL and use them later. */
2824
2825 object = copy_addr_to_reg (XEXP (object, 0));
2826
2827 size_mode = TYPE_MODE (sizetype);
2828 size = convert_to_mode (size_mode, size, 1);
2829 size = copy_to_mode_reg (size_mode, size);
2830
2831 /* It is incorrect to use the libcall calling conventions to call
2832 memset in this context. This could be a user call to memset and
2833 the user may wish to examine the return value from memset. For
2834 targets where libcalls and normal calls have different conventions
2835 for returning pointers, we could end up generating incorrect code. */
2836
2837 object_tree = make_tree (ptr_type_node, object);
2838 if (!CONST_INT_P (val))
2839 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2840 size_tree = make_tree (sizetype, size);
2841 val_tree = make_tree (integer_type_node, val);
2842
2843 fn = clear_storage_libcall_fn (true);
2844 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2845 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2846
2847 retval = expand_normal (call_expr);
2848
2849 return retval;
2850 }
2851
2852 /* A subroutine of set_storage_via_libcall. Create the tree node
2853 for the function we use for block clears. */
2854
2855 tree block_clear_fn;
2856
2857 void
2858 init_block_clear_fn (const char *asmspec)
2859 {
2860 if (!block_clear_fn)
2861 {
2862 tree fn, args;
2863
2864 fn = get_identifier ("memset");
2865 args = build_function_type_list (ptr_type_node, ptr_type_node,
2866 integer_type_node, sizetype,
2867 NULL_TREE);
2868
2869 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2870 DECL_EXTERNAL (fn) = 1;
2871 TREE_PUBLIC (fn) = 1;
2872 DECL_ARTIFICIAL (fn) = 1;
2873 TREE_NOTHROW (fn) = 1;
2874 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2875 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2876
2877 block_clear_fn = fn;
2878 }
2879
2880 if (asmspec)
2881 set_user_assembler_name (block_clear_fn, asmspec);
2882 }
2883
2884 static tree
2885 clear_storage_libcall_fn (int for_call)
2886 {
2887 static bool emitted_extern;
2888
2889 if (!block_clear_fn)
2890 init_block_clear_fn (NULL);
2891
2892 if (for_call && !emitted_extern)
2893 {
2894 emitted_extern = true;
2895 make_decl_rtl (block_clear_fn);
2896 }
2897
2898 return block_clear_fn;
2899 }
2900 \f
2901 /* Expand a setmem pattern; return true if successful. */
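/* A setmem expander may take 4, 6, 8 or 9 operands: destination, size,
   value and alignment always; optionally the expected alignment and size
   hints; then the minimum and maximum size bounds; and finally the
   probable maximum size.  */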
2902
2903 bool
2904 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2905 unsigned int expected_align, HOST_WIDE_INT expected_size,
2906 unsigned HOST_WIDE_INT min_size,
2907 unsigned HOST_WIDE_INT max_size,
2908 unsigned HOST_WIDE_INT probable_max_size)
2909 {
2910 /* Try the most limited insn first, because there's no point
2911 including more than one in the machine description unless
2912 the more limited one has some advantage. */
2913
2914 enum machine_mode mode;
2915
2916 if (expected_align < align)
2917 expected_align = align;
2918 if (expected_size != -1)
2919 {
2920 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2921 expected_size = max_size;
2922 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2923 expected_size = min_size;
2924 }
2925
2926 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2927 mode = GET_MODE_WIDER_MODE (mode))
2928 {
2929 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2930
2931 if (code != CODE_FOR_nothing
2932 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2933 here because if SIZE is less than the mode mask, as it is
2934 returned by the macro, it will definitely be less than the
2935 actual mode mask. Since SIZE is within the Pmode address
2936 space, we limit MODE to Pmode. */
2937 && ((CONST_INT_P (size)
2938 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2939 <= (GET_MODE_MASK (mode) >> 1)))
2940 || max_size <= (GET_MODE_MASK (mode) >> 1)
2941 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2942 {
2943 struct expand_operand ops[9];
2944 unsigned int nops;
2945
2946 nops = insn_data[(int) code].n_generator_args;
2947 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2948
2949 create_fixed_operand (&ops[0], object);
2950 /* The check above guarantees that this size conversion is valid. */
2951 create_convert_operand_to (&ops[1], size, mode, true);
2952 create_convert_operand_from (&ops[2], val, byte_mode, true);
2953 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2954 if (nops >= 6)
2955 {
2956 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2957 create_integer_operand (&ops[5], expected_size);
2958 }
2959 if (nops >= 8)
2960 {
2961 create_integer_operand (&ops[6], min_size);
2962 /* If we cannot represent the maximal size,
2963 make the parameter NULL. */
2964 if ((HOST_WIDE_INT) max_size != -1)
2965 create_integer_operand (&ops[7], max_size);
2966 else
2967 create_fixed_operand (&ops[7], NULL);
2968 }
2969 if (nops == 9)
2970 {
2971 /* If we cannot represent the maximal size,
2972 make the parameter NULL. */
2973 if ((HOST_WIDE_INT) probable_max_size != -1)
2974 create_integer_operand (&ops[8], probable_max_size);
2975 else
2976 create_fixed_operand (&ops[8], NULL);
2977 }
2978 if (maybe_expand_insn (code, nops, ops))
2979 return true;
2980 }
2981 }
2982
2983 return false;
2984 }
2985
2986 \f
2987 /* Write to one of the components of the complex value CPLX. Write VAL to
2988 the real part if IMAG_P is false, and the imaginary part if it's true. */
2989
2990 static void
2991 write_complex_part (rtx cplx, rtx val, bool imag_p)
2992 {
2993 enum machine_mode cmode;
2994 enum machine_mode imode;
2995 unsigned ibitsize;
2996
2997 if (GET_CODE (cplx) == CONCAT)
2998 {
2999 emit_move_insn (XEXP (cplx, imag_p), val);
3000 return;
3001 }
3002
3003 cmode = GET_MODE (cplx);
3004 imode = GET_MODE_INNER (cmode);
3005 ibitsize = GET_MODE_BITSIZE (imode);
3006
3007 /* For MEMs simplify_gen_subreg may generate an invalid new address
3008 because, e.g., the original address is considered mode-dependent
3009 by the target, which restricts simplify_subreg from invoking
3010 adjust_address_nv. Instead of preparing fallback support for an
3011 invalid address, we call adjust_address_nv directly. */
3012 if (MEM_P (cplx))
3013 {
3014 emit_move_insn (adjust_address_nv (cplx, imode,
3015 imag_p ? GET_MODE_SIZE (imode) : 0),
3016 val);
3017 return;
3018 }
3019
3020 /* If the sub-object is at least word sized, then we know that subregging
3021 will work. This special case is important, since store_bit_field
3022 wants to operate on integer modes, and there's rarely an OImode to
3023 correspond to TCmode. */
3024 if (ibitsize >= BITS_PER_WORD
3025 /* For hard regs we have exact predicates. Assume we can split
3026 the original object if it spans an even number of hard regs.
3027 This special case is important for SCmode on 64-bit platforms
3028 where the natural size of floating-point regs is 32-bit. */
3029 || (REG_P (cplx)
3030 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3031 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3032 {
3033 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3034 imag_p ? GET_MODE_SIZE (imode) : 0);
3035 if (part)
3036 {
3037 emit_move_insn (part, val);
3038 return;
3039 }
3040 else
3041 /* simplify_gen_subreg may fail for sub-word MEMs. */
3042 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3043 }
3044
3045 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3046 }
3047
3048 /* Extract one of the components of the complex value CPLX. Extract the
3049 real part if IMAG_P is false, and the imaginary part if it's true. */
3050
3051 static rtx
3052 read_complex_part (rtx cplx, bool imag_p)
3053 {
3054 enum machine_mode cmode, imode;
3055 unsigned ibitsize;
3056
3057 if (GET_CODE (cplx) == CONCAT)
3058 return XEXP (cplx, imag_p);
3059
3060 cmode = GET_MODE (cplx);
3061 imode = GET_MODE_INNER (cmode);
3062 ibitsize = GET_MODE_BITSIZE (imode);
3063
3064 /* Special case reads from complex constants that got spilled to memory. */
3065 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3066 {
3067 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3068 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3069 {
3070 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3071 if (CONSTANT_CLASS_P (part))
3072 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3073 }
3074 }
3075
3076 /* For MEMs simplify_gen_subreg may generate an invalid new address
3077 because, e.g., the original address is considered mode-dependent
3078 by the target, which restricts simplify_subreg from invoking
3079 adjust_address_nv. Instead of preparing fallback support for an
3080 invalid address, we call adjust_address_nv directly. */
3081 if (MEM_P (cplx))
3082 return adjust_address_nv (cplx, imode,
3083 imag_p ? GET_MODE_SIZE (imode) : 0);
3084
3085 /* If the sub-object is at least word sized, then we know that subregging
3086 will work. This special case is important, since extract_bit_field
3087 wants to operate on integer modes, and there's rarely an OImode to
3088 correspond to TCmode. */
3089 if (ibitsize >= BITS_PER_WORD
3090 /* For hard regs we have exact predicates. Assume we can split
3091 the original object if it spans an even number of hard regs.
3092 This special case is important for SCmode on 64-bit platforms
3093 where the natural size of floating-point regs is 32-bit. */
3094 || (REG_P (cplx)
3095 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3096 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3097 {
3098 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3099 imag_p ? GET_MODE_SIZE (imode) : 0);
3100 if (ret)
3101 return ret;
3102 else
3103 /* simplify_gen_subreg may fail for sub-word MEMs. */
3104 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3105 }
3106
3107 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3108 true, NULL_RTX, imode, imode);
3109 }
3110 \f
3111 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3112 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3113 represented in NEW_MODE. If FORCE is true, this will never happen, as
3114 we'll force-create a SUBREG if needed. */
3115
3116 static rtx
3117 emit_move_change_mode (enum machine_mode new_mode,
3118 enum machine_mode old_mode, rtx x, bool force)
3119 {
3120 rtx ret;
3121
3122 if (push_operand (x, GET_MODE (x)))
3123 {
3124 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3125 MEM_COPY_ATTRIBUTES (ret, x);
3126 }
3127 else if (MEM_P (x))
3128 {
3129 /* We don't have to worry about changing the address since the
3130 size in bytes is supposed to be the same. */
3131 if (reload_in_progress)
3132 {
3133 /* Copy the MEM to change the mode and move any
3134 substitutions from the old MEM to the new one. */
3135 ret = adjust_address_nv (x, new_mode, 0);
3136 copy_replacements (x, ret);
3137 }
3138 else
3139 ret = adjust_address (x, new_mode, 0);
3140 }
3141 else
3142 {
3143 /* Note that we do want simplify_subreg's behavior of validating
3144 that the new mode is ok for a hard register. If we were to use
3145 simplify_gen_subreg, we would create the subreg, but would
3146 probably run into the target not being able to implement it. */
3147 /* Except, of course, when FORCE is true, in which case this is exactly
3148 what we want; that is needed for CCmodes on some targets. */
3149 if (force)
3150 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3151 else
3152 ret = simplify_subreg (new_mode, x, old_mode, 0);
3153 }
3154
3155 return ret;
3156 }
3157
3158 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3159 an integer mode of the same size as MODE. Returns the instruction
3160 emitted, or NULL if such a move could not be generated. */
3161
3162 static rtx
3163 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3164 {
3165 enum machine_mode imode;
3166 enum insn_code code;
3167
3168 /* There must exist a mode of the exact size we require. */
3169 imode = int_mode_for_mode (mode);
3170 if (imode == BLKmode)
3171 return NULL_RTX;
3172
3173 /* The target must support moves in this mode. */
3174 code = optab_handler (mov_optab, imode);
3175 if (code == CODE_FOR_nothing)
3176 return NULL_RTX;
3177
3178 x = emit_move_change_mode (imode, mode, x, force);
3179 if (x == NULL_RTX)
3180 return NULL_RTX;
3181 y = emit_move_change_mode (imode, mode, y, force);
3182 if (y == NULL_RTX)
3183 return NULL_RTX;
3184 return emit_insn (GEN_FCN (code) (x, y));
3185 }
3186
3187 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3188 Return an equivalent MEM that does not use an auto-increment. */
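/* For instance, on a 32-bit target with no PUSH_ROUNDING adjustment, an
   SImode (mem (pre_dec (reg sp))) becomes an explicit 4-byte stack pointer
   decrement followed by a plain (mem (reg sp)); the POST_* forms instead
   end up addressing the stack pointer's previous value.  */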
3189
3190 rtx
3191 emit_move_resolve_push (enum machine_mode mode, rtx x)
3192 {
3193 enum rtx_code code = GET_CODE (XEXP (x, 0));
3194 HOST_WIDE_INT adjust;
3195 rtx temp;
3196
3197 adjust = GET_MODE_SIZE (mode);
3198 #ifdef PUSH_ROUNDING
3199 adjust = PUSH_ROUNDING (adjust);
3200 #endif
3201 if (code == PRE_DEC || code == POST_DEC)
3202 adjust = -adjust;
3203 else if (code == PRE_MODIFY || code == POST_MODIFY)
3204 {
3205 rtx expr = XEXP (XEXP (x, 0), 1);
3206 HOST_WIDE_INT val;
3207
3208 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3209 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3210 val = INTVAL (XEXP (expr, 1));
3211 if (GET_CODE (expr) == MINUS)
3212 val = -val;
3213 gcc_assert (adjust == val || adjust == -val);
3214 adjust = val;
3215 }
3216
3217 /* Do not use anti_adjust_stack, since we don't want to update
3218 stack_pointer_delta. */
3219 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3220 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3221 0, OPTAB_LIB_WIDEN);
3222 if (temp != stack_pointer_rtx)
3223 emit_move_insn (stack_pointer_rtx, temp);
3224
3225 switch (code)
3226 {
3227 case PRE_INC:
3228 case PRE_DEC:
3229 case PRE_MODIFY:
3230 temp = stack_pointer_rtx;
3231 break;
3232 case POST_INC:
3233 case POST_DEC:
3234 case POST_MODIFY:
3235 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3236 break;
3237 default:
3238 gcc_unreachable ();
3239 }
3240
3241 return replace_equiv_address (x, temp);
3242 }
3243
3244 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3245 X is known to satisfy push_operand, and MODE is known to be complex.
3246 Returns the last instruction emitted. */
3247
3248 rtx
3249 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3250 {
3251 enum machine_mode submode = GET_MODE_INNER (mode);
3252 bool imag_first;
3253
3254 #ifdef PUSH_ROUNDING
3255 unsigned int submodesize = GET_MODE_SIZE (submode);
3256
3257 /* In case we output to the stack, but the size is not one the machine
3258 can push exactly, we need to use move instructions. */
3259 if (PUSH_ROUNDING (submodesize) != submodesize)
3260 {
3261 x = emit_move_resolve_push (mode, x);
3262 return emit_move_insn (x, y);
3263 }
3264 #endif
3265
3266 /* Note that the real part always precedes the imag part in memory
3267 regardless of machine's endianness. */
3268 switch (GET_CODE (XEXP (x, 0)))
3269 {
3270 case PRE_DEC:
3271 case POST_DEC:
3272 imag_first = true;
3273 break;
3274 case PRE_INC:
3275 case POST_INC:
3276 imag_first = false;
3277 break;
3278 default:
3279 gcc_unreachable ();
3280 }
3281
3282 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3283 read_complex_part (y, imag_first));
3284 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3285 read_complex_part (y, !imag_first));
3286 }
3287
3288 /* A subroutine of emit_move_complex. Perform the move from Y to X
3289 via two moves of the parts. Returns the last instruction emitted. */
3290
3291 rtx
3292 emit_move_complex_parts (rtx x, rtx y)
3293 {
3294 /* Show the output dies here. This is necessary for SUBREGs
3295 of pseudos since we cannot track their lifetimes correctly;
3296 hard regs shouldn't appear here except as return values. */
3297 if (!reload_completed && !reload_in_progress
3298 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3299 emit_clobber (x);
3300
3301 write_complex_part (x, read_complex_part (y, false), false);
3302 write_complex_part (x, read_complex_part (y, true), true);
3303
3304 return get_last_insn ();
3305 }
3306
3307 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3308 MODE is known to be complex. Returns the last instruction emitted. */
3309
3310 static rtx
3311 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3312 {
3313 bool try_int;
3314
3315 /* Need to take special care for pushes, to maintain proper ordering
3316 of the data, and possibly extra padding. */
3317 if (push_operand (x, mode))
3318 return emit_move_complex_push (mode, x, y);
3319
3320 /* See if we can coerce the target into moving both values at once, except
3321 for floating point where we favor moving as parts if this is easy. */
3322 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3323 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3324 && !(REG_P (x)
3325 && HARD_REGISTER_P (x)
3326 && hard_regno_nregs[REGNO (x)][mode] == 1)
3327 && !(REG_P (y)
3328 && HARD_REGISTER_P (y)
3329 && hard_regno_nregs[REGNO (y)][mode] == 1))
3330 try_int = false;
3331 /* Not possible if the values are inherently not adjacent. */
3332 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3333 try_int = false;
3334 /* Is possible if both are registers (or subregs of registers). */
3335 else if (register_operand (x, mode) && register_operand (y, mode))
3336 try_int = true;
3337 /* If one of the operands is a memory, and alignment constraints
3338 are friendly enough, we may be able to do combined memory operations.
3339 We do not attempt this if Y is a constant because that combination is
3340 usually better with the by-parts thing below. */
3341 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3342 && (!STRICT_ALIGNMENT
3343 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3344 try_int = true;
3345 else
3346 try_int = false;
3347
3348 if (try_int)
3349 {
3350 rtx ret;
3351
3352 /* For memory to memory moves, optimal behavior can be had with the
3353 existing block move logic. */
3354 if (MEM_P (x) && MEM_P (y))
3355 {
3356 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3357 BLOCK_OP_NO_LIBCALL);
3358 return get_last_insn ();
3359 }
3360
3361 ret = emit_move_via_integer (mode, x, y, true);
3362 if (ret)
3363 return ret;
3364 }
3365
3366 return emit_move_complex_parts (x, y);
3367 }
3368
3369 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3370 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3371
3372 static rtx
3373 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3374 {
3375 rtx ret;
3376
3377 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3378 if (mode != CCmode)
3379 {
3380 enum insn_code code = optab_handler (mov_optab, CCmode);
3381 if (code != CODE_FOR_nothing)
3382 {
3383 x = emit_move_change_mode (CCmode, mode, x, true);
3384 y = emit_move_change_mode (CCmode, mode, y, true);
3385 return emit_insn (GEN_FCN (code) (x, y));
3386 }
3387 }
3388
3389 /* Otherwise, find the MODE_INT mode of the same width. */
3390 ret = emit_move_via_integer (mode, x, y, false);
3391 gcc_assert (ret != NULL);
3392 return ret;
3393 }
3394
3395 /* Return true if word I of OP lies entirely in the
3396 undefined bits of a paradoxical subreg. */
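/* For example, on a typical 32-bit little-endian target, word 1 of
   (subreg:DI (reg:SI x) 0) is entirely undefined, since only the low word
   is backed by the SImode register.  */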
3397
3398 static bool
3399 undefined_operand_subword_p (const_rtx op, int i)
3400 {
3401 enum machine_mode innermode, innermostmode;
3402 int offset;
3403 if (GET_CODE (op) != SUBREG)
3404 return false;
3405 innermode = GET_MODE (op);
3406 innermostmode = GET_MODE (SUBREG_REG (op));
3407 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3408 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3409 memory, except for a paradoxical subreg where we define
3410 SUBREG_BYTE to be 0; undo this exception as in
3411 simplify_subreg. */
3412 if (SUBREG_BYTE (op) == 0
3413 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3414 {
3415 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3416 if (WORDS_BIG_ENDIAN)
3417 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3418 if (BYTES_BIG_ENDIAN)
3419 offset += difference % UNITS_PER_WORD;
3420 }
3421 if (offset >= GET_MODE_SIZE (innermostmode)
3422 || offset <= -GET_MODE_SIZE (word_mode))
3423 return true;
3424 return false;
3425 }
3426
3427 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3428 MODE is any multi-word or full-word mode that lacks a move_insn
3429 pattern. Note that you will get better code if you define such
3430 patterns, even if they must turn into multiple assembler instructions. */
3431
3432 static rtx
3433 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3434 {
3435 rtx last_insn = 0;
3436 rtx seq, inner;
3437 bool need_clobber;
3438 int i;
3439
3440 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3441
3442 /* If X is a push on the stack, do the push now and replace
3443 X with a reference to the stack pointer. */
3444 if (push_operand (x, mode))
3445 x = emit_move_resolve_push (mode, x);
3446
3447 /* If we are in reload, see if either operand is a MEM whose address
3448 is scheduled for replacement. */
3449 if (reload_in_progress && MEM_P (x)
3450 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3451 x = replace_equiv_address_nv (x, inner);
3452 if (reload_in_progress && MEM_P (y)
3453 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3454 y = replace_equiv_address_nv (y, inner);
3455
3456 start_sequence ();
3457
3458 need_clobber = false;
3459 for (i = 0;
3460 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3461 i++)
3462 {
3463 rtx xpart = operand_subword (x, i, 1, mode);
3464 rtx ypart;
3465
3466 /* Do not generate code for a move if it would come entirely
3467 from the undefined bits of a paradoxical subreg. */
3468 if (undefined_operand_subword_p (y, i))
3469 continue;
3470
3471 ypart = operand_subword (y, i, 1, mode);
3472
3473 /* If we can't get a part of Y, put Y into memory if it is a
3474 constant. Otherwise, force it into a register. Then we must
3475 be able to get a part of Y. */
3476 if (ypart == 0 && CONSTANT_P (y))
3477 {
3478 y = use_anchored_address (force_const_mem (mode, y));
3479 ypart = operand_subword (y, i, 1, mode);
3480 }
3481 else if (ypart == 0)
3482 ypart = operand_subword_force (y, i, mode);
3483
3484 gcc_assert (xpart && ypart);
3485
3486 need_clobber |= (GET_CODE (xpart) == SUBREG);
3487
3488 last_insn = emit_move_insn (xpart, ypart);
3489 }
3490
3491 seq = get_insns ();
3492 end_sequence ();
3493
3494 /* Show the output dies here. This is necessary for SUBREGs
3495 of pseudos since we cannot track their lifetimes correctly;
3496 hard regs shouldn't appear here except as return values.
3497 We never want to emit such a clobber after reload. */
3498 if (x != y
3499 && ! (reload_in_progress || reload_completed)
3500 && need_clobber != 0)
3501 emit_clobber (x);
3502
3503 emit_insn (seq);
3504
3505 return last_insn;
3506 }
3507
3508 /* Low level part of emit_move_insn.
3509 Called just like emit_move_insn, but assumes X and Y
3510 are basically valid. */
3511
3512 rtx
3513 emit_move_insn_1 (rtx x, rtx y)
3514 {
3515 enum machine_mode mode = GET_MODE (x);
3516 enum insn_code code;
3517
3518 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3519
3520 code = optab_handler (mov_optab, mode);
3521 if (code != CODE_FOR_nothing)
3522 return emit_insn (GEN_FCN (code) (x, y));
3523
3524 /* Expand complex moves by moving real part and imag part. */
3525 if (COMPLEX_MODE_P (mode))
3526 return emit_move_complex (mode, x, y);
3527
3528 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3529 || ALL_FIXED_POINT_MODE_P (mode))
3530 {
3531 rtx result = emit_move_via_integer (mode, x, y, true);
3532
3533 /* If we can't find an integer mode, use multi words. */
3534 if (result)
3535 return result;
3536 else
3537 return emit_move_multi_word (mode, x, y);
3538 }
3539
3540 if (GET_MODE_CLASS (mode) == MODE_CC)
3541 return emit_move_ccmode (mode, x, y);
3542
3543 /* Try using a move pattern for the corresponding integer mode. This is
3544 only safe when simplify_subreg can convert MODE constants into integer
3545 constants. At present, it can only do this reliably if the value
3546 fits within a HOST_WIDE_INT. */
3547 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3548 {
3549 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3550
3551 if (ret)
3552 {
3553 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3554 return ret;
3555 }
3556 }
3557
3558 return emit_move_multi_word (mode, x, y);
3559 }
3560
3561 /* Generate code to copy Y into X.
3562 Both Y and X must have the same mode, except that
3563 Y can be a constant with VOIDmode.
3564 This mode cannot be BLKmode; use emit_block_move for that.
3565
3566 Return the last instruction emitted. */
3567
3568 rtx
3569 emit_move_insn (rtx x, rtx y)
3570 {
3571 enum machine_mode mode = GET_MODE (x);
3572 rtx y_cst = NULL_RTX;
3573 rtx last_insn, set;
3574
3575 gcc_assert (mode != BLKmode
3576 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3577
3578 if (CONSTANT_P (y))
3579 {
3580 if (optimize
3581 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3582 && (last_insn = compress_float_constant (x, y)))
3583 return last_insn;
3584
3585 y_cst = y;
3586
3587 if (!targetm.legitimate_constant_p (mode, y))
3588 {
3589 y = force_const_mem (mode, y);
3590
3591 /* If the target's cannot_force_const_mem prevented the spill,
3592 assume that the target's move expanders will also take care
3593 of the non-legitimate constant. */
3594 if (!y)
3595 y = y_cst;
3596 else
3597 y = use_anchored_address (y);
3598 }
3599 }
3600
3601 /* If X or Y are memory references, verify that their addresses are valid
3602 for the machine. */
3603 if (MEM_P (x)
3604 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3605 MEM_ADDR_SPACE (x))
3606 && ! push_operand (x, GET_MODE (x))))
3607 x = validize_mem (x);
3608
3609 if (MEM_P (y)
3610 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3611 MEM_ADDR_SPACE (y)))
3612 y = validize_mem (y);
3613
3614 gcc_assert (mode != BLKmode);
3615
3616 last_insn = emit_move_insn_1 (x, y);
3617
3618 if (y_cst && REG_P (x)
3619 && (set = single_set (last_insn)) != NULL_RTX
3620 && SET_DEST (set) == x
3621 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3622 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3623
3624 return last_insn;
3625 }
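/* A minimal usage sketch (illustrative only): copy a constant into a freshly
   created pseudo. The SImode choice and the value 42 are arbitrary.

     rtx reg = gen_reg_rtx (SImode);
     rtx last = emit_move_insn (reg, gen_int_mode (42, SImode));

   The source here is a VOIDmode CONST_INT, which the assertion above
   explicitly allows; LAST is the final insn emitted, and it carries a
   REG_EQUAL note when the original constant had to be replaced, e.g. by a
   constant-pool reference.  */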
3626
3627 /* If Y is representable exactly in a narrower mode, and the target can
3628 perform the extension directly from constant or memory, then emit the
3629 move as an extension. */
3630
3631 static rtx
3632 compress_float_constant (rtx x, rtx y)
3633 {
3634 enum machine_mode dstmode = GET_MODE (x);
3635 enum machine_mode orig_srcmode = GET_MODE (y);
3636 enum machine_mode srcmode;
3637 REAL_VALUE_TYPE r;
3638 int oldcost, newcost;
3639 bool speed = optimize_insn_for_speed_p ();
3640
3641 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3642
3643 if (targetm.legitimate_constant_p (dstmode, y))
3644 oldcost = set_src_cost (y, speed);
3645 else
3646 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3647
3648 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3649 srcmode != orig_srcmode;
3650 srcmode = GET_MODE_WIDER_MODE (srcmode))
3651 {
3652 enum insn_code ic;
3653 rtx trunc_y, last_insn;
3654
3655 /* Skip if the target can't extend this way. */
3656 ic = can_extend_p (dstmode, srcmode, 0);
3657 if (ic == CODE_FOR_nothing)
3658 continue;
3659
3660 /* Skip if the narrowed value isn't exact. */
3661 if (! exact_real_truncate (srcmode, &r))
3662 continue;
3663
3664 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3665
3666 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3667 {
3668 /* Skip if the target needs extra instructions to perform
3669 the extension. */
3670 if (!insn_operand_matches (ic, 1, trunc_y))
3671 continue;
3672 /* This is valid, but may not be cheaper than the original. */
3673 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3674 speed);
3675 if (oldcost < newcost)
3676 continue;
3677 }
3678 else if (float_extend_from_mem[dstmode][srcmode])
3679 {
3680 trunc_y = force_const_mem (srcmode, trunc_y);
3681 /* This is valid, but may not be cheaper than the original. */
3682 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3683 speed);
3684 if (oldcost < newcost)
3685 continue;
3686 trunc_y = validize_mem (trunc_y);
3687 }
3688 else
3689 continue;
3690
3691 /* For CSE's benefit, force the compressed constant pool entry
3692 into a new pseudo. This constant may be used in different modes,
3693 and if not, combine will put things back together for us. */
3694 trunc_y = force_reg (srcmode, trunc_y);
3695
3696 /* If x is a hard register, perform the extension into a pseudo,
3697 so that e.g. stack realignment code is aware of it. */
3698 rtx target = x;
3699 if (REG_P (x) && HARD_REGISTER_P (x))
3700 target = gen_reg_rtx (dstmode);
3701
3702 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3703 last_insn = get_last_insn ();
3704
3705 if (REG_P (target))
3706 set_unique_reg_note (last_insn, REG_EQUAL, y);
3707
3708 if (target != x)
3709 return emit_move_insn (x, target);
3710 return last_insn;
3711 }
3712
3713 return NULL_RTX;
3714 }
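/* For example, storing the DFmode constant 1.5 on a target that provides
   extendsfdf2: 1.5 truncates exactly to SFmode, so the move can be emitted
   as (float_extend:DF ...) of the SFmode constant (or of an SFmode
   constant-pool slot), provided its rtx cost is no higher than that of the
   original DFmode constant. A value such as 0.1, whose DFmode approximation
   does not truncate exactly to SFmode, fails the exact_real_truncate check
   and is handled by the ordinary move path.  */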
3715 \f
3716 /* Pushing data onto the stack. */
3717
3718 /* Push a block of length SIZE (perhaps variable)
3719 and return an rtx to address the beginning of the block.
3720 The value may be virtual_outgoing_args_rtx.
3721
3722 EXTRA is the number of bytes of padding to push in addition to SIZE.
3723 BELOW nonzero means this padding comes at low addresses;
3724 otherwise, the padding comes at high addresses. */
3725
3726 rtx
3727 push_block (rtx size, int extra, int below)
3728 {
3729 rtx temp;
3730
3731 size = convert_modes (Pmode, ptr_mode, size, 1);
3732 if (CONSTANT_P (size))
3733 anti_adjust_stack (plus_constant (Pmode, size, extra));
3734 else if (REG_P (size) && extra == 0)
3735 anti_adjust_stack (size);
3736 else
3737 {
3738 temp = copy_to_mode_reg (Pmode, size);
3739 if (extra != 0)
3740 temp = expand_binop (Pmode, add_optab, temp,
3741 gen_int_mode (extra, Pmode),
3742 temp, 0, OPTAB_LIB_WIDEN);
3743 anti_adjust_stack (temp);
3744 }
3745
3746 #ifndef STACK_GROWS_DOWNWARD
3747 if (0)
3748 #else
3749 if (1)
3750 #endif
3751 {
3752 temp = virtual_outgoing_args_rtx;
3753 if (extra != 0 && below)
3754 temp = plus_constant (Pmode, temp, extra);
3755 }
3756 else
3757 {
3758 if (CONST_INT_P (size))
3759 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3760 -INTVAL (size) - (below ? 0 : extra));
3761 else if (extra != 0 && !below)
3762 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3763 negate_rtx (Pmode, plus_constant (Pmode, size,
3764 extra)));
3765 else
3766 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3767 negate_rtx (Pmode, size));
3768 }
3769
3770 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3771 }
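/* A minimal usage sketch (illustrative only), mirroring the way
   emit_push_insn uses this routine later in this file: allocate 16 bytes of
   stack space and copy an existing BLKmode MEM (SRC here is an assumption)
   into it.

     rtx addr = push_block (GEN_INT (16), 0, 0);
     rtx dst = gen_rtx_MEM (BLKmode, addr);
     emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_CALL_PARM);  */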
3772
3773 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3774
3775 static rtx
3776 mem_autoinc_base (rtx mem)
3777 {
3778 if (MEM_P (mem))
3779 {
3780 rtx addr = XEXP (mem, 0);
3781 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3782 return XEXP (addr, 0);
3783 }
3784 return NULL;
3785 }
3786
3787 /* A utility routine used here, in reload, and in try_split. The insns
3788 after PREV up to and including LAST are known to adjust the stack,
3789 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3790 placing notes as appropriate. PREV may be NULL, indicating the
3791 entire insn sequence prior to LAST should be scanned.
3792
3793 The set of allowed stack pointer modifications is small:
3794 (1) One or more auto-inc style memory references (aka pushes),
3795 (2) One or more addition/subtraction with the SP as destination,
3796 (3) A single move insn with the SP as destination,
3797 (4) A call_pop insn,
3798 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3799
3800 Insns in the sequence that do not modify the SP are ignored,
3801 except for noreturn calls.
3802
3803 The return value is the amount of adjustment that can be trivially
3804 verified, via immediate operand or auto-inc. If the adjustment
3805 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3806
3807 HOST_WIDE_INT
3808 find_args_size_adjust (rtx insn)
3809 {
3810 rtx dest, set, pat;
3811 int i;
3812
3813 pat = PATTERN (insn);
3814 set = NULL;
3815
3816 /* Look for a call_pop pattern. */
3817 if (CALL_P (insn))
3818 {
3819 /* We have to allow non-call_pop patterns for the case
3820 of emit_single_push_insn of a TLS address. */
3821 if (GET_CODE (pat) != PARALLEL)
3822 return 0;
3823
3824 /* All call_pop patterns have a stack pointer adjust in the parallel.
3825 The call itself is always first, and the stack adjust is
3826 usually last, so search from the end. */
3827 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3828 {
3829 set = XVECEXP (pat, 0, i);
3830 if (GET_CODE (set) != SET)
3831 continue;
3832 dest = SET_DEST (set);
3833 if (dest == stack_pointer_rtx)
3834 break;
3835 }
3836 /* We'd better have found the stack pointer adjust. */
3837 if (i == 0)
3838 return 0;
3839 /* Fall through to process the extracted SET and DEST
3840 as if it were a standalone insn. */
3841 }
3842 else if (GET_CODE (pat) == SET)
3843 set = pat;
3844 else if ((set = single_set (insn)) != NULL)
3845 ;
3846 else if (GET_CODE (pat) == PARALLEL)
3847 {
3848 /* ??? Some older ports use a parallel with a stack adjust
3849 and a store for a PUSH_ROUNDING pattern, rather than a
3850 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3851 /* ??? See h8300 and m68k, pushqi1. */
3852 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3853 {
3854 set = XVECEXP (pat, 0, i);
3855 if (GET_CODE (set) != SET)
3856 continue;
3857 dest = SET_DEST (set);
3858 if (dest == stack_pointer_rtx)
3859 break;
3860
3861 /* We do not expect an auto-inc of the sp in the parallel. */
3862 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3863 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3864 != stack_pointer_rtx);
3865 }
3866 if (i < 0)
3867 return 0;
3868 }
3869 else
3870 return 0;
3871
3872 dest = SET_DEST (set);
3873
3874 /* Look for direct modifications of the stack pointer. */
3875 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3876 {
3877 /* Look for a trivial adjustment, otherwise assume nothing. */
3878 /* Note that the SPU restore_stack_block pattern refers to
3879 the stack pointer in V4SImode. Consider that non-trivial. */
3880 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3881 && GET_CODE (SET_SRC (set)) == PLUS
3882 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3883 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3884 return INTVAL (XEXP (SET_SRC (set), 1));
3885 /* ??? Reload can generate no-op moves, which will be cleaned
3886 up later. Recognize it and continue searching. */
3887 else if (rtx_equal_p (dest, SET_SRC (set)))
3888 return 0;
3889 else
3890 return HOST_WIDE_INT_MIN;
3891 }
3892 else
3893 {
3894 rtx mem, addr;
3895
3896 /* Otherwise only think about autoinc patterns. */
3897 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3898 {
3899 mem = dest;
3900 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3901 != stack_pointer_rtx);
3902 }
3903 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3904 mem = SET_SRC (set);
3905 else
3906 return 0;
3907
3908 addr = XEXP (mem, 0);
3909 switch (GET_CODE (addr))
3910 {
3911 case PRE_INC:
3912 case POST_INC:
3913 return GET_MODE_SIZE (GET_MODE (mem));
3914 case PRE_DEC:
3915 case POST_DEC:
3916 return -GET_MODE_SIZE (GET_MODE (mem));
3917 case PRE_MODIFY:
3918 case POST_MODIFY:
3919 addr = XEXP (addr, 1);
3920 gcc_assert (GET_CODE (addr) == PLUS);
3921 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3922 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3923 return INTVAL (XEXP (addr, 1));
3924 default:
3925 gcc_unreachable ();
3926 }
3927 }
3928 }
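/* Concrete examples of the values returned above (assuming 32-bit SImode):

     (set (mem:SI (pre_dec (reg sp))) (reg:SI 0))        ->  -4
     (set (reg sp) (plus (reg sp) (const_int -16)))      -> -16
     an SP modification that cannot be analyzed          -> HOST_WIDE_INT_MIN  */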
3929
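/* Add a REG_ARGS_SIZE note to each insn between PREV and LAST that adjusts
   the stack (see the comment before find_args_size_adjust for the allowed
   forms), given that the args size after LAST is END_ARGS_SIZE.  Return the
   args size in effect just after PREV, or INT_MIN if some adjustment in the
   sequence could not be analyzed.  */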
3930 int
3931 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3932 {
3933 int args_size = end_args_size;
3934 bool saw_unknown = false;
3935 rtx insn;
3936
3937 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3938 {
3939 HOST_WIDE_INT this_delta;
3940
3941 if (!NONDEBUG_INSN_P (insn))
3942 continue;
3943
3944 this_delta = find_args_size_adjust (insn);
3945 if (this_delta == 0)
3946 {
3947 if (!CALL_P (insn)
3948 || ACCUMULATE_OUTGOING_ARGS
3949 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3950 continue;
3951 }
3952
3953 gcc_assert (!saw_unknown);
3954 if (this_delta == HOST_WIDE_INT_MIN)
3955 saw_unknown = true;
3956
3957 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3958 #ifdef STACK_GROWS_DOWNWARD
3959 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3960 #endif
3961 args_size -= this_delta;
3962 }
3963
3964 return saw_unknown ? INT_MIN : args_size;
3965 }
3966
3967 #ifdef PUSH_ROUNDING
3968 /* Emit single push insn. */
3969
3970 static void
3971 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3972 {
3973 rtx dest_addr;
3974 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3975 rtx dest;
3976 enum insn_code icode;
3977
3978 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3979 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3980 a MEM representing the push operation to the move expander. */
3981 icode = optab_handler (push_optab, mode);
3982 if (icode != CODE_FOR_nothing)
3983 {
3984 struct expand_operand ops[1];
3985
3986 create_input_operand (&ops[0], x, mode);
3987 if (maybe_expand_insn (icode, 1, ops))
3988 return;
3989 }
3990 if (GET_MODE_SIZE (mode) == rounded_size)
3991 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3992 /* If we are to pad downward, adjust the stack pointer first and
3993 then store X into the stack location using an offset. This is
3994 because emit_move_insn does not know how to pad; it does not have
3995 access to type. */
3996 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3997 {
3998 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3999 HOST_WIDE_INT offset;
4000
4001 emit_move_insn (stack_pointer_rtx,
4002 expand_binop (Pmode,
4003 #ifdef STACK_GROWS_DOWNWARD
4004 sub_optab,
4005 #else
4006 add_optab,
4007 #endif
4008 stack_pointer_rtx,
4009 gen_int_mode (rounded_size, Pmode),
4010 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4011
4012 offset = (HOST_WIDE_INT) padding_size;
4013 #ifdef STACK_GROWS_DOWNWARD
4014 if (STACK_PUSH_CODE == POST_DEC)
4015 /* We have already decremented the stack pointer, so get the
4016 previous value. */
4017 offset += (HOST_WIDE_INT) rounded_size;
4018 #else
4019 if (STACK_PUSH_CODE == POST_INC)
4020 /* We have already incremented the stack pointer, so get the
4021 previous value. */
4022 offset -= (HOST_WIDE_INT) rounded_size;
4023 #endif
4024 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4025 gen_int_mode (offset, Pmode));
4026 }
4027 else
4028 {
4029 #ifdef STACK_GROWS_DOWNWARD
4030 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4031 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4032 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4033 Pmode));
4034 #else
4035 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4036 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4037 gen_int_mode (rounded_size, Pmode));
4038 #endif
4039 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4040 }
4041
4042 dest = gen_rtx_MEM (mode, dest_addr);
4043
4044 if (type != 0)
4045 {
4046 set_mem_attributes (dest, type, 1);
4047
4048 if (cfun->tail_call_marked)
4049 /* Function incoming arguments may overlap with sibling call
4050 outgoing arguments and we cannot allow reordering of reads
4051 from function arguments with stores to outgoing arguments
4052 of sibling calls. */
4053 set_mem_alias_set (dest, 0);
4054 }
4055 emit_move_insn (dest, x);
4056 }
4057
4058 /* Emit and annotate a single push insn. */
4059
4060 static void
4061 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4062 {
4063 int delta, old_delta = stack_pointer_delta;
4064 rtx prev = get_last_insn ();
4065 rtx last;
4066
4067 emit_single_push_insn_1 (mode, x, type);
4068
4069 last = get_last_insn ();
4070
4071 /* Notice the common case where we emitted exactly one insn. */
4072 if (PREV_INSN (last) == prev)
4073 {
4074 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4075 return;
4076 }
4077
4078 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4079 gcc_assert (delta == INT_MIN || delta == old_delta);
4080 }
4081 #endif
4082
4083 /* Generate code to push X onto the stack, assuming it has mode MODE and
4084 type TYPE.
4085 MODE is redundant except when X is a CONST_INT (since they don't
4086 carry mode info).
4087 SIZE is an rtx for the size of data to be copied (in bytes),
4088 needed only if X is BLKmode.
4089
4090 ALIGN (in bits) is maximum alignment we can assume.
4091
4092 If PARTIAL and REG are both nonzero, then copy that many of the first
4093 bytes of X into registers starting with REG, and push the rest of X.
4094 The amount of space pushed is decreased by PARTIAL bytes.
4095 REG must be a hard register in this case.
4096 If REG is zero but PARTIAL is not, take all other actions for an
4097 argument partially in registers, but do not actually load any
4098 registers.
4099
4100 EXTRA is the amount in bytes of extra space to leave next to this arg.
4101 This is ignored if an argument block has already been allocated.
4102
4103 On a machine that lacks real push insns, ARGS_ADDR is the address of
4104 the bottom of the argument block for this call. We use indexing off there
4105 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
4106 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4107
4108 ARGS_SO_FAR is the size of args previously pushed for this call.
4109
4110 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4111 for arguments passed in registers. If nonzero, it will be the number
4112 of bytes required. */
4113
4114 void
4115 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4116 unsigned int align, int partial, rtx reg, int extra,
4117 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4118 rtx alignment_pad)
4119 {
4120 rtx xinner;
4121 enum direction stack_direction
4122 #ifdef STACK_GROWS_DOWNWARD
4123 = downward;
4124 #else
4125 = upward;
4126 #endif
4127
4128 /* Decide where to pad the argument: `downward' for below,
4129 `upward' for above, or `none' for don't pad it.
4130 Default is below for small data on big-endian machines; else above. */
4131 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4132
4133 /* Invert direction if stack is post-decrement.
4134 FIXME: why? */
4135 if (STACK_PUSH_CODE == POST_DEC)
4136 if (where_pad != none)
4137 where_pad = (where_pad == downward ? upward : downward);
4138
4139 xinner = x;
4140
4141 if (mode == BLKmode
4142 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4143 {
4144 /* Copy a block into the stack, entirely or partially. */
4145
4146 rtx temp;
4147 int used;
4148 int offset;
4149 int skip;
4150
4151 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4152 used = partial - offset;
4153
4154 if (mode != BLKmode)
4155 {
4156 /* A value is to be stored in an insufficiently aligned
4157 stack slot; copy via a suitably aligned slot if
4158 necessary. */
4159 size = GEN_INT (GET_MODE_SIZE (mode));
4160 if (!MEM_P (xinner))
4161 {
4162 temp = assign_temp (type, 1, 1);
4163 emit_move_insn (temp, xinner);
4164 xinner = temp;
4165 }
4166 }
4167
4168 gcc_assert (size);
4169
4170 /* USED is now the # of bytes we need not copy to the stack
4171 because registers will take care of them. */
4172
4173 if (partial != 0)
4174 xinner = adjust_address (xinner, BLKmode, used);
4175
4176 /* If the partial register-part of the arg counts in its stack size,
4177 skip the part of stack space corresponding to the registers.
4178 Otherwise, start copying to the beginning of the stack space,
4179 by setting SKIP to 0. */
4180 skip = (reg_parm_stack_space == 0) ? 0 : used;
4181
4182 #ifdef PUSH_ROUNDING
4183 /* Do it with several push insns if that doesn't take lots of insns
4184 and if there is no difficulty with push insns that skip bytes
4185 on the stack for alignment purposes. */
4186 if (args_addr == 0
4187 && PUSH_ARGS
4188 && CONST_INT_P (size)
4189 && skip == 0
4190 && MEM_ALIGN (xinner) >= align
4191 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4192 /* Here we avoid the case of a structure whose weak alignment
4193 forces many pushes of a small amount of data,
4194 and such small pushes do rounding that causes trouble. */
4195 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4196 || align >= BIGGEST_ALIGNMENT
4197 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4198 == (align / BITS_PER_UNIT)))
4199 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4200 {
4201 /* Push padding now if padding above and stack grows down,
4202 or if padding below and stack grows up.
4203 But if space already allocated, this has already been done. */
4204 if (extra && args_addr == 0
4205 && where_pad != none && where_pad != stack_direction)
4206 anti_adjust_stack (GEN_INT (extra));
4207
4208 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4209 }
4210 else
4211 #endif /* PUSH_ROUNDING */
4212 {
4213 rtx target;
4214
4215 /* Otherwise make space on the stack and copy the data
4216 to the address of that space. */
4217
4218 /* Deduct words put into registers from the size we must copy. */
4219 if (partial != 0)
4220 {
4221 if (CONST_INT_P (size))
4222 size = GEN_INT (INTVAL (size) - used);
4223 else
4224 size = expand_binop (GET_MODE (size), sub_optab, size,
4225 gen_int_mode (used, GET_MODE (size)),
4226 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4227 }
4228
4229 /* Get the address of the stack space.
4230 In this case, we do not deal with EXTRA separately.
4231 A single stack adjust will do. */
4232 if (! args_addr)
4233 {
4234 temp = push_block (size, extra, where_pad == downward);
4235 extra = 0;
4236 }
4237 else if (CONST_INT_P (args_so_far))
4238 temp = memory_address (BLKmode,
4239 plus_constant (Pmode, args_addr,
4240 skip + INTVAL (args_so_far)));
4241 else
4242 temp = memory_address (BLKmode,
4243 plus_constant (Pmode,
4244 gen_rtx_PLUS (Pmode,
4245 args_addr,
4246 args_so_far),
4247 skip));
4248
4249 if (!ACCUMULATE_OUTGOING_ARGS)
4250 {
4251 /* If the source is referenced relative to the stack pointer,
4252 copy it to another register to stabilize it. We do not need
4253 to do this if we know that we won't be changing sp. */
4254
4255 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4256 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4257 temp = copy_to_reg (temp);
4258 }
4259
4260 target = gen_rtx_MEM (BLKmode, temp);
4261
4262 /* We do *not* set_mem_attributes here, because incoming arguments
4263 may overlap with sibling call outgoing arguments and we cannot
4264 allow reordering of reads from function arguments with stores
4265 to outgoing arguments of sibling calls. We do, however, want
4266 to record the alignment of the stack slot. */
4267 /* ALIGN may well be better aligned than TYPE, e.g. due to
4268 PARM_BOUNDARY. Assume the caller isn't lying. */
4269 set_mem_align (target, align);
4270
4271 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4272 }
4273 }
4274 else if (partial > 0)
4275 {
4276 /* Scalar partly in registers. */
4277
4278 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4279 int i;
4280 int not_stack;
4281 /* # bytes of start of argument
4282 that we must make space for but need not store. */
4283 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4284 int args_offset = INTVAL (args_so_far);
4285 int skip;
4286
4287 /* Push padding now if padding above and stack grows down,
4288 or if padding below and stack grows up.
4289 But if space already allocated, this has already been done. */
4290 if (extra && args_addr == 0
4291 && where_pad != none && where_pad != stack_direction)
4292 anti_adjust_stack (GEN_INT (extra));
4293
4294 /* If we make space by pushing it, we might as well push
4295 the real data. Otherwise, we can leave OFFSET nonzero
4296 and leave the space uninitialized. */
4297 if (args_addr == 0)
4298 offset = 0;
4299
4300 /* Now NOT_STACK gets the number of words that we don't need to
4301 allocate on the stack. Convert OFFSET to words too. */
4302 not_stack = (partial - offset) / UNITS_PER_WORD;
4303 offset /= UNITS_PER_WORD;
4304
4305 /* If the partial register-part of the arg counts in its stack size,
4306 skip the part of stack space corresponding to the registers.
4307 Otherwise, start copying to the beginning of the stack space,
4308 by setting SKIP to 0. */
4309 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4310
4311 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4312 x = validize_mem (force_const_mem (mode, x));
4313
4314 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4315 SUBREGs of such registers are not allowed. */
4316 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4317 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4318 x = copy_to_reg (x);
4319
4320 /* Loop over all the words allocated on the stack for this arg. */
4321 /* We can do it by words, because any scalar bigger than a word
4322 has a size a multiple of a word. */
4323 for (i = size - 1; i >= not_stack; i--)
4324 if (i >= not_stack + offset)
4325 emit_push_insn (operand_subword_force (x, i, mode),
4326 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4327 0, args_addr,
4328 GEN_INT (args_offset + ((i - not_stack + skip)
4329 * UNITS_PER_WORD)),
4330 reg_parm_stack_space, alignment_pad);
4331 }
4332 else
4333 {
4334 rtx addr;
4335 rtx dest;
4336
4337 /* Push padding now if padding above and stack grows down,
4338 or if padding below and stack grows up.
4339 But if space already allocated, this has already been done. */
4340 if (extra && args_addr == 0
4341 && where_pad != none && where_pad != stack_direction)
4342 anti_adjust_stack (GEN_INT (extra));
4343
4344 #ifdef PUSH_ROUNDING
4345 if (args_addr == 0 && PUSH_ARGS)
4346 emit_single_push_insn (mode, x, type);
4347 else
4348 #endif
4349 {
4350 if (CONST_INT_P (args_so_far))
4351 addr
4352 = memory_address (mode,
4353 plus_constant (Pmode, args_addr,
4354 INTVAL (args_so_far)));
4355 else
4356 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4357 args_so_far));
4358 dest = gen_rtx_MEM (mode, addr);
4359
4360 /* We do *not* set_mem_attributes here, because incoming arguments
4361 may overlap with sibling call outgoing arguments and we cannot
4362 allow reordering of reads from function arguments with stores
4363 to outgoing arguments of sibling calls. We do, however, want
4364 to record the alignment of the stack slot. */
4365 /* ALIGN may well be better aligned than TYPE, e.g. due to
4366 PARM_BOUNDARY. Assume the caller isn't lying. */
4367 set_mem_align (dest, align);
4368
4369 emit_move_insn (dest, x);
4370 }
4371 }
4372
4373 /* If part should go in registers, copy that part
4374 into the appropriate registers. Do this now, at the end,
4375 since mem-to-mem copies above may do function calls. */
4376 if (partial > 0 && reg != 0)
4377 {
4378 /* Handle calls that pass values in multiple non-contiguous locations.
4379 The Irix 6 ABI has examples of this. */
4380 if (GET_CODE (reg) == PARALLEL)
4381 emit_group_load (reg, x, type, -1);
4382 else
4383 {
4384 gcc_assert (partial % UNITS_PER_WORD == 0);
4385 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4386 }
4387 }
4388
4389 if (extra && args_addr == 0 && where_pad == stack_direction)
4390 anti_adjust_stack (GEN_INT (extra));
4391
4392 if (alignment_pad && args_addr == 0)
4393 anti_adjust_stack (alignment_pad);
4394 }
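/* A minimal usage sketch (illustrative only), valid on a target where
   PUSH_ARGS is nonzero: push one word-sized value with no partial-register
   part, mirroring the word-by-word recursion above.

     rtx val = gen_reg_rtx (word_mode);
     emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
                     BITS_PER_WORD, 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);  */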
4395 \f
4396 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4397 operations. */
4398
4399 static rtx
4400 get_subtarget (rtx x)
4401 {
4402 return (optimize
4403 || x == 0
4404 /* Only registers can be subtargets. */
4405 || !REG_P (x)
4406 /* Don't use hard regs to avoid extending their life. */
4407 || REGNO (x) < FIRST_PSEUDO_REGISTER
4408 ? 0 : x);
4409 }
4410
4411 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4412 FIELD is a bitfield. Returns true if the optimization was successful,
4413 and there's nothing else to do. */
4414
4415 static bool
4416 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4417 unsigned HOST_WIDE_INT bitpos,
4418 unsigned HOST_WIDE_INT bitregion_start,
4419 unsigned HOST_WIDE_INT bitregion_end,
4420 enum machine_mode mode1, rtx str_rtx,
4421 tree to, tree src)
4422 {
4423 enum machine_mode str_mode = GET_MODE (str_rtx);
4424 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4425 tree op0, op1;
4426 rtx value, result;
4427 optab binop;
4428 gimple srcstmt;
4429 enum tree_code code;
4430
4431 if (mode1 != VOIDmode
4432 || bitsize >= BITS_PER_WORD
4433 || str_bitsize > BITS_PER_WORD
4434 || TREE_SIDE_EFFECTS (to)
4435 || TREE_THIS_VOLATILE (to))
4436 return false;
4437
4438 STRIP_NOPS (src);
4439 if (TREE_CODE (src) != SSA_NAME)
4440 return false;
4441 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4442 return false;
4443
4444 srcstmt = get_gimple_for_ssa_name (src);
4445 if (!srcstmt
4446 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4447 return false;
4448
4449 code = gimple_assign_rhs_code (srcstmt);
4450
4451 op0 = gimple_assign_rhs1 (srcstmt);
4452
4453 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4454 to find its initialization. Hopefully the initialization will
4455 be from a bitfield load. */
4456 if (TREE_CODE (op0) == SSA_NAME)
4457 {
4458 gimple op0stmt = get_gimple_for_ssa_name (op0);
4459
4460 /* We want to eventually have OP0 be the same as TO, which
4461 should be a bitfield. */
4462 if (!op0stmt
4463 || !is_gimple_assign (op0stmt)
4464 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4465 return false;
4466 op0 = gimple_assign_rhs1 (op0stmt);
4467 }
4468
4469 op1 = gimple_assign_rhs2 (srcstmt);
4470
4471 if (!operand_equal_p (to, op0, 0))
4472 return false;
4473
4474 if (MEM_P (str_rtx))
4475 {
4476 unsigned HOST_WIDE_INT offset1;
4477
4478 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4479 str_mode = word_mode;
4480 str_mode = get_best_mode (bitsize, bitpos,
4481 bitregion_start, bitregion_end,
4482 MEM_ALIGN (str_rtx), str_mode, 0);
4483 if (str_mode == VOIDmode)
4484 return false;
4485 str_bitsize = GET_MODE_BITSIZE (str_mode);
4486
4487 offset1 = bitpos;
4488 bitpos %= str_bitsize;
4489 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4490 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4491 }
4492 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4493 return false;
4494
4495 /* If the bit field covers the whole REG/MEM, store_field
4496 will likely generate better code. */
4497 if (bitsize >= str_bitsize)
4498 return false;
4499
4500 /* We can't handle fields split across multiple entities. */
4501 if (bitpos + bitsize > str_bitsize)
4502 return false;
4503
4504 if (BYTES_BIG_ENDIAN)
4505 bitpos = str_bitsize - bitpos - bitsize;
4506
4507 switch (code)
4508 {
4509 case PLUS_EXPR:
4510 case MINUS_EXPR:
4511 /* For now, just optimize the case of the topmost bitfield,
4512 where we don't need to do any masking, and also
4513 1-bit bitfields where xor can be used.
4514 We might win by one instruction for the other bitfields
4515 too if insv/extv instructions aren't used, so that
4516 can be added later. */
4517 if (bitpos + bitsize != str_bitsize
4518 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4519 break;
4520
4521 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4522 value = convert_modes (str_mode,
4523 TYPE_MODE (TREE_TYPE (op1)), value,
4524 TYPE_UNSIGNED (TREE_TYPE (op1)));
4525
4526 /* We may be accessing data outside the field, which means
4527 we can alias adjacent data. */
4528 if (MEM_P (str_rtx))
4529 {
4530 str_rtx = shallow_copy_rtx (str_rtx);
4531 set_mem_alias_set (str_rtx, 0);
4532 set_mem_expr (str_rtx, 0);
4533 }
4534
4535 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4536 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4537 {
4538 value = expand_and (str_mode, value, const1_rtx, NULL);
4539 binop = xor_optab;
4540 }
4541 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4542 result = expand_binop (str_mode, binop, str_rtx,
4543 value, str_rtx, 1, OPTAB_WIDEN);
4544 if (result != str_rtx)
4545 emit_move_insn (str_rtx, result);
4546 return true;
4547
4548 case BIT_IOR_EXPR:
4549 case BIT_XOR_EXPR:
4550 if (TREE_CODE (op1) != INTEGER_CST)
4551 break;
4552 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4553 value = convert_modes (str_mode,
4554 TYPE_MODE (TREE_TYPE (op1)), value,
4555 TYPE_UNSIGNED (TREE_TYPE (op1)));
4556
4557 /* We may be accessing data outside the field, which means
4558 we can alias adjacent data. */
4559 if (MEM_P (str_rtx))
4560 {
4561 str_rtx = shallow_copy_rtx (str_rtx);
4562 set_mem_alias_set (str_rtx, 0);
4563 set_mem_expr (str_rtx, 0);
4564 }
4565
4566 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4567 if (bitpos + bitsize != str_bitsize)
4568 {
4569 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4570 str_mode);
4571 value = expand_and (str_mode, value, mask, NULL_RTX);
4572 }
4573 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4574 result = expand_binop (str_mode, binop, str_rtx,
4575 value, str_rtx, 1, OPTAB_WIDEN);
4576 if (result != str_rtx)
4577 emit_move_insn (str_rtx, result);
4578 return true;
4579
4580 default:
4581 break;
4582 }
4583
4584 return false;
4585 }
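/* A worked example of the PLUS_EXPR case above (layout details are
   target-dependent; this assumes a typical little-endian ABI where COUNT
   occupies the upper bits of a 32-bit word):

     struct { unsigned pad : 12; unsigned count : 20; } s;
     s.count += 1;

   Because COUNT is the topmost bitfield in its word, the increment is done
   by adding (1 << 12) directly into the containing word; any carry out of
   COUNT falls off the top, so no masking or extraction is needed.  */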
4586
4587 /* In the C++ memory model, consecutive bit fields in a structure are
4588 considered one memory location.
4589
4590 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4591 returns the bit range of consecutive bits in which this COMPONENT_REF
4592 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4593 and *OFFSET may be adjusted in the process.
4594
4595 If the access does not need to be restricted, 0 is returned in both
4596 *BITSTART and *BITEND. */
4597
4598 static void
4599 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4600 unsigned HOST_WIDE_INT *bitend,
4601 tree exp,
4602 HOST_WIDE_INT *bitpos,
4603 tree *offset)
4604 {
4605 HOST_WIDE_INT bitoffset;
4606 tree field, repr;
4607
4608 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4609
4610 field = TREE_OPERAND (exp, 1);
4611 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4612 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4613 need to limit the range we can access. */
4614 if (!repr)
4615 {
4616 *bitstart = *bitend = 0;
4617 return;
4618 }
4619
4620 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4621 part of a larger bit field, then the representative does not serve any
4622 useful purpose. This can occur in Ada. */
4623 if (handled_component_p (TREE_OPERAND (exp, 0)))
4624 {
4625 enum machine_mode rmode;
4626 HOST_WIDE_INT rbitsize, rbitpos;
4627 tree roffset;
4628 int unsignedp;
4629 int volatilep = 0;
4630 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4631 &roffset, &rmode, &unsignedp, &volatilep, false);
4632 if ((rbitpos % BITS_PER_UNIT) != 0)
4633 {
4634 *bitstart = *bitend = 0;
4635 return;
4636 }
4637 }
4638
4639 /* Compute the adjustment to bitpos from the offset of the field
4640 relative to the representative. DECL_FIELD_OFFSET of field and
4641 repr are the same by construction if they are not constants,
4642 see finish_bitfield_layout. */
4643 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4644 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4645 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4646 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4647 else
4648 bitoffset = 0;
4649 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4650 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4651
4652 /* If the adjustment is larger than bitpos, we would have a negative bit
4653 position for the lower bound and this may wreak havoc later. Adjust
4654 offset and bitpos to make the lower bound non-negative in that case. */
4655 if (bitoffset > *bitpos)
4656 {
4657 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4658 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4659
4660 *bitpos += adjust;
4661 if (*offset == NULL_TREE)
4662 *offset = size_int (-adjust / BITS_PER_UNIT);
4663 else
4664 *offset
4665 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4666 *bitstart = 0;
4667 }
4668 else
4669 *bitstart = *bitpos - bitoffset;
4670
4671 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4672 }
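/* A worked example (the exact layout is ABI-dependent): for

     struct S { char a; int b : 7; int c : 9; char d; };

   the adjacent bitfields B and C share one DECL_BIT_FIELD_REPRESENTATIVE,
   so a store to S.B gets a bit range covering only the B/C region; the
   bytes holding A and D lie outside *BITSTART/*BITEND and must not be
   touched, since the C++ memory model treats them as separate memory
   locations.  */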
4673
4674 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4675 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4676 DECL_RTL was not set yet, return NORTL. */
4677
4678 static inline bool
4679 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4680 {
4681 if (TREE_CODE (addr) != ADDR_EXPR)
4682 return false;
4683
4684 tree base = TREE_OPERAND (addr, 0);
4685
4686 if (!DECL_P (base)
4687 || TREE_ADDRESSABLE (base)
4688 || DECL_MODE (base) == BLKmode)
4689 return false;
4690
4691 if (!DECL_RTL_SET_P (base))
4692 return nortl;
4693
4694 return (!MEM_P (DECL_RTL (base)));
4695 }
4696
4697 /* Returns true if the MEM_REF REF refers to an object that does not
4698 reside in memory and has non-BLKmode. */
4699
4700 static inline bool
4701 mem_ref_refers_to_non_mem_p (tree ref)
4702 {
4703 tree base = TREE_OPERAND (ref, 0);
4704 return addr_expr_of_non_mem_decl_p_1 (base, false);
4705 }
4706
4707 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4708 is true, try generating a nontemporal store. */
4709
4710 void
4711 expand_assignment (tree to, tree from, bool nontemporal)
4712 {
4713 rtx to_rtx = 0;
4714 rtx result;
4715 enum machine_mode mode;
4716 unsigned int align;
4717 enum insn_code icode;
4718
4719 /* Don't crash if the lhs of the assignment was erroneous. */
4720 if (TREE_CODE (to) == ERROR_MARK)
4721 {
4722 expand_normal (from);
4723 return;
4724 }
4725
4726 /* Optimize away no-op moves without side-effects. */
4727 if (operand_equal_p (to, from, 0))
4728 return;
4729
4730 /* Handle misaligned stores. */
4731 mode = TYPE_MODE (TREE_TYPE (to));
4732 if ((TREE_CODE (to) == MEM_REF
4733 || TREE_CODE (to) == TARGET_MEM_REF)
4734 && mode != BLKmode
4735 && !mem_ref_refers_to_non_mem_p (to)
4736 && ((align = get_object_alignment (to))
4737 < GET_MODE_ALIGNMENT (mode))
4738 && (((icode = optab_handler (movmisalign_optab, mode))
4739 != CODE_FOR_nothing)
4740 || SLOW_UNALIGNED_ACCESS (mode, align)))
4741 {
4742 rtx reg, mem;
4743
4744 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4745 reg = force_not_mem (reg);
4746 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4747
4748 if (icode != CODE_FOR_nothing)
4749 {
4750 struct expand_operand ops[2];
4751
4752 create_fixed_operand (&ops[0], mem);
4753 create_input_operand (&ops[1], reg, mode);
4754 /* The movmisalign<mode> pattern cannot fail, else the assignment
4755 would silently be omitted. */
4756 expand_insn (icode, 2, ops);
4757 }
4758 else
4759 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4760 return;
4761 }
4762
4763 /* Assignment of a structure component needs special treatment
4764 if the structure component's rtx is not simply a MEM.
4765 Assignment of an array element at a constant index, and assignment of
4766 an array element in an unaligned packed structure field, has the same
4767 problem. Same for (partially) storing into a non-memory object. */
4768 if (handled_component_p (to)
4769 || (TREE_CODE (to) == MEM_REF
4770 && mem_ref_refers_to_non_mem_p (to))
4771 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4772 {
4773 enum machine_mode mode1;
4774 HOST_WIDE_INT bitsize, bitpos;
4775 unsigned HOST_WIDE_INT bitregion_start = 0;
4776 unsigned HOST_WIDE_INT bitregion_end = 0;
4777 tree offset;
4778 int unsignedp;
4779 int volatilep = 0;
4780 tree tem;
4781
4782 push_temp_slots ();
4783 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4784 &unsignedp, &volatilep, true);
4785
4786 /* Make sure bitpos is not negative, it can wreak havoc later. */
4787 if (bitpos < 0)
4788 {
4789 gcc_assert (offset == NULL_TREE);
4790 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4791 ? 3 : exact_log2 (BITS_PER_UNIT)));
4792 bitpos &= BITS_PER_UNIT - 1;
4793 }
4794
4795 if (TREE_CODE (to) == COMPONENT_REF
4796 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4797 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4798 /* The C++ memory model naturally applies to byte-aligned fields.
4799 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4800 BITSIZE are not byte-aligned, there is no need to limit the range
4801 we can access. This can occur with packed structures in Ada. */
4802 else if (bitsize > 0
4803 && bitsize % BITS_PER_UNIT == 0
4804 && bitpos % BITS_PER_UNIT == 0)
4805 {
4806 bitregion_start = bitpos;
4807 bitregion_end = bitpos + bitsize - 1;
4808 }
4809
4810 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4811
4812 /* If the field has a mode, we want to access it in the
4813 field's mode, not the computed mode.
4814 If a MEM has VOIDmode (external with incomplete type),
4815 use BLKmode for it instead. */
4816 if (MEM_P (to_rtx))
4817 {
4818 if (mode1 != VOIDmode)
4819 to_rtx = adjust_address (to_rtx, mode1, 0);
4820 else if (GET_MODE (to_rtx) == VOIDmode)
4821 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4822 }
4823
4824 if (offset != 0)
4825 {
4826 enum machine_mode address_mode;
4827 rtx offset_rtx;
4828
4829 if (!MEM_P (to_rtx))
4830 {
4831 /* We can get constant negative offsets into arrays with broken
4832 user code. Translate this to a trap instead of ICEing. */
4833 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4834 expand_builtin_trap ();
4835 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4836 }
4837
4838 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4839 address_mode = get_address_mode (to_rtx);
4840 if (GET_MODE (offset_rtx) != address_mode)
4841 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4842
4843 /* If we have an expression in OFFSET_RTX and a non-zero
4844 byte offset in BITPOS, adding the byte offset before the
4845 OFFSET_RTX results in better intermediate code, which makes
4846 later rtl optimization passes perform better.
4847
4848 We prefer intermediate code like this:
4849
4850 r124:DI=r123:DI+0x18
4851 [r124:DI]=r121:DI
4852
4853 ... instead of ...
4854
4855 r124:DI=r123:DI+0x10
4856 [r124:DI+0x8]=r121:DI
4857
4858 This is only done for aligned data values, as these can
4859 be expected to result in single move instructions. */
4860 if (mode1 != VOIDmode
4861 && bitpos != 0
4862 && bitsize > 0
4863 && (bitpos % bitsize) == 0
4864 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4865 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4866 {
4867 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4868 bitregion_start = 0;
4869 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4870 bitregion_end -= bitpos;
4871 bitpos = 0;
4872 }
4873
4874 to_rtx = offset_address (to_rtx, offset_rtx,
4875 highest_pow2_factor_for_target (to,
4876 offset));
4877 }
4878
4879 /* No action is needed if the target is not a memory and the field
4880 lies completely outside that target. This can occur if the source
4881 code contains an out-of-bounds access to a small array. */
4882 if (!MEM_P (to_rtx)
4883 && GET_MODE (to_rtx) != BLKmode
4884 && (unsigned HOST_WIDE_INT) bitpos
4885 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4886 {
4887 expand_normal (from);
4888 result = NULL;
4889 }
4890 /* Handle expand_expr of a complex value returning a CONCAT. */
4891 else if (GET_CODE (to_rtx) == CONCAT)
4892 {
4893 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4894 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4895 && bitpos == 0
4896 && bitsize == mode_bitsize)
4897 result = store_expr (from, to_rtx, false, nontemporal);
4898 else if (bitsize == mode_bitsize / 2
4899 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4900 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4901 nontemporal);
4902 else if (bitpos + bitsize <= mode_bitsize / 2)
4903 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4904 bitregion_start, bitregion_end,
4905 mode1, from,
4906 get_alias_set (to), nontemporal);
4907 else if (bitpos >= mode_bitsize / 2)
4908 result = store_field (XEXP (to_rtx, 1), bitsize,
4909 bitpos - mode_bitsize / 2,
4910 bitregion_start, bitregion_end,
4911 mode1, from,
4912 get_alias_set (to), nontemporal);
4913 else if (bitpos == 0 && bitsize == mode_bitsize)
4914 {
4915 rtx from_rtx;
4916 result = expand_normal (from);
4917 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4918 TYPE_MODE (TREE_TYPE (from)), 0);
4919 emit_move_insn (XEXP (to_rtx, 0),
4920 read_complex_part (from_rtx, false));
4921 emit_move_insn (XEXP (to_rtx, 1),
4922 read_complex_part (from_rtx, true));
4923 }
4924 else
4925 {
4926 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4927 GET_MODE_SIZE (GET_MODE (to_rtx)));
4928 write_complex_part (temp, XEXP (to_rtx, 0), false);
4929 write_complex_part (temp, XEXP (to_rtx, 1), true);
4930 result = store_field (temp, bitsize, bitpos,
4931 bitregion_start, bitregion_end,
4932 mode1, from,
4933 get_alias_set (to), nontemporal);
4934 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4935 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4936 }
4937 }
4938 else
4939 {
4940 if (MEM_P (to_rtx))
4941 {
4942 /* If the field is at offset zero, we could have been given the
4943 DECL_RTX of the parent struct. Don't munge it. */
4944 to_rtx = shallow_copy_rtx (to_rtx);
4945 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4946 if (volatilep)
4947 MEM_VOLATILE_P (to_rtx) = 1;
4948 }
4949
4950 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4951 bitregion_start, bitregion_end,
4952 mode1,
4953 to_rtx, to, from))
4954 result = NULL;
4955 else
4956 result = store_field (to_rtx, bitsize, bitpos,
4957 bitregion_start, bitregion_end,
4958 mode1, from,
4959 get_alias_set (to), nontemporal);
4960 }
4961
4962 if (result)
4963 preserve_temp_slots (result);
4964 pop_temp_slots ();
4965 return;
4966 }
4967
4968 /* If the rhs is a function call and its value is not an aggregate,
4969 call the function before we start to compute the lhs.
4970 This is needed for correct code for cases such as
4971 val = setjmp (buf) on machines where reference to val
4972 requires loading up part of an address in a separate insn.
4973
4974 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4975 since it might be a promoted variable where the zero- or sign- extension
4976 needs to be done. Handling this in the normal way is safe because no
4977 computation is done before the call. The same is true for SSA names. */
4978 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4979 && COMPLETE_TYPE_P (TREE_TYPE (from))
4980 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4981 && ! (((TREE_CODE (to) == VAR_DECL
4982 || TREE_CODE (to) == PARM_DECL
4983 || TREE_CODE (to) == RESULT_DECL)
4984 && REG_P (DECL_RTL (to)))
4985 || TREE_CODE (to) == SSA_NAME))
4986 {
4987 rtx value;
4988
4989 push_temp_slots ();
4990 value = expand_normal (from);
4991 if (to_rtx == 0)
4992 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4993
4994 /* Handle calls that return values in multiple non-contiguous locations.
4995 The Irix 6 ABI has examples of this. */
4996 if (GET_CODE (to_rtx) == PARALLEL)
4997 {
4998 if (GET_CODE (value) == PARALLEL)
4999 emit_group_move (to_rtx, value);
5000 else
5001 emit_group_load (to_rtx, value, TREE_TYPE (from),
5002 int_size_in_bytes (TREE_TYPE (from)));
5003 }
5004 else if (GET_CODE (value) == PARALLEL)
5005 emit_group_store (to_rtx, value, TREE_TYPE (from),
5006 int_size_in_bytes (TREE_TYPE (from)));
5007 else if (GET_MODE (to_rtx) == BLKmode)
5008 {
5009 /* Handle calls that return BLKmode values in registers. */
5010 if (REG_P (value))
5011 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5012 else
5013 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5014 }
5015 else
5016 {
5017 if (POINTER_TYPE_P (TREE_TYPE (to)))
5018 value = convert_memory_address_addr_space
5019 (GET_MODE (to_rtx), value,
5020 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5021
5022 emit_move_insn (to_rtx, value);
5023 }
5024 preserve_temp_slots (to_rtx);
5025 pop_temp_slots ();
5026 return;
5027 }
5028
5029 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5030 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5031
5032 /* Don't move directly into a return register. */
5033 if (TREE_CODE (to) == RESULT_DECL
5034 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5035 {
5036 rtx temp;
5037
5038 push_temp_slots ();
5039
5040 /* If the source is itself a return value, it still is in a pseudo at
5041 this point so we can move it back to the return register directly. */
5042 if (REG_P (to_rtx)
5043 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5044 && TREE_CODE (from) != CALL_EXPR)
5045 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5046 else
5047 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5048
5049 /* Handle calls that return values in multiple non-contiguous locations.
5050 The Irix 6 ABI has examples of this. */
5051 if (GET_CODE (to_rtx) == PARALLEL)
5052 {
5053 if (GET_CODE (temp) == PARALLEL)
5054 emit_group_move (to_rtx, temp);
5055 else
5056 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5057 int_size_in_bytes (TREE_TYPE (from)));
5058 }
5059 else if (temp)
5060 emit_move_insn (to_rtx, temp);
5061
5062 preserve_temp_slots (to_rtx);
5063 pop_temp_slots ();
5064 return;
5065 }
5066
5067 /* In case we are returning the contents of an object which overlaps
5068 the place the value is being stored, use a safe function when copying
5069 a value through a pointer into a structure value return block. */
5070 if (TREE_CODE (to) == RESULT_DECL
5071 && TREE_CODE (from) == INDIRECT_REF
5072 && ADDR_SPACE_GENERIC_P
5073 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5074 && refs_may_alias_p (to, from)
5075 && cfun->returns_struct
5076 && !cfun->returns_pcc_struct)
5077 {
5078 rtx from_rtx, size;
5079
5080 push_temp_slots ();
5081 size = expr_size (from);
5082 from_rtx = expand_normal (from);
5083
5084 emit_library_call (memmove_libfunc, LCT_NORMAL,
5085 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5086 XEXP (from_rtx, 0), Pmode,
5087 convert_to_mode (TYPE_MODE (sizetype),
5088 size, TYPE_UNSIGNED (sizetype)),
5089 TYPE_MODE (sizetype));
5090
5091 preserve_temp_slots (to_rtx);
5092 pop_temp_slots ();
5093 return;
5094 }
5095
5096 /* Compute FROM and store the value in the rtx we got. */
5097
5098 push_temp_slots ();
5099 result = store_expr (from, to_rtx, 0, nontemporal);
5100 preserve_temp_slots (result);
5101 pop_temp_slots ();
5102 return;
5103 }
5104
5105 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5106 succeeded, false otherwise. */
5107
5108 bool
5109 emit_storent_insn (rtx to, rtx from)
5110 {
5111 struct expand_operand ops[2];
5112 enum machine_mode mode = GET_MODE (to);
5113 enum insn_code code = optab_handler (storent_optab, mode);
5114
5115 if (code == CODE_FOR_nothing)
5116 return false;
5117
5118 create_fixed_operand (&ops[0], to);
5119 create_input_operand (&ops[1], from, mode);
5120 return maybe_expand_insn (code, 2, ops);
5121 }
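/* A minimal usage sketch (illustrative only) of a typical fallback pattern;
   DEST is assumed to be a MEM and VAL an rtx value of the same mode. The
   value is forced into a register first (as the comment in store_expr below
   also requires for nontemporal stores), then an ordinary move is emitted if
   the target has no storent pattern.

     rtx src = force_reg (GET_MODE (dest), val);
     if (!emit_storent_insn (dest, src))
       emit_move_insn (dest, src);  */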
5122
5123 /* Generate code for computing expression EXP,
5124 and storing the value into TARGET.
5125
5126 If the mode is BLKmode then we may return TARGET itself.
5127 It turns out that in BLKmode it doesn't cause a problem,
5128 because C has no operators that could combine two different
5129 assignments into the same BLKmode object with different values
5130 with no sequence point. Will other languages need this to
5131 be more thorough?
5132
5133 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5134 stack, and block moves may need to be treated specially.
5135
5136 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5137
5138 rtx
5139 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5140 {
5141 rtx temp;
5142 rtx alt_rtl = NULL_RTX;
5143 location_t loc = curr_insn_location ();
5144
5145 if (VOID_TYPE_P (TREE_TYPE (exp)))
5146 {
5147 /* C++ can generate ?: expressions with a throw expression in one
5148 branch and an rvalue in the other. Here, we resolve attempts to
5149 store the throw expression's nonexistent result. */
5150 gcc_assert (!call_param_p);
5151 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5152 return NULL_RTX;
5153 }
5154 if (TREE_CODE (exp) == COMPOUND_EXPR)
5155 {
5156 /* Perform first part of compound expression, then assign from second
5157 part. */
5158 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5159 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5160 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5161 nontemporal);
5162 }
5163 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5164 {
5165 /* For conditional expression, get safe form of the target. Then
5166 test the condition, doing the appropriate assignment on either
5167 side. This avoids the creation of unnecessary temporaries.
5168 For non-BLKmode, it is more efficient not to do this. */
5169
5170 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5171
5172 do_pending_stack_adjust ();
5173 NO_DEFER_POP;
5174 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5175 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5176 nontemporal);
5177 emit_jump_insn (gen_jump (lab2));
5178 emit_barrier ();
5179 emit_label (lab1);
5180 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5181 nontemporal);
5182 emit_label (lab2);
5183 OK_DEFER_POP;
5184
5185 return NULL_RTX;
5186 }
5187 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5188 /* If this is a scalar in a register that is stored in a wider mode
5189 than the declared mode, compute the result into its declared mode
5190 and then convert to the wider mode. Our value is the computed
5191 expression. */
5192 {
5193 rtx inner_target = 0;
5194
5195 /* We can do the conversion inside EXP, which will often result
5196 in some optimizations. Do the conversion in two steps: first
5197 change the signedness, if needed, then the extension. But don't
5198 do this if the type of EXP is a subtype of something else
5199 since then the conversion might involve more than just
5200 converting modes. */
5201 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5202 && TREE_TYPE (TREE_TYPE (exp)) == 0
5203 && GET_MODE_PRECISION (GET_MODE (target))
5204 == TYPE_PRECISION (TREE_TYPE (exp)))
5205 {
5206 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5207 TYPE_UNSIGNED (TREE_TYPE (exp))))
5208 {
5209 /* Some types, e.g. Fortran's logical*4, won't have a signed
5210 version, so use the mode instead. */
5211 tree ntype
5212 = (signed_or_unsigned_type_for
5213 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5214 if (ntype == NULL)
5215 ntype = lang_hooks.types.type_for_mode
5216 (TYPE_MODE (TREE_TYPE (exp)),
5217 SUBREG_PROMOTED_SIGN (target));
5218
5219 exp = fold_convert_loc (loc, ntype, exp);
5220 }
5221
5222 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5223 (GET_MODE (SUBREG_REG (target)),
5224 SUBREG_PROMOTED_SIGN (target)),
5225 exp);
5226
5227 inner_target = SUBREG_REG (target);
5228 }
5229
5230 temp = expand_expr (exp, inner_target, VOIDmode,
5231 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5232
5233 /* If TEMP is a VOIDmode constant, use convert_modes to make
5234 sure that we properly convert it. */
5235 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5236 {
5237 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5238 temp, SUBREG_PROMOTED_SIGN (target));
5239 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5240 GET_MODE (target), temp,
5241 SUBREG_PROMOTED_SIGN (target));
5242 }
5243
5244 convert_move (SUBREG_REG (target), temp,
5245 SUBREG_PROMOTED_SIGN (target));
5246
5247 return NULL_RTX;
5248 }
5249 else if ((TREE_CODE (exp) == STRING_CST
5250 || (TREE_CODE (exp) == MEM_REF
5251 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5252 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5253 == STRING_CST
5254 && integer_zerop (TREE_OPERAND (exp, 1))))
5255 && !nontemporal && !call_param_p
5256 && MEM_P (target))
5257 {
5258 /* Optimize initialization of an array with a STRING_CST. */
5259 HOST_WIDE_INT exp_len, str_copy_len;
5260 rtx dest_mem;
5261 tree str = TREE_CODE (exp) == STRING_CST
5262 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5263
5264 exp_len = int_expr_size (exp);
5265 if (exp_len <= 0)
5266 goto normal_expr;
5267
5268 if (TREE_STRING_LENGTH (str) <= 0)
5269 goto normal_expr;
5270
5271 str_copy_len = strlen (TREE_STRING_POINTER (str));
5272 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5273 goto normal_expr;
5274
5275 str_copy_len = TREE_STRING_LENGTH (str);
5276 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5277 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5278 {
5279 str_copy_len += STORE_MAX_PIECES - 1;
5280 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5281 }
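          /* Illustrative arithmetic, not part of the original source and
             assuming STORE_MAX_PIECES is 8 (it is target-dependent): the
             add-and-mask above rounds a 10-byte length up to 16, and the
             MIN below then caps the copy at the destination size.  */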
5282 str_copy_len = MIN (str_copy_len, exp_len);
5283 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5284 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5285 MEM_ALIGN (target), false))
5286 goto normal_expr;
5287
5288 dest_mem = target;
5289
5290 dest_mem = store_by_pieces (dest_mem,
5291 str_copy_len, builtin_strncpy_read_str,
5292 CONST_CAST (char *,
5293 TREE_STRING_POINTER (str)),
5294 MEM_ALIGN (target), false,
5295 exp_len > str_copy_len ? 1 : 0);
5296 if (exp_len > str_copy_len)
5297 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5298 GEN_INT (exp_len - str_copy_len),
5299 BLOCK_OP_NORMAL);
5300 return NULL_RTX;
5301 }
5302 else
5303 {
5304 rtx tmp_target;
5305
5306 normal_expr:
5307 /* If we want to use a nontemporal store, force the value to
5308 a register first. */
5309 tmp_target = nontemporal ? NULL_RTX : target;
5310 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5311 (call_param_p
5312 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5313 &alt_rtl, false);
5314 }
5315
5316 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5317 the same as that of TARGET, adjust the constant. This is needed, for
5318 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5319 only a word-sized value. */
5320 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5321 && TREE_CODE (exp) != ERROR_MARK
5322 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5323 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5324 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5325
5326 /* If value was not generated in the target, store it there.
5327 Convert the value to TARGET's type first if necessary and emit the
5328 pending increments that have been queued while expanding EXP.
5329 Note that we cannot emit the whole queue blindly because this will
5330 effectively disable the POST_INC optimization later.
5331
5332 If TEMP and TARGET compare equal according to rtx_equal_p, but
5333 one or both of them are volatile memory refs, we have to distinguish
5334 two cases:
5335 - expand_expr has used TARGET. In this case, we must not generate
5336 another copy. This can be detected by TARGET being equal according
5337 to == .
5338 - expand_expr has not used TARGET - that means that the source just
5339 happens to have the same RTX form. Since temp will have been created
5340 by expand_expr, it will compare unequal according to == .
5341 We must generate a copy in this case, to reach the correct number
5342 of volatile memory references. */
5343
5344 if ((! rtx_equal_p (temp, target)
5345 || (temp != target && (side_effects_p (temp)
5346 || side_effects_p (target))))
5347 && TREE_CODE (exp) != ERROR_MARK
5348 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5349 but TARGET is not a valid memory reference, TEMP will differ
5350 from TARGET although it is really the same location. */
5351 && !(alt_rtl
5352 && rtx_equal_p (alt_rtl, target)
5353 && !side_effects_p (alt_rtl)
5354 && !side_effects_p (target))
5355 /* If there's nothing to copy, don't bother. Don't call
5356 expr_size unless necessary, because the expr_size hook of some
5357 front ends (e.g. C++) must not be given objects that are not
5358 supposed to be bit-copied or bit-initialized. */
5359 && expr_size (exp) != const0_rtx)
5360 {
5361 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5362 {
5363 if (GET_MODE (target) == BLKmode)
5364 {
5365 /* Handle calls that return BLKmode values in registers. */
5366 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5367 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5368 else
5369 store_bit_field (target,
5370 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5371 0, 0, 0, GET_MODE (temp), temp);
5372 }
5373 else
5374 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5375 }
5376
5377 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5378 {
5379 /* Handle copying a string constant into an array. The string
5380 constant may be shorter than the array. So copy just the string's
5381 actual length, and clear the rest. First get the size of the data
5382 type of the string, which is actually the size of the target. */
5383 rtx size = expr_size (exp);
5384
5385 if (CONST_INT_P (size)
5386 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5387 emit_block_move (target, temp, size,
5388 (call_param_p
5389 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5390 else
5391 {
5392 enum machine_mode pointer_mode
5393 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5394 enum machine_mode address_mode = get_address_mode (target);
5395
5396 /* Compute the size of the data to copy from the string. */
5397 tree copy_size
5398 = size_binop_loc (loc, MIN_EXPR,
5399 make_tree (sizetype, size),
5400 size_int (TREE_STRING_LENGTH (exp)));
5401 rtx copy_size_rtx
5402 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5403 (call_param_p
5404 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5405 rtx label = 0;
5406
5407 /* Copy that much. */
5408 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5409 TYPE_UNSIGNED (sizetype));
5410 emit_block_move (target, temp, copy_size_rtx,
5411 (call_param_p
5412 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5413
5414 /* Figure out how much is left in TARGET that we have to clear.
5415 Do all calculations in pointer_mode. */
5416 if (CONST_INT_P (copy_size_rtx))
5417 {
5418 size = plus_constant (address_mode, size,
5419 -INTVAL (copy_size_rtx));
5420 target = adjust_address (target, BLKmode,
5421 INTVAL (copy_size_rtx));
5422 }
5423 else
5424 {
5425 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5426 copy_size_rtx, NULL_RTX, 0,
5427 OPTAB_LIB_WIDEN);
5428
5429 if (GET_MODE (copy_size_rtx) != address_mode)
5430 copy_size_rtx = convert_to_mode (address_mode,
5431 copy_size_rtx,
5432 TYPE_UNSIGNED (sizetype));
5433
5434 target = offset_address (target, copy_size_rtx,
5435 highest_pow2_factor (copy_size));
5436 label = gen_label_rtx ();
5437 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5438 GET_MODE (size), 0, label);
5439 }
5440
5441 if (size != const0_rtx)
5442 clear_storage (target, size, BLOCK_OP_NORMAL);
5443
5444 if (label)
5445 emit_label (label);
5446 }
5447 }
5448 /* Handle calls that return values in multiple non-contiguous locations.
5449 The Irix 6 ABI has examples of this. */
5450 else if (GET_CODE (target) == PARALLEL)
5451 {
5452 if (GET_CODE (temp) == PARALLEL)
5453 emit_group_move (target, temp);
5454 else
5455 emit_group_load (target, temp, TREE_TYPE (exp),
5456 int_size_in_bytes (TREE_TYPE (exp)));
5457 }
5458 else if (GET_CODE (temp) == PARALLEL)
5459 emit_group_store (target, temp, TREE_TYPE (exp),
5460 int_size_in_bytes (TREE_TYPE (exp)));
5461 else if (GET_MODE (temp) == BLKmode)
5462 emit_block_move (target, temp, expr_size (exp),
5463 (call_param_p
5464 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5465 /* If we emit a nontemporal store, there is nothing else to do. */
5466 else if (nontemporal && emit_storent_insn (target, temp))
5467 ;
5468 else
5469 {
5470 temp = force_operand (temp, target);
5471 if (temp != target)
5472 emit_move_insn (target, temp);
5473 }
5474 }
5475
5476 return NULL_RTX;
5477 }
5478 \f
5479 /* Return true if field F of structure TYPE is a flexible array. */
5480
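/* Illustrative sketch, not taken from the original source: given

     struct S { int n; char tail[]; };

   the field TAIL satisfies the test below: it is the last field, its
   type is an array whose domain has a zero lower bound and no upper
   bound, and the enclosing struct still has a non-negative size.  */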
5481 static bool
5482 flexible_array_member_p (const_tree f, const_tree type)
5483 {
5484 const_tree tf;
5485
5486 tf = TREE_TYPE (f);
5487 return (DECL_CHAIN (f) == NULL
5488 && TREE_CODE (tf) == ARRAY_TYPE
5489 && TYPE_DOMAIN (tf)
5490 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5491 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5492 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5493 && int_size_in_bytes (type) >= 0);
5494 }
5495
5496 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5497 must have in order for it to completely initialize a value of type TYPE.
5498 Return -1 if the number isn't known.
5499
5500 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5501
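/* Hedged example, added for illustration only: for

     struct P { int x; double c[3]; };

   the function below returns 2 with FOR_CTOR_P (the two top-level
   fields a complete constructor must provide) and 1 + 3 = 4 without
   it (an estimate of the number of scalars contained in the type).  */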
5502 static HOST_WIDE_INT
5503 count_type_elements (const_tree type, bool for_ctor_p)
5504 {
5505 switch (TREE_CODE (type))
5506 {
5507 case ARRAY_TYPE:
5508 {
5509 tree nelts;
5510
5511 nelts = array_type_nelts (type);
5512 if (nelts && tree_fits_uhwi_p (nelts))
5513 {
5514 unsigned HOST_WIDE_INT n;
5515
5516 n = tree_to_uhwi (nelts) + 1;
5517 if (n == 0 || for_ctor_p)
5518 return n;
5519 else
5520 return n * count_type_elements (TREE_TYPE (type), false);
5521 }
5522 return for_ctor_p ? -1 : 1;
5523 }
5524
5525 case RECORD_TYPE:
5526 {
5527 unsigned HOST_WIDE_INT n;
5528 tree f;
5529
5530 n = 0;
5531 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5532 if (TREE_CODE (f) == FIELD_DECL)
5533 {
5534 if (!for_ctor_p)
5535 n += count_type_elements (TREE_TYPE (f), false);
5536 else if (!flexible_array_member_p (f, type))
5537 /* Don't count flexible arrays, which are not supposed
5538 to be initialized. */
5539 n += 1;
5540 }
5541
5542 return n;
5543 }
5544
5545 case UNION_TYPE:
5546 case QUAL_UNION_TYPE:
5547 {
5548 tree f;
5549 HOST_WIDE_INT n, m;
5550
5551 gcc_assert (!for_ctor_p);
5552 /* Estimate the number of scalars in each field and pick the
5553 maximum. Other estimates would work as well; the idea is simply
5554 to make sure that the estimate is not sensitive to the ordering
5555 of the fields. */
5556 n = 1;
5557 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5558 if (TREE_CODE (f) == FIELD_DECL)
5559 {
5560 m = count_type_elements (TREE_TYPE (f), false);
5561 /* If the field doesn't span the whole union, add an extra
5562 scalar for the rest. */
5563 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5564 TYPE_SIZE (type)) != 1)
5565 m++;
5566 if (n < m)
5567 n = m;
5568 }
5569 return n;
5570 }
5571
5572 case COMPLEX_TYPE:
5573 return 2;
5574
5575 case VECTOR_TYPE:
5576 return TYPE_VECTOR_SUBPARTS (type);
5577
5578 case INTEGER_TYPE:
5579 case REAL_TYPE:
5580 case FIXED_POINT_TYPE:
5581 case ENUMERAL_TYPE:
5582 case BOOLEAN_TYPE:
5583 case POINTER_TYPE:
5584 case OFFSET_TYPE:
5585 case REFERENCE_TYPE:
5586 case NULLPTR_TYPE:
5587 return 1;
5588
5589 case ERROR_MARK:
5590 return 0;
5591
5592 case VOID_TYPE:
5593 case METHOD_TYPE:
5594 case FUNCTION_TYPE:
5595 case LANG_TYPE:
5596 default:
5597 gcc_unreachable ();
5598 }
5599 }
5600
5601 /* Helper for categorize_ctor_elements. Identical interface. */
5602
5603 static bool
5604 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5605 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5606 {
5607 unsigned HOST_WIDE_INT idx;
5608 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5609 tree value, purpose, elt_type;
5610
5611 /* Whether CTOR is a valid constant initializer, in accordance with what
5612 initializer_constant_valid_p does. If inferred from the constructor
5613 elements, true until proven otherwise. */
5614 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5615 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5616
5617 nz_elts = 0;
5618 init_elts = 0;
5619 num_fields = 0;
5620 elt_type = NULL_TREE;
5621
5622 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5623 {
5624 HOST_WIDE_INT mult = 1;
5625
5626 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5627 {
5628 tree lo_index = TREE_OPERAND (purpose, 0);
5629 tree hi_index = TREE_OPERAND (purpose, 1);
5630
5631 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5632 mult = (tree_to_uhwi (hi_index)
5633 - tree_to_uhwi (lo_index) + 1);
5634 }
5635 num_fields += mult;
5636 elt_type = TREE_TYPE (value);
5637
5638 switch (TREE_CODE (value))
5639 {
5640 case CONSTRUCTOR:
5641 {
5642 HOST_WIDE_INT nz = 0, ic = 0;
5643
5644 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5645 p_complete);
5646
5647 nz_elts += mult * nz;
5648 init_elts += mult * ic;
5649
5650 if (const_from_elts_p && const_p)
5651 const_p = const_elt_p;
5652 }
5653 break;
5654
5655 case INTEGER_CST:
5656 case REAL_CST:
5657 case FIXED_CST:
5658 if (!initializer_zerop (value))
5659 nz_elts += mult;
5660 init_elts += mult;
5661 break;
5662
5663 case STRING_CST:
5664 nz_elts += mult * TREE_STRING_LENGTH (value);
5665 init_elts += mult * TREE_STRING_LENGTH (value);
5666 break;
5667
5668 case COMPLEX_CST:
5669 if (!initializer_zerop (TREE_REALPART (value)))
5670 nz_elts += mult;
5671 if (!initializer_zerop (TREE_IMAGPART (value)))
5672 nz_elts += mult;
5673 init_elts += mult;
5674 break;
5675
5676 case VECTOR_CST:
5677 {
5678 unsigned i;
5679 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5680 {
5681 tree v = VECTOR_CST_ELT (value, i);
5682 if (!initializer_zerop (v))
5683 nz_elts += mult;
5684 init_elts += mult;
5685 }
5686 }
5687 break;
5688
5689 default:
5690 {
5691 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5692 nz_elts += mult * tc;
5693 init_elts += mult * tc;
5694
5695 if (const_from_elts_p && const_p)
5696 const_p = initializer_constant_valid_p (value, elt_type)
5697 != NULL_TREE;
5698 }
5699 break;
5700 }
5701 }
5702
5703 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5704 num_fields, elt_type))
5705 *p_complete = false;
5706
5707 *p_nz_elts += nz_elts;
5708 *p_init_elts += init_elts;
5709
5710 return const_p;
5711 }
5712
5713 /* Examine CTOR to discover:
5714 * how many scalar fields are set to nonzero values,
5715 and place it in *P_NZ_ELTS;
5716 * how many scalar fields in total are initialized by CTOR,
5717 and place it in *P_INIT_ELTS;
5718 * whether the constructor is complete -- in the sense that every
5719 meaningful byte is explicitly given a value --
5720 and place it in *P_COMPLETE.
5721
5722 Return whether or not CTOR is a valid static constant initializer, the same
5723 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5724
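/* Worked example, an illustration rather than original text: for the
   initializer { 0, 7, 0, 9 } of an int[4], the function below reports
   *P_NZ_ELTS == 2 and *P_INIT_ELTS == 4, leaves *P_COMPLETE true
   because every element is covered, and returns true since each element
   is an INTEGER_CST acceptable to initializer_constant_valid_p.  */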
5725 bool
5726 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5727 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5728 {
5729 *p_nz_elts = 0;
5730 *p_init_elts = 0;
5731 *p_complete = true;
5732
5733 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5734 }
5735
5736 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5737 of which had type LAST_TYPE. Each element was itself a complete
5738 initializer, in the sense that every meaningful byte was explicitly
5739 given a value. Return true if the same is true for the constructor
5740 as a whole. */
5741
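/* Example, added here as an illustration only: a union initialized with
   a single member whose TYPE_SIZE equals the union's own size counts as
   complete, whereas initializing a narrower member leaves possible tail
   padding unspecified and the function below returns false.  */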
5742 bool
5743 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5744 const_tree last_type)
5745 {
5746 if (TREE_CODE (type) == UNION_TYPE
5747 || TREE_CODE (type) == QUAL_UNION_TYPE)
5748 {
5749 if (num_elts == 0)
5750 return false;
5751
5752 gcc_assert (num_elts == 1 && last_type);
5753
5754 /* ??? We could look at each element of the union, and find the
5755 largest element, which would avoid comparing the size of the
5756 initialized element against any tail padding in the union.
5757 Doesn't seem worth the effort... */
5758 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5759 }
5760
5761 return count_type_elements (type, true) == num_elts;
5762 }
5763
5764 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
5765
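/* Numeric sketch, not in the original source: with the strict test
   below, a complete constructor supplying 8 elements of which only one
   is nonzero qualifies (1 < 8 / 4), while one supplying 4 elements with
   one nonzero does not (1 < 4 / 4 is false); an incomplete constructor
   always qualifies because the uncovered elements are implicitly zero.  */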
5766 static int
5767 mostly_zeros_p (const_tree exp)
5768 {
5769 if (TREE_CODE (exp) == CONSTRUCTOR)
5770 {
5771 HOST_WIDE_INT nz_elts, init_elts;
5772 bool complete_p;
5773
5774 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5775 return !complete_p || nz_elts < init_elts / 4;
5776 }
5777
5778 return initializer_zerop (exp);
5779 }
5780
5781 /* Return 1 if EXP contains all zeros. */
5782
5783 static int
5784 all_zeros_p (const_tree exp)
5785 {
5786 if (TREE_CODE (exp) == CONSTRUCTOR)
5787 {
5788 HOST_WIDE_INT nz_elts, init_elts;
5789 bool complete_p;
5790
5791 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5792 return nz_elts == 0;
5793 }
5794
5795 return initializer_zerop (exp);
5796 }
5797 \f
5798 /* Helper function for store_constructor.
5799 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5800 CLEARED is as for store_constructor.
5801 ALIAS_SET is the alias set to use for any stores.
5802
5803 This provides a recursive shortcut back to store_constructor when it isn't
5804 necessary to go through store_field. This is so that we can pass through
5805 the cleared field to let store_constructor know that we may not have to
5806 clear a substructure if the outer structure has already been cleared. */
5807
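/* For example (an added illustration): when a nested CONSTRUCTOR
   initializes a byte-aligned sub-structure of an outer object that has
   already been cleared, the shortcut below lets store_constructor skip
   clearing that sub-structure again.  */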
5808 static void
5809 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5810 HOST_WIDE_INT bitpos, enum machine_mode mode,
5811 tree exp, int cleared, alias_set_type alias_set)
5812 {
5813 if (TREE_CODE (exp) == CONSTRUCTOR
5814 /* We can only call store_constructor recursively if the size and
5815 bit position are on a byte boundary. */
5816 && bitpos % BITS_PER_UNIT == 0
5817 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5818 /* If we have a nonzero bitpos for a register target, then we just
5819 let store_field do the bitfield handling. This is unlikely to
5820 generate unnecessary clear instructions anyway. */
5821 && (bitpos == 0 || MEM_P (target)))
5822 {
5823 if (MEM_P (target))
5824 target
5825 = adjust_address (target,
5826 GET_MODE (target) == BLKmode
5827 || 0 != (bitpos
5828 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5829 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5830
5831
5832 /* Update the alias set, if required. */
5833 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5834 && MEM_ALIAS_SET (target) != 0)
5835 {
5836 target = copy_rtx (target);
5837 set_mem_alias_set (target, alias_set);
5838 }
5839
5840 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5841 }
5842 else
5843 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5844 }
5845
5846
5847 /* Returns the number of FIELD_DECLs in TYPE. */
5848
5849 static int
5850 fields_length (const_tree type)
5851 {
5852 tree t = TYPE_FIELDS (type);
5853 int count = 0;
5854
5855 for (; t; t = DECL_CHAIN (t))
5856 if (TREE_CODE (t) == FIELD_DECL)
5857 ++count;
5858
5859 return count;
5860 }
5861
5862
5863 /* Store the value of constructor EXP into the rtx TARGET.
5864 TARGET is either a REG or a MEM; we know it cannot conflict, since
5865 safe_from_p has been called.
5866 CLEARED is true if TARGET is known to have been zeroed.
5867 SIZE is the number of bytes of TARGET we are allowed to modify: this
5868 may not be the same as the size of EXP if we are assigning to a field
5869 which has been packed to exclude padding bits. */
5870
5871 static void
5872 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5873 {
5874 tree type = TREE_TYPE (exp);
5875 #ifdef WORD_REGISTER_OPERATIONS
5876 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5877 #endif
5878
5879 switch (TREE_CODE (type))
5880 {
5881 case RECORD_TYPE:
5882 case UNION_TYPE:
5883 case QUAL_UNION_TYPE:
5884 {
5885 unsigned HOST_WIDE_INT idx;
5886 tree field, value;
5887
5888 /* If size is zero or the target is already cleared, do nothing. */
5889 if (size == 0 || cleared)
5890 cleared = 1;
5891 /* We either clear the aggregate or indicate the value is dead. */
5892 else if ((TREE_CODE (type) == UNION_TYPE
5893 || TREE_CODE (type) == QUAL_UNION_TYPE)
5894 && ! CONSTRUCTOR_ELTS (exp))
5895 /* If the constructor is empty, clear the union. */
5896 {
5897 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5898 cleared = 1;
5899 }
5900
5901 /* If we are building a static constructor into a register,
5902 set the initial value to zero so we can fold the value into
5903 a constant. But if more than one register is involved,
5904 this probably loses. */
5905 else if (REG_P (target) && TREE_STATIC (exp)
5906 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5907 {
5908 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5909 cleared = 1;
5910 }
5911
5912 /* If the constructor has fewer fields than the structure or
5913 if we are initializing the structure to mostly zeros, clear
5914 the whole structure first. Don't do this if TARGET is a
5915 register whose mode size isn't equal to SIZE since
5916 clear_storage can't handle this case. */
5917 else if (size > 0
5918 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5919 != fields_length (type))
5920 || mostly_zeros_p (exp))
5921 && (!REG_P (target)
5922 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5923 == size)))
5924 {
5925 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5926 cleared = 1;
5927 }
5928
5929 if (REG_P (target) && !cleared)
5930 emit_clobber (target);
5931
5932 /* Store each element of the constructor into the
5933 corresponding field of TARGET. */
5934 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5935 {
5936 enum machine_mode mode;
5937 HOST_WIDE_INT bitsize;
5938 HOST_WIDE_INT bitpos = 0;
5939 tree offset;
5940 rtx to_rtx = target;
5941
5942 /* Just ignore missing fields. We cleared the whole
5943 structure, above, if any fields are missing. */
5944 if (field == 0)
5945 continue;
5946
5947 if (cleared && initializer_zerop (value))
5948 continue;
5949
5950 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5951 bitsize = tree_to_uhwi (DECL_SIZE (field));
5952 else
5953 bitsize = -1;
5954
5955 mode = DECL_MODE (field);
5956 if (DECL_BIT_FIELD (field))
5957 mode = VOIDmode;
5958
5959 offset = DECL_FIELD_OFFSET (field);
5960 if (tree_fits_shwi_p (offset)
5961 && tree_fits_shwi_p (bit_position (field)))
5962 {
5963 bitpos = int_bit_position (field);
5964 offset = 0;
5965 }
5966 else
5967 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5968
5969 if (offset)
5970 {
5971 enum machine_mode address_mode;
5972 rtx offset_rtx;
5973
5974 offset
5975 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5976 make_tree (TREE_TYPE (exp),
5977 target));
5978
5979 offset_rtx = expand_normal (offset);
5980 gcc_assert (MEM_P (to_rtx));
5981
5982 address_mode = get_address_mode (to_rtx);
5983 if (GET_MODE (offset_rtx) != address_mode)
5984 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5985
5986 to_rtx = offset_address (to_rtx, offset_rtx,
5987 highest_pow2_factor (offset));
5988 }
5989
5990 #ifdef WORD_REGISTER_OPERATIONS
5991 /* If this initializes a field that is smaller than a
5992 word, at the start of a word, try to widen it to a full
5993 word. This special case allows us to output C++ member
5994 function initializations in a form that the optimizers
5995 can understand. */
5996 if (REG_P (target)
5997 && bitsize < BITS_PER_WORD
5998 && bitpos % BITS_PER_WORD == 0
5999 && GET_MODE_CLASS (mode) == MODE_INT
6000 && TREE_CODE (value) == INTEGER_CST
6001 && exp_size >= 0
6002 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6003 {
6004 tree type = TREE_TYPE (value);
6005
6006 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6007 {
6008 type = lang_hooks.types.type_for_mode
6009 (word_mode, TYPE_UNSIGNED (type));
6010 value = fold_convert (type, value);
6011 }
6012
6013 if (BYTES_BIG_ENDIAN)
6014 value
6015 = fold_build2 (LSHIFT_EXPR, type, value,
6016 build_int_cst (type,
6017 BITS_PER_WORD - bitsize));
6018 bitsize = BITS_PER_WORD;
6019 mode = word_mode;
6020 }
6021 #endif
6022
6023 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6024 && DECL_NONADDRESSABLE_P (field))
6025 {
6026 to_rtx = copy_rtx (to_rtx);
6027 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6028 }
6029
6030 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6031 value, cleared,
6032 get_alias_set (TREE_TYPE (field)));
6033 }
6034 break;
6035 }
6036 case ARRAY_TYPE:
6037 {
6038 tree value, index;
6039 unsigned HOST_WIDE_INT i;
6040 int need_to_clear;
6041 tree domain;
6042 tree elttype = TREE_TYPE (type);
6043 int const_bounds_p;
6044 HOST_WIDE_INT minelt = 0;
6045 HOST_WIDE_INT maxelt = 0;
6046
6047 domain = TYPE_DOMAIN (type);
6048 const_bounds_p = (TYPE_MIN_VALUE (domain)
6049 && TYPE_MAX_VALUE (domain)
6050 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6051 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6052
6053 /* If we have constant bounds for the range of the type, get them. */
6054 if (const_bounds_p)
6055 {
6056 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6057 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6058 }
6059
6060 /* If the constructor has fewer elements than the array, clear
6061 the whole array first. Similarly if this is a static
6062 constructor of a non-BLKmode object. */
6063 if (cleared)
6064 need_to_clear = 0;
6065 else if (REG_P (target) && TREE_STATIC (exp))
6066 need_to_clear = 1;
6067 else
6068 {
6069 unsigned HOST_WIDE_INT idx;
6070 tree index, value;
6071 HOST_WIDE_INT count = 0, zero_count = 0;
6072 need_to_clear = ! const_bounds_p;
6073
6074 /* This loop is a more accurate version of the loop in
6075 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6076 is also needed to check for missing elements. */
6077 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6078 {
6079 HOST_WIDE_INT this_node_count;
6080
6081 if (need_to_clear)
6082 break;
6083
6084 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6085 {
6086 tree lo_index = TREE_OPERAND (index, 0);
6087 tree hi_index = TREE_OPERAND (index, 1);
6088
6089 if (! tree_fits_uhwi_p (lo_index)
6090 || ! tree_fits_uhwi_p (hi_index))
6091 {
6092 need_to_clear = 1;
6093 break;
6094 }
6095
6096 this_node_count = (tree_to_uhwi (hi_index)
6097 - tree_to_uhwi (lo_index) + 1);
6098 }
6099 else
6100 this_node_count = 1;
6101
6102 count += this_node_count;
6103 if (mostly_zeros_p (value))
6104 zero_count += this_node_count;
6105 }
6106
6107 /* Clear the entire array first if there are any missing
6108 elements, or if the incidence of zero elements is >=
6109 75%. */
6110 if (! need_to_clear
6111 && (count < maxelt - minelt + 1
6112 || 4 * zero_count >= 3 * count))
6113 need_to_clear = 1;
6114 }
6115
6116 if (need_to_clear && size > 0)
6117 {
6118 if (REG_P (target))
6119 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6120 else
6121 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6122 cleared = 1;
6123 }
6124
6125 if (!cleared && REG_P (target))
6126 /* Inform later passes that the old value is dead. */
6127 emit_clobber (target);
6128
6129 /* Store each element of the constructor into the
6130 corresponding element of TARGET, determined by counting the
6131 elements. */
6132 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6133 {
6134 enum machine_mode mode;
6135 HOST_WIDE_INT bitsize;
6136 HOST_WIDE_INT bitpos;
6137 rtx xtarget = target;
6138
6139 if (cleared && initializer_zerop (value))
6140 continue;
6141
6142 mode = TYPE_MODE (elttype);
6143 if (mode == BLKmode)
6144 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6145 ? tree_to_uhwi (TYPE_SIZE (elttype))
6146 : -1);
6147 else
6148 bitsize = GET_MODE_BITSIZE (mode);
6149
6150 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6151 {
6152 tree lo_index = TREE_OPERAND (index, 0);
6153 tree hi_index = TREE_OPERAND (index, 1);
6154 rtx index_r, pos_rtx;
6155 HOST_WIDE_INT lo, hi, count;
6156 tree position;
6157
6158 /* If the range is constant and "small", unroll the loop. */
6159 if (const_bounds_p
6160 && tree_fits_shwi_p (lo_index)
6161 && tree_fits_shwi_p (hi_index)
6162 && (lo = tree_to_shwi (lo_index),
6163 hi = tree_to_shwi (hi_index),
6164 count = hi - lo + 1,
6165 (!MEM_P (target)
6166 || count <= 2
6167 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6168 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6169 <= 40 * 8)))))
6170 {
6171 lo -= minelt; hi -= minelt;
6172 for (; lo <= hi; lo++)
6173 {
6174 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6175
6176 if (MEM_P (target)
6177 && !MEM_KEEP_ALIAS_SET_P (target)
6178 && TREE_CODE (type) == ARRAY_TYPE
6179 && TYPE_NONALIASED_COMPONENT (type))
6180 {
6181 target = copy_rtx (target);
6182 MEM_KEEP_ALIAS_SET_P (target) = 1;
6183 }
6184
6185 store_constructor_field
6186 (target, bitsize, bitpos, mode, value, cleared,
6187 get_alias_set (elttype));
6188 }
6189 }
6190 else
6191 {
6192 rtx loop_start = gen_label_rtx ();
6193 rtx loop_end = gen_label_rtx ();
6194 tree exit_cond;
6195
6196 expand_normal (hi_index);
6197
6198 index = build_decl (EXPR_LOCATION (exp),
6199 VAR_DECL, NULL_TREE, domain);
6200 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6201 SET_DECL_RTL (index, index_r);
6202 store_expr (lo_index, index_r, 0, false);
6203
6204 /* Build the head of the loop. */
6205 do_pending_stack_adjust ();
6206 emit_label (loop_start);
6207
6208 /* Assign value to element index. */
6209 position =
6210 fold_convert (ssizetype,
6211 fold_build2 (MINUS_EXPR,
6212 TREE_TYPE (index),
6213 index,
6214 TYPE_MIN_VALUE (domain)));
6215
6216 position =
6217 size_binop (MULT_EXPR, position,
6218 fold_convert (ssizetype,
6219 TYPE_SIZE_UNIT (elttype)));
6220
6221 pos_rtx = expand_normal (position);
6222 xtarget = offset_address (target, pos_rtx,
6223 highest_pow2_factor (position));
6224 xtarget = adjust_address (xtarget, mode, 0);
6225 if (TREE_CODE (value) == CONSTRUCTOR)
6226 store_constructor (value, xtarget, cleared,
6227 bitsize / BITS_PER_UNIT);
6228 else
6229 store_expr (value, xtarget, 0, false);
6230
6231 /* Generate a conditional jump to exit the loop. */
6232 exit_cond = build2 (LT_EXPR, integer_type_node,
6233 index, hi_index);
6234 jumpif (exit_cond, loop_end, -1);
6235
6236 /* Update the loop counter, and jump to the head of
6237 the loop. */
6238 expand_assignment (index,
6239 build2 (PLUS_EXPR, TREE_TYPE (index),
6240 index, integer_one_node),
6241 false);
6242
6243 emit_jump (loop_start);
6244
6245 /* Build the end of the loop. */
6246 emit_label (loop_end);
6247 }
6248 }
6249 else if ((index != 0 && ! tree_fits_shwi_p (index))
6250 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6251 {
6252 tree position;
6253
6254 if (index == 0)
6255 index = ssize_int (1);
6256
6257 if (minelt)
6258 index = fold_convert (ssizetype,
6259 fold_build2 (MINUS_EXPR,
6260 TREE_TYPE (index),
6261 index,
6262 TYPE_MIN_VALUE (domain)));
6263
6264 position =
6265 size_binop (MULT_EXPR, index,
6266 fold_convert (ssizetype,
6267 TYPE_SIZE_UNIT (elttype)));
6268 xtarget = offset_address (target,
6269 expand_normal (position),
6270 highest_pow2_factor (position));
6271 xtarget = adjust_address (xtarget, mode, 0);
6272 store_expr (value, xtarget, 0, false);
6273 }
6274 else
6275 {
6276 if (index != 0)
6277 bitpos = ((tree_to_shwi (index) - minelt)
6278 * tree_to_uhwi (TYPE_SIZE (elttype)));
6279 else
6280 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6281
6282 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6283 && TREE_CODE (type) == ARRAY_TYPE
6284 && TYPE_NONALIASED_COMPONENT (type))
6285 {
6286 target = copy_rtx (target);
6287 MEM_KEEP_ALIAS_SET_P (target) = 1;
6288 }
6289 store_constructor_field (target, bitsize, bitpos, mode, value,
6290 cleared, get_alias_set (elttype));
6291 }
6292 }
6293 break;
6294 }
6295
6296 case VECTOR_TYPE:
6297 {
6298 unsigned HOST_WIDE_INT idx;
6299 constructor_elt *ce;
6300 int i;
6301 int need_to_clear;
6302 int icode = CODE_FOR_nothing;
6303 tree elttype = TREE_TYPE (type);
6304 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6305 enum machine_mode eltmode = TYPE_MODE (elttype);
6306 HOST_WIDE_INT bitsize;
6307 HOST_WIDE_INT bitpos;
6308 rtvec vector = NULL;
6309 unsigned n_elts;
6310 alias_set_type alias;
6311
6312 gcc_assert (eltmode != BLKmode);
6313
6314 n_elts = TYPE_VECTOR_SUBPARTS (type);
6315 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6316 {
6317 enum machine_mode mode = GET_MODE (target);
6318
6319 icode = (int) optab_handler (vec_init_optab, mode);
6320 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6321 if (icode != CODE_FOR_nothing)
6322 {
6323 tree value;
6324
6325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6326 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6327 {
6328 icode = CODE_FOR_nothing;
6329 break;
6330 }
6331 }
6332 if (icode != CODE_FOR_nothing)
6333 {
6334 unsigned int i;
6335
6336 vector = rtvec_alloc (n_elts);
6337 for (i = 0; i < n_elts; i++)
6338 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6339 }
6340 }
6341
6342 /* If the constructor has fewer elements than the vector,
6343 clear the whole vector first. Similarly if this is a static
6344 constructor of a non-BLKmode object. */
6345 if (cleared)
6346 need_to_clear = 0;
6347 else if (REG_P (target) && TREE_STATIC (exp))
6348 need_to_clear = 1;
6349 else
6350 {
6351 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6352 tree value;
6353
6354 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6355 {
6356 int n_elts_here = tree_to_uhwi
6357 (int_const_binop (TRUNC_DIV_EXPR,
6358 TYPE_SIZE (TREE_TYPE (value)),
6359 TYPE_SIZE (elttype)));
6360
6361 count += n_elts_here;
6362 if (mostly_zeros_p (value))
6363 zero_count += n_elts_here;
6364 }
6365
6366 /* Clear the entire vector first if there are any missing elements,
6367 or if the incidence of zero elements is >= 75%. */
6368 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6369 }
6370
6371 if (need_to_clear && size > 0 && !vector)
6372 {
6373 if (REG_P (target))
6374 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6375 else
6376 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6377 cleared = 1;
6378 }
6379
6380 /* Inform later passes that the old value is dead. */
6381 if (!cleared && !vector && REG_P (target))
6382 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6383
6384 if (MEM_P (target))
6385 alias = MEM_ALIAS_SET (target);
6386 else
6387 alias = get_alias_set (elttype);
6388
6389 /* Store each element of the constructor into the corresponding
6390 element of TARGET, determined by counting the elements. */
6391 for (idx = 0, i = 0;
6392 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6393 idx++, i += bitsize / elt_size)
6394 {
6395 HOST_WIDE_INT eltpos;
6396 tree value = ce->value;
6397
6398 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6399 if (cleared && initializer_zerop (value))
6400 continue;
6401
6402 if (ce->index)
6403 eltpos = tree_to_uhwi (ce->index);
6404 else
6405 eltpos = i;
6406
6407 if (vector)
6408 {
6409 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6410 elements. */
6411 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6412 RTVEC_ELT (vector, eltpos)
6413 = expand_normal (value);
6414 }
6415 else
6416 {
6417 enum machine_mode value_mode =
6418 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6419 ? TYPE_MODE (TREE_TYPE (value))
6420 : eltmode;
6421 bitpos = eltpos * elt_size;
6422 store_constructor_field (target, bitsize, bitpos, value_mode,
6423 value, cleared, alias);
6424 }
6425 }
6426
6427 if (vector)
6428 emit_insn (GEN_FCN (icode)
6429 (target,
6430 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6431 break;
6432 }
6433
6434 default:
6435 gcc_unreachable ();
6436 }
6437 }
6438
6439 /* Store the value of EXP (an expression tree)
6440 into a subfield of TARGET which has mode MODE and occupies
6441 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6442 If MODE is VOIDmode, it means that we are storing into a bit-field.
6443
6444 BITREGION_START is the bitpos of the first bitfield in this region.
6445 BITREGION_END is the bitpos of the ending bitfield in this region.
6446 These two fields are 0, if the C++ memory model does not apply,
6447 or we are not interested in keeping track of bitfield regions.
6448
6449 Always return const0_rtx unless we have something particular to
6450 return.
6451
6452 ALIAS_SET is the alias set for the destination. This value will
6453 (in general) be different from that for TARGET, since TARGET is a
6454 reference to the containing structure.
6455
6456 If NONTEMPORAL is true, try generating a nontemporal store. */
6457
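/* Illustrative case, an assumption added for exposition: storing the
   value 3 into a 5-bit field at bit offset 11 of a structure in memory
   arrives here with BITSIZE == 5, BITPOS == 11 and MODE == VOIDmode,
   which sends the store down the bit-field path below instead of
   through an ordinary memory reference.  */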
6458 static rtx
6459 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6460 unsigned HOST_WIDE_INT bitregion_start,
6461 unsigned HOST_WIDE_INT bitregion_end,
6462 enum machine_mode mode, tree exp,
6463 alias_set_type alias_set, bool nontemporal)
6464 {
6465 if (TREE_CODE (exp) == ERROR_MARK)
6466 return const0_rtx;
6467
6468 /* If we have nothing to store, do nothing unless the expression has
6469 side-effects. */
6470 if (bitsize == 0)
6471 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6472
6473 if (GET_CODE (target) == CONCAT)
6474 {
6475 /* We're storing into a struct containing a single __complex. */
6476
6477 gcc_assert (!bitpos);
6478 return store_expr (exp, target, 0, nontemporal);
6479 }
6480
6481 /* If the structure is in a register or if the component
6482 is a bit field, we cannot use addressing to access it.
6483 Use bit-field techniques or SUBREG to store in it. */
6484
6485 if (mode == VOIDmode
6486 || (mode != BLKmode && ! direct_store[(int) mode]
6487 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6488 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6489 || REG_P (target)
6490 || GET_CODE (target) == SUBREG
6491 /* If the field isn't aligned enough to store as an ordinary memref,
6492 store it as a bit field. */
6493 || (mode != BLKmode
6494 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6495 || bitpos % GET_MODE_ALIGNMENT (mode))
6496 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6497 || (bitpos % BITS_PER_UNIT != 0)))
6498 || (bitsize >= 0 && mode != BLKmode
6499 && GET_MODE_BITSIZE (mode) > bitsize)
6500 /* If the RHS and field are a constant size and the size of the
6501 RHS isn't the same size as the bitfield, we must use bitfield
6502 operations. */
6503 || (bitsize >= 0
6504 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6505 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6506 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6507 decl we must use bitfield operations. */
6508 || (bitsize >= 0
6509 && TREE_CODE (exp) == MEM_REF
6510 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6511 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6512 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6513 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6514 {
6515 rtx temp;
6516 gimple nop_def;
6517
6518 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6519 implies a mask operation. If the precision is the same size as
6520 the field we're storing into, that mask is redundant. This is
6521 particularly common with bit field assignments generated by the
6522 C front end. */
6523 nop_def = get_def_for_expr (exp, NOP_EXPR);
6524 if (nop_def)
6525 {
6526 tree type = TREE_TYPE (exp);
6527 if (INTEGRAL_TYPE_P (type)
6528 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6529 && bitsize == TYPE_PRECISION (type))
6530 {
6531 tree op = gimple_assign_rhs1 (nop_def);
6532 type = TREE_TYPE (op);
6533 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6534 exp = op;
6535 }
6536 }
6537
6538 temp = expand_normal (exp);
6539
6540 /* If BITSIZE is narrower than the size of the type of EXP
6541 we will be narrowing TEMP. Normally, what's wanted are the
6542 low-order bits. However, if EXP's type is a record and this is a
6543 big-endian machine, we want the upper BITSIZE bits. */
6544 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6545 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6546 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6547 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6548 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6549 NULL_RTX, 1);
6550
6551 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6552 if (mode != VOIDmode && mode != BLKmode
6553 && mode != TYPE_MODE (TREE_TYPE (exp)))
6554 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6555
6556 /* If the modes of TEMP and TARGET are both BLKmode, both
6557 must be in memory and BITPOS must be aligned on a byte
6558 boundary. If so, we simply do a block copy. Likewise
6559 for a BLKmode-like TARGET. */
6560 if (GET_MODE (temp) == BLKmode
6561 && (GET_MODE (target) == BLKmode
6562 || (MEM_P (target)
6563 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6564 && (bitpos % BITS_PER_UNIT) == 0
6565 && (bitsize % BITS_PER_UNIT) == 0)))
6566 {
6567 gcc_assert (MEM_P (target) && MEM_P (temp)
6568 && (bitpos % BITS_PER_UNIT) == 0);
6569
6570 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6571 emit_block_move (target, temp,
6572 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6573 / BITS_PER_UNIT),
6574 BLOCK_OP_NORMAL);
6575
6576 return const0_rtx;
6577 }
6578
6579 /* Handle calls that return values in multiple non-contiguous locations.
6580 The Irix 6 ABI has examples of this. */
6581 if (GET_CODE (temp) == PARALLEL)
6582 {
6583 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6584 rtx temp_target;
6585 if (mode == BLKmode || mode == VOIDmode)
6586 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6587 temp_target = gen_reg_rtx (mode);
6588 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6589 temp = temp_target;
6590 }
6591 else if (mode == BLKmode)
6592 {
6593 /* Handle calls that return BLKmode values in registers. */
6594 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6595 {
6596 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6597 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6598 temp = temp_target;
6599 }
6600 else
6601 {
6602 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6603 rtx temp_target;
6604 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6605 temp_target = gen_reg_rtx (mode);
6606 temp_target
6607 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6608 temp_target, mode, mode);
6609 temp = temp_target;
6610 }
6611 }
6612
6613 /* Store the value in the bitfield. */
6614 store_bit_field (target, bitsize, bitpos,
6615 bitregion_start, bitregion_end,
6616 mode, temp);
6617
6618 return const0_rtx;
6619 }
6620 else
6621 {
6622 /* Now build a reference to just the desired component. */
6623 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6624
6625 if (to_rtx == target)
6626 to_rtx = copy_rtx (to_rtx);
6627
6628 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6629 set_mem_alias_set (to_rtx, alias_set);
6630
6631 return store_expr (exp, to_rtx, 0, nontemporal);
6632 }
6633 }
6634 \f
6635 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6636 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6637 codes and find the ultimate containing object, which we return.
6638
6639 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6640 bit position, and *PUNSIGNEDP to the signedness of the field.
6641 If the position of the field is variable, we store a tree
6642 giving the variable offset (in units) in *POFFSET.
6643 This offset is in addition to the bit position.
6644 If the position is not variable, we store 0 in *POFFSET.
6645
6646 If any of the extraction expressions is volatile,
6647 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6648
6649 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6650 Otherwise, it is a mode that can be used to access the field.
6651
6652 If the field describes a variable-sized object, *PMODE is set to
6653 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6654 this case, but the address of the object can be found.
6655
6656 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6657 look through nodes that serve as markers of a greater alignment than
6658 the one that can be deduced from the expression. These nodes make it
6659 possible for front-ends to prevent temporaries from being created by
6660 the middle-end on alignment considerations. For that purpose, the
6661 normal operating mode at high-level is to always pass FALSE so that
6662 the ultimate containing object is really returned; moreover, the
6663 associated predicate handled_component_p will always return TRUE
6664 on these nodes, thus indicating that they are essentially handled
6665 by get_inner_reference. TRUE should only be passed when the caller
6666 is scanning the expression in order to build another representation
6667 and specifically knows how to handle these nodes; as such, this is
6668 the normal operating mode in the RTL expanders. */
6669
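/* Hedged example, not part of the original source: for a reference such
   as S.A[I].B where B is a non-BLKmode bit-field, the walk below hands
   back the decl S, sets *PBITSIZE and *PBITPOS from B, leaves the
   variable part of the address (I scaled by the element size) in
   *POFFSET, and stores VOIDmode in *PMODE.  */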
6670 tree
6671 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6672 HOST_WIDE_INT *pbitpos, tree *poffset,
6673 enum machine_mode *pmode, int *punsignedp,
6674 int *pvolatilep, bool keep_aligning)
6675 {
6676 tree size_tree = 0;
6677 enum machine_mode mode = VOIDmode;
6678 bool blkmode_bitfield = false;
6679 tree offset = size_zero_node;
6680 offset_int bit_offset = 0;
6681
6682 /* First get the mode, signedness, and size. We do this from just the
6683 outermost expression. */
6684 *pbitsize = -1;
6685 if (TREE_CODE (exp) == COMPONENT_REF)
6686 {
6687 tree field = TREE_OPERAND (exp, 1);
6688 size_tree = DECL_SIZE (field);
6689 if (flag_strict_volatile_bitfields > 0
6690 && TREE_THIS_VOLATILE (exp)
6691 && DECL_BIT_FIELD_TYPE (field)
6692 && DECL_MODE (field) != BLKmode)
6693 /* Volatile bitfields should be accessed in the mode of the
6694 field's type, not the mode computed based on the bit
6695 size. */
6696 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6697 else if (!DECL_BIT_FIELD (field))
6698 mode = DECL_MODE (field);
6699 else if (DECL_MODE (field) == BLKmode)
6700 blkmode_bitfield = true;
6701
6702 *punsignedp = DECL_UNSIGNED (field);
6703 }
6704 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6705 {
6706 size_tree = TREE_OPERAND (exp, 1);
6707 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6708 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6709
6710 /* For vector types, with the correct size of access, use the mode of
6711 the inner type. */
6712 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6713 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6714 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6715 mode = TYPE_MODE (TREE_TYPE (exp));
6716 }
6717 else
6718 {
6719 mode = TYPE_MODE (TREE_TYPE (exp));
6720 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6721
6722 if (mode == BLKmode)
6723 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6724 else
6725 *pbitsize = GET_MODE_BITSIZE (mode);
6726 }
6727
6728 if (size_tree != 0)
6729 {
6730 if (! tree_fits_uhwi_p (size_tree))
6731 mode = BLKmode, *pbitsize = -1;
6732 else
6733 *pbitsize = tree_to_uhwi (size_tree);
6734 }
6735
6736 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6737 and find the ultimate containing object. */
6738 while (1)
6739 {
6740 switch (TREE_CODE (exp))
6741 {
6742 case BIT_FIELD_REF:
6743 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6744 break;
6745
6746 case COMPONENT_REF:
6747 {
6748 tree field = TREE_OPERAND (exp, 1);
6749 tree this_offset = component_ref_field_offset (exp);
6750
6751 /* If this field hasn't been filled in yet, don't go past it.
6752 This should only happen when folding expressions made during
6753 type construction. */
6754 if (this_offset == 0)
6755 break;
6756
6757 offset = size_binop (PLUS_EXPR, offset, this_offset);
6758 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6759
6760 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6761 }
6762 break;
6763
6764 case ARRAY_REF:
6765 case ARRAY_RANGE_REF:
6766 {
6767 tree index = TREE_OPERAND (exp, 1);
6768 tree low_bound = array_ref_low_bound (exp);
6769 tree unit_size = array_ref_element_size (exp);
6770
6771 /* We assume all arrays have sizes that are a multiple of a byte.
6772 First subtract the lower bound, if any, in the type of the
6773 index, then convert to sizetype and multiply by the size of
6774 the array element. */
6775 if (! integer_zerop (low_bound))
6776 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6777 index, low_bound);
6778
6779 offset = size_binop (PLUS_EXPR, offset,
6780 size_binop (MULT_EXPR,
6781 fold_convert (sizetype, index),
6782 unit_size));
6783 }
6784 break;
6785
6786 case REALPART_EXPR:
6787 break;
6788
6789 case IMAGPART_EXPR:
6790 bit_offset += *pbitsize;
6791 break;
6792
6793 case VIEW_CONVERT_EXPR:
6794 if (keep_aligning && STRICT_ALIGNMENT
6795 && (TYPE_ALIGN (TREE_TYPE (exp))
6796 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6797 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6798 < BIGGEST_ALIGNMENT)
6799 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6800 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6801 goto done;
6802 break;
6803
6804 case MEM_REF:
6805 /* Hand back the decl for MEM[&decl, off]. */
6806 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6807 {
6808 tree off = TREE_OPERAND (exp, 1);
6809 if (!integer_zerop (off))
6810 {
6811 offset_int boff, coff = mem_ref_offset (exp);
6812 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6813 bit_offset += boff;
6814 }
6815 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6816 }
6817 goto done;
6818
6819 default:
6820 goto done;
6821 }
6822
6823 /* If any reference in the chain is volatile, the effect is volatile. */
6824 if (TREE_THIS_VOLATILE (exp))
6825 *pvolatilep = 1;
6826
6827 exp = TREE_OPERAND (exp, 0);
6828 }
6829 done:
6830
6831 /* If OFFSET is constant, see if we can return the whole thing as a
6832 constant bit position. Make sure to handle overflow during
6833 this conversion. */
6834 if (TREE_CODE (offset) == INTEGER_CST)
6835 {
6836 offset_int tem = wi::sext (wi::to_offset (offset),
6837 TYPE_PRECISION (sizetype));
6838 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6839 tem += bit_offset;
6840 if (wi::fits_shwi_p (tem))
6841 {
6842 *pbitpos = tem.to_shwi ();
6843 *poffset = offset = NULL_TREE;
6844 }
6845 }
6846
6847 /* Otherwise, split it up. */
6848 if (offset)
6849 {
6850 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6851 if (wi::neg_p (bit_offset))
6852 {
6853 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6854 offset_int tem = bit_offset.and_not (mask);
6855 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6856 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6857 bit_offset -= tem;
6858 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6859 offset = size_binop (PLUS_EXPR, offset,
6860 wide_int_to_tree (sizetype, tem));
6861 }
6862
6863 *pbitpos = bit_offset.to_shwi ();
6864 *poffset = offset;
6865 }
6866
6867 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6868 if (mode == VOIDmode
6869 && blkmode_bitfield
6870 && (*pbitpos % BITS_PER_UNIT) == 0
6871 && (*pbitsize % BITS_PER_UNIT) == 0)
6872 *pmode = BLKmode;
6873 else
6874 *pmode = mode;
6875
6876 return exp;
6877 }
6878
6879 /* Return a tree of sizetype representing the size, in bytes, of the element
6880 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6881
6882 tree
6883 array_ref_element_size (tree exp)
6884 {
6885 tree aligned_size = TREE_OPERAND (exp, 3);
6886 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6887 location_t loc = EXPR_LOCATION (exp);
6888
6889 /* If a size was specified in the ARRAY_REF, it's the size measured
6890 in alignment units of the element type. So multiply by that value. */
6891 if (aligned_size)
6892 {
6893 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6894 sizetype from another type of the same width and signedness. */
6895 if (TREE_TYPE (aligned_size) != sizetype)
6896 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6897 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6898 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6899 }
6900
6901 /* Otherwise, take the size from that of the element type. Substitute
6902 any PLACEHOLDER_EXPR that we have. */
6903 else
6904 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6905 }
6906
6907 /* Return a tree representing the lower bound of the array mentioned in
6908 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6909
6910 tree
6911 array_ref_low_bound (tree exp)
6912 {
6913 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6914
6915 /* If a lower bound is specified in EXP, use it. */
6916 if (TREE_OPERAND (exp, 2))
6917 return TREE_OPERAND (exp, 2);
6918
6919 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6920 substituting for a PLACEHOLDER_EXPR as needed. */
6921 if (domain_type && TYPE_MIN_VALUE (domain_type))
6922 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6923
6924 /* Otherwise, return a zero of the appropriate type. */
6925 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6926 }
6927
6928 /* Returns true if REF is an array reference to an array at the end of
6929 a structure. If this is the case, the array may be allocated larger
6930 than its upper bound implies. */
6931
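/* For instance (an added illustration): given

     struct V { int len; int data[1]; } *p;

   the reference p->data[i] is ultimately based on a MEM_REF rather than
   a declaration, so the function below returns true and callers may
   assume the trailing array was allocated larger than its declared
   bound implies.  */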
6932 bool
6933 array_at_struct_end_p (tree ref)
6934 {
6935 if (TREE_CODE (ref) != ARRAY_REF
6936 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6937 return false;
6938
6939 while (handled_component_p (ref))
6940 {
6941 /* If the reference chain contains a component reference to a
6942 non-union type and another field follows, the reference
6943 is not at the end of a structure. */
6944 if (TREE_CODE (ref) == COMPONENT_REF
6945 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6946 {
6947 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6948 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6949 nextf = DECL_CHAIN (nextf);
6950 if (nextf)
6951 return false;
6952 }
6953
6954 ref = TREE_OPERAND (ref, 0);
6955 }
6956
6957 /* If the reference is based on a declared entity, the size of the array
6958 is constrained by its given domain. */
6959 if (DECL_P (ref))
6960 return false;
6961
6962 return true;
6963 }
6964
6965 /* Return a tree representing the upper bound of the array mentioned in
6966 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6967
6968 tree
6969 array_ref_up_bound (tree exp)
6970 {
6971 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6972
6973 /* If there is a domain type and it has an upper bound, use it, substituting
6974 for a PLACEHOLDER_EXPR as needed. */
6975 if (domain_type && TYPE_MAX_VALUE (domain_type))
6976 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6977
6978 /* Otherwise fail. */
6979 return NULL_TREE;
6980 }
6981
6982 /* Return a tree representing the offset, in bytes, of the field referenced
6983 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6984
6985 tree
6986 component_ref_field_offset (tree exp)
6987 {
6988 tree aligned_offset = TREE_OPERAND (exp, 2);
6989 tree field = TREE_OPERAND (exp, 1);
6990 location_t loc = EXPR_LOCATION (exp);
6991
6992 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6993 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6994 value. */
6995 if (aligned_offset)
6996 {
6997 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6998 sizetype from another type of the same width and signedness. */
6999 if (TREE_TYPE (aligned_offset) != sizetype)
7000 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7001 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7002 size_int (DECL_OFFSET_ALIGN (field)
7003 / BITS_PER_UNIT));
7004 }
7005
7006 /* Otherwise, take the offset from that of the field. Substitute
7007 any PLACEHOLDER_EXPR that we have. */
7008 else
7009 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7010 }
7011
7012 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7013
7014 static unsigned HOST_WIDE_INT
7015 target_align (const_tree target)
7016 {
7017 /* We might have a chain of nested references with intermediate misaligning
7018 bitfield components, so we need to recurse to find out. */
7019
7020 unsigned HOST_WIDE_INT this_align, outer_align;
7021
7022 switch (TREE_CODE (target))
7023 {
7024 case BIT_FIELD_REF:
7025 return 1;
7026
7027 case COMPONENT_REF:
7028 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7029 outer_align = target_align (TREE_OPERAND (target, 0));
7030 return MIN (this_align, outer_align);
7031
7032 case ARRAY_REF:
7033 case ARRAY_RANGE_REF:
7034 this_align = TYPE_ALIGN (TREE_TYPE (target));
7035 outer_align = target_align (TREE_OPERAND (target, 0));
7036 return MIN (this_align, outer_align);
7037
7038 CASE_CONVERT:
7039 case NON_LVALUE_EXPR:
7040 case VIEW_CONVERT_EXPR:
7041 this_align = TYPE_ALIGN (TREE_TYPE (target));
7042 outer_align = target_align (TREE_OPERAND (target, 0));
7043 return MAX (this_align, outer_align);
7044
7045 default:
7046 return TYPE_ALIGN (TREE_TYPE (target));
7047 }
7048 }
7049
7050 \f
7051 /* Given an rtx VALUE that may contain additions and multiplications, return
7052 an equivalent value that just refers to a register, memory, or constant.
7053 This is done by generating instructions to perform the arithmetic and
7054 returning a pseudo-register containing the value.
7055
7056 The returned value may be a REG, SUBREG, MEM or constant. */
7057
7058 rtx
7059 force_operand (rtx value, rtx target)
7060 {
7061 rtx op1, op2;
7062 /* Use subtarget as the target for operand 0 of a binary operation. */
7063 rtx subtarget = get_subtarget (target);
7064 enum rtx_code code = GET_CODE (value);
7065
7066 /* Check for subreg applied to an expression produced by loop optimizer. */
7067 if (code == SUBREG
7068 && !REG_P (SUBREG_REG (value))
7069 && !MEM_P (SUBREG_REG (value)))
7070 {
7071 value
7072 = simplify_gen_subreg (GET_MODE (value),
7073 force_reg (GET_MODE (SUBREG_REG (value)),
7074 force_operand (SUBREG_REG (value),
7075 NULL_RTX)),
7076 GET_MODE (SUBREG_REG (value)),
7077 SUBREG_BYTE (value));
7078 code = GET_CODE (value);
7079 }
7080
7081 /* Check for a PIC address load. */
7082 if ((code == PLUS || code == MINUS)
7083 && XEXP (value, 0) == pic_offset_table_rtx
7084 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7085 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7086 || GET_CODE (XEXP (value, 1)) == CONST))
7087 {
7088 if (!subtarget)
7089 subtarget = gen_reg_rtx (GET_MODE (value));
7090 emit_move_insn (subtarget, value);
7091 return subtarget;
7092 }
7093
7094 if (ARITHMETIC_P (value))
7095 {
7096 op2 = XEXP (value, 1);
7097 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7098 subtarget = 0;
7099 if (code == MINUS && CONST_INT_P (op2))
7100 {
7101 code = PLUS;
7102 op2 = negate_rtx (GET_MODE (value), op2);
7103 }
7104
7105 /* Check for an addition with OP2 a constant integer and our first
7106 operand a PLUS of a virtual register and something else. In that
7107 case, we want to emit the sum of the virtual register and the
7108 constant first and then add the other value. This allows virtual
7109 register instantiation to simply modify the constant rather than
7110 creating another one around this addition. */
7111 if (code == PLUS && CONST_INT_P (op2)
7112 && GET_CODE (XEXP (value, 0)) == PLUS
7113 && REG_P (XEXP (XEXP (value, 0), 0))
7114 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7115 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7116 {
7117 rtx temp = expand_simple_binop (GET_MODE (value), code,
7118 XEXP (XEXP (value, 0), 0), op2,
7119 subtarget, 0, OPTAB_LIB_WIDEN);
7120 return expand_simple_binop (GET_MODE (value), code, temp,
7121 force_operand (XEXP (XEXP (value,
7122 0), 1), 0),
7123 target, 0, OPTAB_LIB_WIDEN);
7124 }
7125
7126 op1 = force_operand (XEXP (value, 0), subtarget);
7127 op2 = force_operand (op2, NULL_RTX);
7128 switch (code)
7129 {
7130 case MULT:
7131 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7132 case DIV:
7133 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7134 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7135 target, 1, OPTAB_LIB_WIDEN);
7136 else
7137 return expand_divmod (0,
7138 FLOAT_MODE_P (GET_MODE (value))
7139 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7140 GET_MODE (value), op1, op2, target, 0);
7141 case MOD:
7142 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7143 target, 0);
7144 case UDIV:
7145 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7146 target, 1);
7147 case UMOD:
7148 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7149 target, 1);
7150 case ASHIFTRT:
7151 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7152 target, 0, OPTAB_LIB_WIDEN);
7153 default:
7154 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7155 target, 1, OPTAB_LIB_WIDEN);
7156 }
7157 }
7158 if (UNARY_P (value))
7159 {
7160 if (!target)
7161 target = gen_reg_rtx (GET_MODE (value));
7162 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7163 switch (code)
7164 {
7165 case ZERO_EXTEND:
7166 case SIGN_EXTEND:
7167 case TRUNCATE:
7168 case FLOAT_EXTEND:
7169 case FLOAT_TRUNCATE:
7170 convert_move (target, op1, code == ZERO_EXTEND);
7171 return target;
7172
7173 case FIX:
7174 case UNSIGNED_FIX:
7175 expand_fix (target, op1, code == UNSIGNED_FIX);
7176 return target;
7177
7178 case FLOAT:
7179 case UNSIGNED_FLOAT:
7180 expand_float (target, op1, code == UNSIGNED_FLOAT);
7181 return target;
7182
7183 default:
7184 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7185 }
7186 }
7187
7188 #ifdef INSN_SCHEDULING
7189 /* On machines that have insn scheduling, we want all memory references to be
7190 explicit, so we need to deal with such paradoxical SUBREGs. */
7191 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7192 value
7193 = simplify_gen_subreg (GET_MODE (value),
7194 force_reg (GET_MODE (SUBREG_REG (value)),
7195 force_operand (SUBREG_REG (value),
7196 NULL_RTX)),
7197 GET_MODE (SUBREG_REG (value)),
7198 SUBREG_BYTE (value));
7199 #endif
7200
7201 return value;
7202 }
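/* A minimal usage sketch (hypothetical caller, not code from this file):

     rtx sum = gen_rtx_PLUS (Pmode, base_reg, index_reg);
     rtx addr = force_operand (sum, NULL_RTX);

   ADDR then refers to a REG (or something equally simple) holding the
   computed sum, suitable where an arbitrary PLUS would not be accepted;
   base_reg and index_reg stand for any pseudo registers of mode Pmode.  */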
7203 \f
7204 /* Subroutine of expand_expr: return nonzero iff there is no way that
7205 EXP can reference X, which is being modified. TOP_P is nonzero if this
7206 call is going to be used to determine whether we need a temporary
7207 for EXP, as opposed to a recursive call to this function.
7208
7209 It is always safe for this routine to return zero since it merely
7210 searches for optimization opportunities. */
7211
7212 int
7213 safe_from_p (const_rtx x, tree exp, int top_p)
7214 {
7215 rtx exp_rtl = 0;
7216 int i, nops;
7217
7218 if (x == 0
7219 /* If EXP has varying size, we MUST use a target since we currently
7220 have no way of allocating temporaries of variable size
7221 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7222 So we assume here that something at a higher level has prevented a
7223 clash. This is somewhat bogus, but the best we can do. Only
7224 do this when X is BLKmode and when we are at the top level. */
7225 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7226 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7227 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7228 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7229 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7230 != INTEGER_CST)
7231 && GET_MODE (x) == BLKmode)
7232 /* If X is in the outgoing argument area, it is always safe. */
7233 || (MEM_P (x)
7234 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7235 || (GET_CODE (XEXP (x, 0)) == PLUS
7236 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7237 return 1;
7238
7239 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7240 find the underlying pseudo. */
7241 if (GET_CODE (x) == SUBREG)
7242 {
7243 x = SUBREG_REG (x);
7244 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7245 return 0;
7246 }
7247
7248 /* Now look at our tree code and possibly recurse. */
7249 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7250 {
7251 case tcc_declaration:
7252 exp_rtl = DECL_RTL_IF_SET (exp);
7253 break;
7254
7255 case tcc_constant:
7256 return 1;
7257
7258 case tcc_exceptional:
7259 if (TREE_CODE (exp) == TREE_LIST)
7260 {
7261 while (1)
7262 {
7263 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7264 return 0;
7265 exp = TREE_CHAIN (exp);
7266 if (!exp)
7267 return 1;
7268 if (TREE_CODE (exp) != TREE_LIST)
7269 return safe_from_p (x, exp, 0);
7270 }
7271 }
7272 else if (TREE_CODE (exp) == CONSTRUCTOR)
7273 {
7274 constructor_elt *ce;
7275 unsigned HOST_WIDE_INT idx;
7276
7277 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7278 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7279 || !safe_from_p (x, ce->value, 0))
7280 return 0;
7281 return 1;
7282 }
7283 else if (TREE_CODE (exp) == ERROR_MARK)
7284 return 1; /* An already-visited SAVE_EXPR? */
7285 else
7286 return 0;
7287
7288 case tcc_statement:
7289 /* The only case we look at here is the DECL_INITIAL inside a
7290 DECL_EXPR. */
7291 return (TREE_CODE (exp) != DECL_EXPR
7292 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7293 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7294 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7295
7296 case tcc_binary:
7297 case tcc_comparison:
7298 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7299 return 0;
7300 /* Fall through. */
7301
7302 case tcc_unary:
7303 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7304
7305 case tcc_expression:
7306 case tcc_reference:
7307 case tcc_vl_exp:
7308 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7309 the expression. If it is set, we conflict iff we are that rtx or
7310 both are in memory. Otherwise, we check all operands of the
7311 expression recursively. */
7312
7313 switch (TREE_CODE (exp))
7314 {
7315 case ADDR_EXPR:
7316 /* If the operand is static or we are static, we can't conflict.
7317 Likewise if we don't conflict with the operand at all. */
7318 if (staticp (TREE_OPERAND (exp, 0))
7319 || TREE_STATIC (exp)
7320 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7321 return 1;
7322
7323 /* Otherwise, the only way this can conflict is if we are taking
7324 the address of a DECL and that address is part of X, which is
7325 very rare. */
7326 exp = TREE_OPERAND (exp, 0);
7327 if (DECL_P (exp))
7328 {
7329 if (!DECL_RTL_SET_P (exp)
7330 || !MEM_P (DECL_RTL (exp)))
7331 return 0;
7332 else
7333 exp_rtl = XEXP (DECL_RTL (exp), 0);
7334 }
7335 break;
7336
7337 case MEM_REF:
7338 if (MEM_P (x)
7339 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7340 get_alias_set (exp)))
7341 return 0;
7342 break;
7343
7344 case CALL_EXPR:
7345 /* Assume that the call will clobber all hard registers and
7346 all of memory. */
7347 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7348 || MEM_P (x))
7349 return 0;
7350 break;
7351
7352 case WITH_CLEANUP_EXPR:
7353 case CLEANUP_POINT_EXPR:
7354 /* Lowered by gimplify.c. */
7355 gcc_unreachable ();
7356
7357 case SAVE_EXPR:
7358 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7359
7360 default:
7361 break;
7362 }
7363
7364 /* If we have an rtx, we do not need to scan our operands. */
7365 if (exp_rtl)
7366 break;
7367
7368 nops = TREE_OPERAND_LENGTH (exp);
7369 for (i = 0; i < nops; i++)
7370 if (TREE_OPERAND (exp, i) != 0
7371 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7372 return 0;
7373
7374 break;
7375
7376 case tcc_type:
7377 /* Should never get a type here. */
7378 gcc_unreachable ();
7379 }
7380
7381 /* If we have an rtl, find any enclosed object. Then see if we conflict
7382 with it. */
7383 if (exp_rtl)
7384 {
7385 if (GET_CODE (exp_rtl) == SUBREG)
7386 {
7387 exp_rtl = SUBREG_REG (exp_rtl);
7388 if (REG_P (exp_rtl)
7389 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7390 return 0;
7391 }
7392
7393 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7394 are memory and they conflict. */
7395 return ! (rtx_equal_p (x, exp_rtl)
7396 || (MEM_P (x) && MEM_P (exp_rtl)
7397 && true_dependence (exp_rtl, VOIDmode, x)));
7398 }
7399
7400 /* If we reach here, it is safe. */
7401 return 1;
7402 }
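/* The typical use of the predicate above is the one made by expand_operands
   below: if TARGET cannot be proven safe from EXP1, the caller simply drops
   TARGET and lets expansion pick a fresh temporary,

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   which is always correct, because a zero return only forgoes an
   optimization.  */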
7403
7404 \f
7405 /* Return the highest power of two that EXP is known to be a multiple of.
7406 This is used in updating alignment of MEMs in array references. */
7407
7408 unsigned HOST_WIDE_INT
7409 highest_pow2_factor (const_tree exp)
7410 {
7411 unsigned HOST_WIDE_INT ret;
7412 int trailing_zeros = tree_ctz (exp);
7413 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7414 return BIGGEST_ALIGNMENT;
7415 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7416 if (ret > BIGGEST_ALIGNMENT)
7417 return BIGGEST_ALIGNMENT;
7418 return ret;
7419 }
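/* A worked example (hypothetical operand, not from this file): for the
   constant 24, tree_ctz reports 3 trailing zero bits, so the function
   returns 1 << 3 = 8, the highest power of two dividing 24; the result is
   further capped at BIGGEST_ALIGNMENT, as the code above does for very
   large or unbounded factors.  */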
7420
7421 /* Similar, except that the alignment requirements of TARGET are
7422 taken into account. Assume it is at least as aligned as its
7423 type, unless it is a COMPONENT_REF in which case the layout of
7424 the structure gives the alignment. */
7425
7426 static unsigned HOST_WIDE_INT
7427 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7428 {
7429 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7430 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7431
7432 return MAX (factor, talign);
7433 }
7434 \f
7435 #ifdef HAVE_conditional_move
7436 /* Convert the tree comparison code TCODE to the rtl one where the
7437 signedness is UNSIGNEDP. */
7438
7439 static enum rtx_code
7440 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7441 {
7442 enum rtx_code code;
7443 switch (tcode)
7444 {
7445 case EQ_EXPR:
7446 code = EQ;
7447 break;
7448 case NE_EXPR:
7449 code = NE;
7450 break;
7451 case LT_EXPR:
7452 code = unsignedp ? LTU : LT;
7453 break;
7454 case LE_EXPR:
7455 code = unsignedp ? LEU : LE;
7456 break;
7457 case GT_EXPR:
7458 code = unsignedp ? GTU : GT;
7459 break;
7460 case GE_EXPR:
7461 code = unsignedp ? GEU : GE;
7462 break;
7463 case UNORDERED_EXPR:
7464 code = UNORDERED;
7465 break;
7466 case ORDERED_EXPR:
7467 code = ORDERED;
7468 break;
7469 case UNLT_EXPR:
7470 code = UNLT;
7471 break;
7472 case UNLE_EXPR:
7473 code = UNLE;
7474 break;
7475 case UNGT_EXPR:
7476 code = UNGT;
7477 break;
7478 case UNGE_EXPR:
7479 code = UNGE;
7480 break;
7481 case UNEQ_EXPR:
7482 code = UNEQ;
7483 break;
7484 case LTGT_EXPR:
7485 code = LTGT;
7486 break;
7487
7488 default:
7489 gcc_unreachable ();
7490 }
7491 return code;
7492 }
7493 #endif
7494
7495 /* Subroutine of expand_expr. Expand the two operands of a binary
7496 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7497 The value may be stored in TARGET if TARGET is nonzero. The
7498 MODIFIER argument is as documented by expand_expr. */
7499
7500 static void
7501 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7502 enum expand_modifier modifier)
7503 {
7504 if (! safe_from_p (target, exp1, 1))
7505 target = 0;
7506 if (operand_equal_p (exp0, exp1, 0))
7507 {
7508 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7509 *op1 = copy_rtx (*op0);
7510 }
7511 else
7512 {
7513 /* If we need to preserve evaluation order, copy exp0 into its own
7514 temporary variable so that it can't be clobbered by exp1. */
7515 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7516 exp0 = save_expr (exp0);
7517 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7518 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7519 }
7520 }
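/* The canonical call pattern, as used throughout expand_expr_real_2 below,
   is

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);

   after which OP0 and OP1 hold the expanded operands, the target having
   already been dropped if it was not safe from the second operand.  */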
7521
7522 \f
7523 /* Return a MEM that contains constant EXP. DEFER is as for
7524 output_constant_def and MODIFIER is as for expand_expr. */
7525
7526 static rtx
7527 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7528 {
7529 rtx mem;
7530
7531 mem = output_constant_def (exp, defer);
7532 if (modifier != EXPAND_INITIALIZER)
7533 mem = use_anchored_address (mem);
7534 return mem;
7535 }
7536
7537 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7538 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7539
7540 static rtx
7541 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7542 enum expand_modifier modifier, addr_space_t as)
7543 {
7544 rtx result, subtarget;
7545 tree inner, offset;
7546 HOST_WIDE_INT bitsize, bitpos;
7547 int volatilep, unsignedp;
7548 enum machine_mode mode1;
7549
7550 /* If we are taking the address of a constant and are at the top level,
7551 we have to use output_constant_def since we can't call force_const_mem
7552 at top level. */
7553 /* ??? This should be considered a front-end bug. We should not be
7554 generating ADDR_EXPR of something that isn't an LVALUE. The only
7555 exception here is STRING_CST. */
7556 if (CONSTANT_CLASS_P (exp))
7557 {
7558 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7559 if (modifier < EXPAND_SUM)
7560 result = force_operand (result, target);
7561 return result;
7562 }
7563
7564 /* Everything must be something allowed by is_gimple_addressable. */
7565 switch (TREE_CODE (exp))
7566 {
7567 case INDIRECT_REF:
7568 /* This case will happen via recursion for &a->b. */
7569 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7570
7571 case MEM_REF:
7572 {
7573 tree tem = TREE_OPERAND (exp, 0);
7574 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7575 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7576 return expand_expr (tem, target, tmode, modifier);
7577 }
7578
7579 case CONST_DECL:
7580 /* Expand the initializer like constants above. */
7581 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7582 0, modifier), 0);
7583 if (modifier < EXPAND_SUM)
7584 result = force_operand (result, target);
7585 return result;
7586
7587 case REALPART_EXPR:
7588 /* The real part of the complex number is always first, therefore
7589 the address is the same as the address of the parent object. */
7590 offset = 0;
7591 bitpos = 0;
7592 inner = TREE_OPERAND (exp, 0);
7593 break;
7594
7595 case IMAGPART_EXPR:
7596 /* The imaginary part of the complex number is always second.
7597 The expression is therefore always offset by the size of the
7598 scalar type. */
7599 offset = 0;
7600 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7601 inner = TREE_OPERAND (exp, 0);
7602 break;
7603
7604 case COMPOUND_LITERAL_EXPR:
7605 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7606 rtl_for_decl_init is called on DECL_INITIAL with
7607 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7608 if (modifier == EXPAND_INITIALIZER
7609 && COMPOUND_LITERAL_EXPR_DECL (exp))
7610 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7611 target, tmode, modifier, as);
7612 /* FALLTHRU */
7613 default:
7614 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7615 expand_expr, as that can have various side effects; LABEL_DECLs for
7616 example, may not have their DECL_RTL set yet. Expand the rtl of
7617 CONSTRUCTORs too, which should yield a memory reference for the
7618 constructor's contents. Assume language specific tree nodes can
7619 be expanded in some interesting way. */
7620 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7621 if (DECL_P (exp)
7622 || TREE_CODE (exp) == CONSTRUCTOR
7623 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7624 {
7625 result = expand_expr (exp, target, tmode,
7626 modifier == EXPAND_INITIALIZER
7627 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7628
7629 /* If the DECL isn't in memory, then the DECL wasn't properly
7630 marked TREE_ADDRESSABLE, which will be either a front-end
7631 or a tree optimizer bug. */
7632
7633 if (TREE_ADDRESSABLE (exp)
7634 && ! MEM_P (result)
7635 && ! targetm.calls.allocate_stack_slots_for_args ())
7636 {
7637 error ("local frame unavailable (naked function?)");
7638 return result;
7639 }
7640 else
7641 gcc_assert (MEM_P (result));
7642 result = XEXP (result, 0);
7643
7644 /* ??? Is this needed anymore? */
7645 if (DECL_P (exp))
7646 TREE_USED (exp) = 1;
7647
7648 if (modifier != EXPAND_INITIALIZER
7649 && modifier != EXPAND_CONST_ADDRESS
7650 && modifier != EXPAND_SUM)
7651 result = force_operand (result, target);
7652 return result;
7653 }
7654
7655 /* Pass FALSE as the last argument to get_inner_reference although
7656 we are expanding to RTL. The rationale is that we know how to
7657 handle "aligning nodes" here: we can just bypass them because
7658 they won't change the final object whose address will be returned
7659 (they actually exist only for that purpose). */
7660 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7661 &mode1, &unsignedp, &volatilep, false);
7662 break;
7663 }
7664
7665 /* We must have made progress. */
7666 gcc_assert (inner != exp);
7667
7668 subtarget = offset || bitpos ? NULL_RTX : target;
7669 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7670 inner alignment, force the inner to be sufficiently aligned. */
7671 if (CONSTANT_CLASS_P (inner)
7672 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7673 {
7674 inner = copy_node (inner);
7675 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7676 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7677 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7678 }
7679 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7680
7681 if (offset)
7682 {
7683 rtx tmp;
7684
7685 if (modifier != EXPAND_NORMAL)
7686 result = force_operand (result, NULL);
7687 tmp = expand_expr (offset, NULL_RTX, tmode,
7688 modifier == EXPAND_INITIALIZER
7689 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7690
7691 /* expand_expr is allowed to return an object in a mode other
7692 than TMODE. If it did, we need to convert. */
7693 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7694 tmp = convert_modes (tmode, GET_MODE (tmp),
7695 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7696 result = convert_memory_address_addr_space (tmode, result, as);
7697 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7698
7699 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7700 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7701 else
7702 {
7703 subtarget = bitpos ? NULL_RTX : target;
7704 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7705 1, OPTAB_LIB_WIDEN);
7706 }
7707 }
7708
7709 if (bitpos)
7710 {
7711 /* Someone beforehand should have rejected taking the address
7712 of such an object. */
7713 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7714
7715 result = convert_memory_address_addr_space (tmode, result, as);
7716 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7717 if (modifier < EXPAND_SUM)
7718 result = force_operand (result, target);
7719 }
7720
7721 return result;
7722 }
7723
7724 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7725 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7726
7727 static rtx
7728 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7729 enum expand_modifier modifier)
7730 {
7731 addr_space_t as = ADDR_SPACE_GENERIC;
7732 enum machine_mode address_mode = Pmode;
7733 enum machine_mode pointer_mode = ptr_mode;
7734 enum machine_mode rmode;
7735 rtx result;
7736
7737 /* Target mode of VOIDmode says "whatever's natural". */
7738 if (tmode == VOIDmode)
7739 tmode = TYPE_MODE (TREE_TYPE (exp));
7740
7741 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7742 {
7743 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7744 address_mode = targetm.addr_space.address_mode (as);
7745 pointer_mode = targetm.addr_space.pointer_mode (as);
7746 }
7747
7748 /* We can get called with some Weird Things if the user does silliness
7749 like "(short) &a". In that case, convert_memory_address won't do
7750 the right thing, so ignore the given target mode. */
7751 if (tmode != address_mode && tmode != pointer_mode)
7752 tmode = address_mode;
7753
7754 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7755 tmode, modifier, as);
7756
7757 /* Despite expand_expr's claim that TMODE may be ignored when not
7758 strictly convenient, things break if we don't honor it. Note
7759 that combined with the above, we only do this for pointer modes. */
7760 rmode = GET_MODE (result);
7761 if (rmode == VOIDmode)
7762 rmode = tmode;
7763 if (rmode != tmode)
7764 result = convert_memory_address_addr_space (tmode, result, as);
7765
7766 return result;
7767 }
7768
7769 /* Generate code for computing CONSTRUCTOR EXP.
7770 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7771 is TRUE, instead of creating a temporary variable in memory,
7772 NULL is returned and the caller needs to handle it differently. */
7773
7774 static rtx
7775 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7776 bool avoid_temp_mem)
7777 {
7778 tree type = TREE_TYPE (exp);
7779 enum machine_mode mode = TYPE_MODE (type);
7780
7781 /* Try to avoid creating a temporary at all. This is possible
7782 if all of the initializer is zero.
7783 FIXME: try to handle all [0..255] initializers we can handle
7784 with memset. */
7785 if (TREE_STATIC (exp)
7786 && !TREE_ADDRESSABLE (exp)
7787 && target != 0 && mode == BLKmode
7788 && all_zeros_p (exp))
7789 {
7790 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7791 return target;
7792 }
7793
7794 /* All elts simple constants => refer to a constant in memory. But
7795 if this is a non-BLKmode mode, let it store a field at a time
7796 since that should make a CONST_INT, CONST_WIDE_INT or
7797 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7798 use, it is best to store directly into the target unless the type
7799 is large enough that memcpy will be used. If we are making an
7800 initializer and all operands are constant, put it in memory as
7801 well.
7802
7803 FIXME: Avoid trying to fill vector constructors piece-meal.
7804 Output them with output_constant_def below unless we're sure
7805 they're zeros. This should go away when vector initializers
7806 are treated like VECTOR_CST instead of arrays. */
7807 if ((TREE_STATIC (exp)
7808 && ((mode == BLKmode
7809 && ! (target != 0 && safe_from_p (target, exp, 1)))
7810 || TREE_ADDRESSABLE (exp)
7811 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7812 && (! MOVE_BY_PIECES_P
7813 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7814 TYPE_ALIGN (type)))
7815 && ! mostly_zeros_p (exp))))
7816 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7817 && TREE_CONSTANT (exp)))
7818 {
7819 rtx constructor;
7820
7821 if (avoid_temp_mem)
7822 return NULL_RTX;
7823
7824 constructor = expand_expr_constant (exp, 1, modifier);
7825
7826 if (modifier != EXPAND_CONST_ADDRESS
7827 && modifier != EXPAND_INITIALIZER
7828 && modifier != EXPAND_SUM)
7829 constructor = validize_mem (constructor);
7830
7831 return constructor;
7832 }
7833
7834 /* Handle calls that pass values in multiple non-contiguous
7835 locations. The Irix 6 ABI has examples of this. */
7836 if (target == 0 || ! safe_from_p (target, exp, 1)
7837 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7838 {
7839 if (avoid_temp_mem)
7840 return NULL_RTX;
7841
7842 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7843 }
7844
7845 store_constructor (exp, target, 0, int_expr_size (exp));
7846 return target;
7847 }
7848
7849
7850 /* expand_expr: generate code for computing expression EXP.
7851 An rtx for the computed value is returned. The value is never null.
7852 In the case of a void EXP, const0_rtx is returned.
7853
7854 The value may be stored in TARGET if TARGET is nonzero.
7855 TARGET is just a suggestion; callers must assume that
7856 the rtx returned may not be the same as TARGET.
7857
7858 If TARGET is CONST0_RTX, it means that the value will be ignored.
7859
7860 If TMODE is not VOIDmode, it suggests generating the
7861 result in mode TMODE. But this is done only when convenient.
7862 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7863 TMODE is just a suggestion; callers must assume that
7864 the rtx returned may not have mode TMODE.
7865
7866 Note that TARGET may have neither TMODE nor MODE. In that case, it
7867 probably will not be used.
7868
7869 If MODIFIER is EXPAND_SUM then when EXP is an addition
7870 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7871 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7872 products as above, or REG or MEM, or constant.
7873 Ordinarily in such cases we would output mul or add instructions
7874 and then return a pseudo reg containing the sum.
7875
7876 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7877 it also marks a label as absolutely required (it can't be dead).
7878 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7879 This is used for outputting expressions used in initializers.
7880
7881 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7882 with a constant address even if that address is not normally legitimate.
7883 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7884
7885 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7886 a call parameter. Such targets require special care as we haven't yet
7887 marked TARGET so that it's safe from being trashed by libcalls. We
7888 don't want to use TARGET for anything but the final result;
7889 Intermediate values must go elsewhere. Additionally, calls to
7890 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7891
7892 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7893 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7894 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7895 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7896 recursively.
7897
7898 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7899 In this case, we don't adjust a returned MEM rtx that wouldn't be
7900 sufficiently aligned for its mode; instead, it's up to the caller
7901 to deal with it afterwards. This is used to make sure that unaligned
7902 base objects for which out-of-bounds accesses are supported, for
7903 example record types with trailing arrays, aren't realigned behind
7904 the back of the caller.
7905 The normal operating mode is to pass FALSE for this parameter. */
7906
7907 rtx
7908 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7909 enum expand_modifier modifier, rtx *alt_rtl,
7910 bool inner_reference_p)
7911 {
7912 rtx ret;
7913
7914 /* Handle ERROR_MARK before anybody tries to access its type. */
7915 if (TREE_CODE (exp) == ERROR_MARK
7916 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7917 {
7918 ret = CONST0_RTX (tmode);
7919 return ret ? ret : const0_rtx;
7920 }
7921
7922 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7923 inner_reference_p);
7924 return ret;
7925 }
7926
7927 /* Try to expand the conditional expression which is represented by
7928 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7929 return the rtl reg which represents the result. Otherwise return
7930 NULL_RTX. */
7931
7932 static rtx
7933 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7934 tree treeop1 ATTRIBUTE_UNUSED,
7935 tree treeop2 ATTRIBUTE_UNUSED)
7936 {
7937 #ifdef HAVE_conditional_move
7938 rtx insn;
7939 rtx op00, op01, op1, op2;
7940 enum rtx_code comparison_code;
7941 enum machine_mode comparison_mode;
7942 gimple srcstmt;
7943 rtx temp;
7944 tree type = TREE_TYPE (treeop1);
7945 int unsignedp = TYPE_UNSIGNED (type);
7946 enum machine_mode mode = TYPE_MODE (type);
7947 enum machine_mode orig_mode = mode;
7948
7949 /* If we cannot do a conditional move on the mode, try doing it
7950 with the promoted mode. */
7951 if (!can_conditionally_move_p (mode))
7952 {
7953 mode = promote_mode (type, mode, &unsignedp);
7954 if (!can_conditionally_move_p (mode))
7955 return NULL_RTX;
7956 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7957 }
7958 else
7959 temp = assign_temp (type, 0, 1);
7960
7961 start_sequence ();
7962 expand_operands (treeop1, treeop2,
7963 temp, &op1, &op2, EXPAND_NORMAL);
7964
7965 if (TREE_CODE (treeop0) == SSA_NAME
7966 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7967 {
7968 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7969 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7970 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7971 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7972 comparison_mode = TYPE_MODE (type);
7973 unsignedp = TYPE_UNSIGNED (type);
7974 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7975 }
7976 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7977 {
7978 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7979 enum tree_code cmpcode = TREE_CODE (treeop0);
7980 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7981 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7982 unsignedp = TYPE_UNSIGNED (type);
7983 comparison_mode = TYPE_MODE (type);
7984 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7985 }
7986 else
7987 {
7988 op00 = expand_normal (treeop0);
7989 op01 = const0_rtx;
7990 comparison_code = NE;
7991 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7992 }
7993
7994 if (GET_MODE (op1) != mode)
7995 op1 = gen_lowpart (mode, op1);
7996
7997 if (GET_MODE (op2) != mode)
7998 op2 = gen_lowpart (mode, op2);
7999
8000 /* Try to emit the conditional move. */
8001 insn = emit_conditional_move (temp, comparison_code,
8002 op00, op01, comparison_mode,
8003 op1, op2, mode,
8004 unsignedp);
8005
8006 /* If we could do the conditional move, emit the sequence,
8007 and return. */
8008 if (insn)
8009 {
8010 rtx seq = get_insns ();
8011 end_sequence ();
8012 emit_insn (seq);
8013 return convert_modes (orig_mode, mode, temp, 0);
8014 }
8015
8016 /* Otherwise discard the sequence and fall back to code with
8017 branches. */
8018 end_sequence ();
8019 #endif
8020 return NULL_RTX;
8021 }
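/* As a sketch of what the function above handles (hypothetical GIMPLE, not
   from this file): for

     x_3 = a_1 < b_2 ? c_4 : d_5;

   the comparison a_1 < b_2 is turned into COMPARISON_CODE/COMPARISON_MODE
   via convert_tree_comp_to_rtx, c_4 and d_5 are expanded into OP1 and OP2,
   and emit_conditional_move is asked to produce a branchless select into
   TEMP; a NULL return simply falls back to the branching code path.  */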
8022
8023 rtx
8024 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8025 enum expand_modifier modifier)
8026 {
8027 rtx op0, op1, op2, temp;
8028 tree type;
8029 int unsignedp;
8030 enum machine_mode mode;
8031 enum tree_code code = ops->code;
8032 optab this_optab;
8033 rtx subtarget, original_target;
8034 int ignore;
8035 bool reduce_bit_field;
8036 location_t loc = ops->location;
8037 tree treeop0, treeop1, treeop2;
8038 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8039 ? reduce_to_bit_field_precision ((expr), \
8040 target, \
8041 type) \
8042 : (expr))
8043
8044 type = ops->type;
8045 mode = TYPE_MODE (type);
8046 unsignedp = TYPE_UNSIGNED (type);
8047
8048 treeop0 = ops->op0;
8049 treeop1 = ops->op1;
8050 treeop2 = ops->op2;
8051
8052 /* We should be called only on simple (binary or unary) expressions,
8053 exactly those that are valid in gimple expressions that aren't
8054 GIMPLE_SINGLE_RHS (or invalid). */
8055 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8056 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8057 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8058
8059 ignore = (target == const0_rtx
8060 || ((CONVERT_EXPR_CODE_P (code)
8061 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8062 && TREE_CODE (type) == VOID_TYPE));
8063
8064 /* We should be called only if we need the result. */
8065 gcc_assert (!ignore);
8066
8067 /* An operation in what may be a bit-field type needs the
8068 result to be reduced to the precision of the bit-field type,
8069 which is narrower than that of the type's mode. */
8070 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8071 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8072
8073 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8074 target = 0;
8075
8076 /* Use subtarget as the target for operand 0 of a binary operation. */
8077 subtarget = get_subtarget (target);
8078 original_target = target;
8079
8080 switch (code)
8081 {
8082 case NON_LVALUE_EXPR:
8083 case PAREN_EXPR:
8084 CASE_CONVERT:
8085 if (treeop0 == error_mark_node)
8086 return const0_rtx;
8087
8088 if (TREE_CODE (type) == UNION_TYPE)
8089 {
8090 tree valtype = TREE_TYPE (treeop0);
8091
8092 /* If both input and output are BLKmode, this conversion isn't doing
8093 anything except possibly changing memory attributes. */
8094 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8095 {
8096 rtx result = expand_expr (treeop0, target, tmode,
8097 modifier);
8098
8099 result = copy_rtx (result);
8100 set_mem_attributes (result, type, 0);
8101 return result;
8102 }
8103
8104 if (target == 0)
8105 {
8106 if (TYPE_MODE (type) != BLKmode)
8107 target = gen_reg_rtx (TYPE_MODE (type));
8108 else
8109 target = assign_temp (type, 1, 1);
8110 }
8111
8112 if (MEM_P (target))
8113 /* Store data into beginning of memory target. */
8114 store_expr (treeop0,
8115 adjust_address (target, TYPE_MODE (valtype), 0),
8116 modifier == EXPAND_STACK_PARM,
8117 false);
8118
8119 else
8120 {
8121 gcc_assert (REG_P (target));
8122
8123 /* Store this field into a union of the proper type. */
8124 store_field (target,
8125 MIN ((int_size_in_bytes (TREE_TYPE
8126 (treeop0))
8127 * BITS_PER_UNIT),
8128 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8129 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8130 }
8131
8132 /* Return the entire union. */
8133 return target;
8134 }
8135
8136 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8137 {
8138 op0 = expand_expr (treeop0, target, VOIDmode,
8139 modifier);
8140
8141 /* If the signedness of the conversion differs and OP0 is
8142 a promoted SUBREG, clear that indication since we now
8143 have to do the proper extension. */
8144 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8145 && GET_CODE (op0) == SUBREG)
8146 SUBREG_PROMOTED_VAR_P (op0) = 0;
8147
8148 return REDUCE_BIT_FIELD (op0);
8149 }
8150
8151 op0 = expand_expr (treeop0, NULL_RTX, mode,
8152 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8153 if (GET_MODE (op0) == mode)
8154 ;
8155
8156 /* If OP0 is a constant, just convert it into the proper mode. */
8157 else if (CONSTANT_P (op0))
8158 {
8159 tree inner_type = TREE_TYPE (treeop0);
8160 enum machine_mode inner_mode = GET_MODE (op0);
8161
8162 if (inner_mode == VOIDmode)
8163 inner_mode = TYPE_MODE (inner_type);
8164
8165 if (modifier == EXPAND_INITIALIZER)
8166 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8167 subreg_lowpart_offset (mode,
8168 inner_mode));
8169 else
8170 op0 = convert_modes (mode, inner_mode, op0,
8171 TYPE_UNSIGNED (inner_type));
8172 }
8173
8174 else if (modifier == EXPAND_INITIALIZER)
8175 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8176
8177 else if (target == 0)
8178 op0 = convert_to_mode (mode, op0,
8179 TYPE_UNSIGNED (TREE_TYPE
8180 (treeop0)));
8181 else
8182 {
8183 convert_move (target, op0,
8184 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8185 op0 = target;
8186 }
8187
8188 return REDUCE_BIT_FIELD (op0);
8189
8190 case ADDR_SPACE_CONVERT_EXPR:
8191 {
8192 tree treeop0_type = TREE_TYPE (treeop0);
8193 addr_space_t as_to;
8194 addr_space_t as_from;
8195
8196 gcc_assert (POINTER_TYPE_P (type));
8197 gcc_assert (POINTER_TYPE_P (treeop0_type));
8198
8199 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8200 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8201
8202 /* Conversions between pointers to the same address space should
8203 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8204 gcc_assert (as_to != as_from);
8205
8206 /* Ask target code to handle conversion between pointers
8207 to overlapping address spaces. */
8208 if (targetm.addr_space.subset_p (as_to, as_from)
8209 || targetm.addr_space.subset_p (as_from, as_to))
8210 {
8211 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8212 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8213 gcc_assert (op0);
8214 return op0;
8215 }
8216
8217 /* For disjoint address spaces, converting anything but
8218 a null pointer invokes undefined behaviour. We simply
8219 always return a null pointer here. */
8220 return CONST0_RTX (mode);
8221 }
8222
8223 case POINTER_PLUS_EXPR:
8224 /* Even though the sizetype mode and the pointer's mode can be different,
8225 expand is able to handle this correctly and get the correct result out
8226 of the PLUS_EXPR code. */
8227 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8228 if sizetype precision is smaller than pointer precision. */
8229 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8230 treeop1 = fold_convert_loc (loc, type,
8231 fold_convert_loc (loc, ssizetype,
8232 treeop1));
8233 /* If sizetype precision is larger than pointer precision, truncate the
8234 offset to have matching modes. */
8235 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8236 treeop1 = fold_convert_loc (loc, type, treeop1);
8237
8238 case PLUS_EXPR:
8239 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8240 something else, make sure we add the register to the constant and
8241 then to the other thing. This case can occur during strength
8242 reduction and doing it this way will produce better code if the
8243 frame pointer or argument pointer is eliminated.
8244
8245 fold-const.c will ensure that the constant is always in the inner
8246 PLUS_EXPR, so the only case we need to do anything about is if
8247 sp, ap, or fp is our second argument, in which case we must swap
8248 the innermost first argument and our second argument. */
8249
8250 if (TREE_CODE (treeop0) == PLUS_EXPR
8251 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8252 && TREE_CODE (treeop1) == VAR_DECL
8253 && (DECL_RTL (treeop1) == frame_pointer_rtx
8254 || DECL_RTL (treeop1) == stack_pointer_rtx
8255 || DECL_RTL (treeop1) == arg_pointer_rtx))
8256 {
8257 gcc_unreachable ();
8258 }
8259
8260 /* If the result is to be ptr_mode and we are adding an integer to
8261 something, we might be forming a constant. So try to use
8262 plus_constant. If it produces a sum and we can't accept it,
8263 use force_operand. This allows P = &ARR[const] to generate
8264 efficient code on machines where a SYMBOL_REF is not a valid
8265 address.
8266
8267 If this is an EXPAND_SUM call, always return the sum. */
8268 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8269 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8270 {
8271 if (modifier == EXPAND_STACK_PARM)
8272 target = 0;
8273 if (TREE_CODE (treeop0) == INTEGER_CST
8274 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8275 && TREE_CONSTANT (treeop1))
8276 {
8277 rtx constant_part;
8278 HOST_WIDE_INT wc;
8279 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8280
8281 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8282 EXPAND_SUM);
8283 /* Use wi::shwi to ensure that the constant is
8284 truncated according to the mode of OP1, then sign extended
8285 to a HOST_WIDE_INT. Using the constant directly can result
8286 in non-canonical RTL in a 64x32 cross compile. */
8287 wc = TREE_INT_CST_LOW (treeop0);
8288 constant_part =
8289 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8290 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8291 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8292 op1 = force_operand (op1, target);
8293 return REDUCE_BIT_FIELD (op1);
8294 }
8295
8296 else if (TREE_CODE (treeop1) == INTEGER_CST
8297 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8298 && TREE_CONSTANT (treeop0))
8299 {
8300 rtx constant_part;
8301 HOST_WIDE_INT wc;
8302 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8303
8304 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8305 (modifier == EXPAND_INITIALIZER
8306 ? EXPAND_INITIALIZER : EXPAND_SUM));
8307 if (! CONSTANT_P (op0))
8308 {
8309 op1 = expand_expr (treeop1, NULL_RTX,
8310 VOIDmode, modifier);
8311 /* Return a PLUS if modifier says it's OK. */
8312 if (modifier == EXPAND_SUM
8313 || modifier == EXPAND_INITIALIZER)
8314 return simplify_gen_binary (PLUS, mode, op0, op1);
8315 goto binop2;
8316 }
8317 /* Use wi::shwi to ensure that the constant is
8318 truncated according to the mode of OP0, then sign extended
8319 to a HOST_WIDE_INT. Using the constant directly can result
8320 in non-canonical RTL in a 64x32 cross compile. */
8321 wc = TREE_INT_CST_LOW (treeop1);
8322 constant_part
8323 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8324 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8325 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8326 op0 = force_operand (op0, target);
8327 return REDUCE_BIT_FIELD (op0);
8328 }
8329 }
8330
8331 /* Use TER to expand pointer addition of a negated value
8332 as pointer subtraction. */
8333 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8334 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8335 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8336 && TREE_CODE (treeop1) == SSA_NAME
8337 && TYPE_MODE (TREE_TYPE (treeop0))
8338 == TYPE_MODE (TREE_TYPE (treeop1)))
8339 {
8340 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8341 if (def)
8342 {
8343 treeop1 = gimple_assign_rhs1 (def);
8344 code = MINUS_EXPR;
8345 goto do_minus;
8346 }
8347 }
8348
8349 /* No sense saving up arithmetic to be done
8350 if it's all in the wrong mode to form part of an address.
8351 And force_operand won't know whether to sign-extend or
8352 zero-extend. */
8353 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8354 || mode != ptr_mode)
8355 {
8356 expand_operands (treeop0, treeop1,
8357 subtarget, &op0, &op1, EXPAND_NORMAL);
8358 if (op0 == const0_rtx)
8359 return op1;
8360 if (op1 == const0_rtx)
8361 return op0;
8362 goto binop2;
8363 }
8364
8365 expand_operands (treeop0, treeop1,
8366 subtarget, &op0, &op1, modifier);
8367 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8368
8369 case MINUS_EXPR:
8370 do_minus:
8371 /* For initializers, we are allowed to return a MINUS of two
8372 symbolic constants. Here we handle all cases when both operands
8373 are constant. */
8374 /* Handle difference of two symbolic constants,
8375 for the sake of an initializer. */
8376 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8377 && really_constant_p (treeop0)
8378 && really_constant_p (treeop1))
8379 {
8380 expand_operands (treeop0, treeop1,
8381 NULL_RTX, &op0, &op1, modifier);
8382
8383 /* If the last operand is a CONST_INT, use plus_constant of
8384 the negated constant. Else make the MINUS. */
8385 if (CONST_INT_P (op1))
8386 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8387 -INTVAL (op1)));
8388 else
8389 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8390 }
8391
8392 /* No sense saving up arithmetic to be done
8393 if it's all in the wrong mode to form part of an address.
8394 And force_operand won't know whether to sign-extend or
8395 zero-extend. */
8396 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8397 || mode != ptr_mode)
8398 goto binop;
8399
8400 expand_operands (treeop0, treeop1,
8401 subtarget, &op0, &op1, modifier);
8402
8403 /* Convert A - const to A + (-const). */
8404 if (CONST_INT_P (op1))
8405 {
8406 op1 = negate_rtx (mode, op1);
8407 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8408 }
8409
8410 goto binop2;
8411
8412 case WIDEN_MULT_PLUS_EXPR:
8413 case WIDEN_MULT_MINUS_EXPR:
8414 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8415 op2 = expand_normal (treeop2);
8416 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8417 target, unsignedp);
8418 return target;
8419
8420 case WIDEN_MULT_EXPR:
8421 /* If first operand is constant, swap them.
8422 Thus the following special case checks need only
8423 check the second operand. */
8424 if (TREE_CODE (treeop0) == INTEGER_CST)
8425 {
8426 tree t1 = treeop0;
8427 treeop0 = treeop1;
8428 treeop1 = t1;
8429 }
8430
8431 /* First, check if we have a multiplication of one signed and one
8432 unsigned operand. */
8433 if (TREE_CODE (treeop1) != INTEGER_CST
8434 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8435 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8436 {
8437 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8438 this_optab = usmul_widen_optab;
8439 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8440 != CODE_FOR_nothing)
8441 {
8442 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8443 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8444 EXPAND_NORMAL);
8445 else
8446 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8447 EXPAND_NORMAL);
8448 /* op0 and op1 might still be constant, despite the above
8449 != INTEGER_CST check. Handle it. */
8450 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8451 {
8452 op0 = convert_modes (innermode, mode, op0, true);
8453 op1 = convert_modes (innermode, mode, op1, false);
8454 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8455 target, unsignedp));
8456 }
8457 goto binop3;
8458 }
8459 }
8460 /* Check for a multiplication with matching signedness. */
8461 else if ((TREE_CODE (treeop1) == INTEGER_CST
8462 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8463 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8464 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8465 {
8466 tree op0type = TREE_TYPE (treeop0);
8467 enum machine_mode innermode = TYPE_MODE (op0type);
8468 bool zextend_p = TYPE_UNSIGNED (op0type);
8469 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8470 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8471
8472 if (TREE_CODE (treeop0) != INTEGER_CST)
8473 {
8474 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8475 != CODE_FOR_nothing)
8476 {
8477 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8478 EXPAND_NORMAL);
8479 /* op0 and op1 might still be constant, despite the above
8480 != INTEGER_CST check. Handle it. */
8481 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8482 {
8483 widen_mult_const:
8484 op0 = convert_modes (innermode, mode, op0, zextend_p);
8485 op1
8486 = convert_modes (innermode, mode, op1,
8487 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8488 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8489 target,
8490 unsignedp));
8491 }
8492 temp = expand_widening_mult (mode, op0, op1, target,
8493 unsignedp, this_optab);
8494 return REDUCE_BIT_FIELD (temp);
8495 }
8496 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8497 != CODE_FOR_nothing
8498 && innermode == word_mode)
8499 {
8500 rtx htem, hipart;
8501 op0 = expand_normal (treeop0);
8502 if (TREE_CODE (treeop1) == INTEGER_CST)
8503 op1 = convert_modes (innermode, mode,
8504 expand_normal (treeop1),
8505 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8506 else
8507 op1 = expand_normal (treeop1);
8508 /* op0 and op1 might still be constant, despite the above
8509 != INTEGER_CST check. Handle it. */
8510 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8511 goto widen_mult_const;
8512 temp = expand_binop (mode, other_optab, op0, op1, target,
8513 unsignedp, OPTAB_LIB_WIDEN);
8514 hipart = gen_highpart (innermode, temp);
8515 htem = expand_mult_highpart_adjust (innermode, hipart,
8516 op0, op1, hipart,
8517 zextend_p);
8518 if (htem != hipart)
8519 emit_move_insn (hipart, htem);
8520 return REDUCE_BIT_FIELD (temp);
8521 }
8522 }
8523 }
8524 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8525 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8526 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8527 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8528
8529 case FMA_EXPR:
8530 {
8531 optab opt = fma_optab;
8532 gimple def0, def2;
8533
8534 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8535 call. */
8536 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8537 {
8538 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8539 tree call_expr;
8540
8541 gcc_assert (fn != NULL_TREE);
8542 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8543 return expand_builtin (call_expr, target, subtarget, mode, false);
8544 }
8545
8546 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8547 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8548
8549 op0 = op2 = NULL;
8550
8551 if (def0 && def2
8552 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8553 {
8554 opt = fnms_optab;
8555 op0 = expand_normal (gimple_assign_rhs1 (def0));
8556 op2 = expand_normal (gimple_assign_rhs1 (def2));
8557 }
8558 else if (def0
8559 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8560 {
8561 opt = fnma_optab;
8562 op0 = expand_normal (gimple_assign_rhs1 (def0));
8563 }
8564 else if (def2
8565 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8566 {
8567 opt = fms_optab;
8568 op2 = expand_normal (gimple_assign_rhs1 (def2));
8569 }
8570
8571 if (op0 == NULL)
8572 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8573 if (op2 == NULL)
8574 op2 = expand_normal (treeop2);
8575 op1 = expand_normal (treeop1);
8576
8577 return expand_ternary_op (TYPE_MODE (type), opt,
8578 op0, op1, op2, target, 0);
8579 }
8580
8581 case MULT_EXPR:
8582 /* If this is a fixed-point operation, then we cannot use the code
8583 below because "expand_mult" doesn't support sat/no-sat fixed-point
8584 multiplications. */
8585 if (ALL_FIXED_POINT_MODE_P (mode))
8586 goto binop;
8587
8588 /* If first operand is constant, swap them.
8589 Thus the following special case checks need only
8590 check the second operand. */
8591 if (TREE_CODE (treeop0) == INTEGER_CST)
8592 {
8593 tree t1 = treeop0;
8594 treeop0 = treeop1;
8595 treeop1 = t1;
8596 }
8597
8598 /* Attempt to return something suitable for generating an
8599 indexed address, for machines that support that. */
8600
8601 if (modifier == EXPAND_SUM && mode == ptr_mode
8602 && tree_fits_shwi_p (treeop1))
8603 {
8604 tree exp1 = treeop1;
8605
8606 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8607 EXPAND_SUM);
8608
8609 if (!REG_P (op0))
8610 op0 = force_operand (op0, NULL_RTX);
8611 if (!REG_P (op0))
8612 op0 = copy_to_mode_reg (mode, op0);
8613
8614 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8615 gen_int_mode (tree_to_shwi (exp1),
8616 TYPE_MODE (TREE_TYPE (exp1)))));
8617 }
8618
8619 if (modifier == EXPAND_STACK_PARM)
8620 target = 0;
8621
8622 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8623 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8624
8625 case TRUNC_DIV_EXPR:
8626 case FLOOR_DIV_EXPR:
8627 case CEIL_DIV_EXPR:
8628 case ROUND_DIV_EXPR:
8629 case EXACT_DIV_EXPR:
8630 /* If this is a fixed-point operation, then we cannot use the code
8631 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8632 divisions. */
8633 if (ALL_FIXED_POINT_MODE_P (mode))
8634 goto binop;
8635
8636 if (modifier == EXPAND_STACK_PARM)
8637 target = 0;
8638 /* Possible optimization: compute the dividend with EXPAND_SUM;
8639 then, if the divisor is constant, we can optimize the case
8640 where some terms of the dividend have coefficients divisible by it. */
8641 expand_operands (treeop0, treeop1,
8642 subtarget, &op0, &op1, EXPAND_NORMAL);
8643 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8644
8645 case RDIV_EXPR:
8646 goto binop;
8647
8648 case MULT_HIGHPART_EXPR:
8649 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8650 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8651 gcc_assert (temp);
8652 return temp;
8653
8654 case TRUNC_MOD_EXPR:
8655 case FLOOR_MOD_EXPR:
8656 case CEIL_MOD_EXPR:
8657 case ROUND_MOD_EXPR:
8658 if (modifier == EXPAND_STACK_PARM)
8659 target = 0;
8660 expand_operands (treeop0, treeop1,
8661 subtarget, &op0, &op1, EXPAND_NORMAL);
8662 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8663
8664 case FIXED_CONVERT_EXPR:
8665 op0 = expand_normal (treeop0);
8666 if (target == 0 || modifier == EXPAND_STACK_PARM)
8667 target = gen_reg_rtx (mode);
8668
8669 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8670 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8671 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8672 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8673 else
8674 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8675 return target;
8676
8677 case FIX_TRUNC_EXPR:
8678 op0 = expand_normal (treeop0);
8679 if (target == 0 || modifier == EXPAND_STACK_PARM)
8680 target = gen_reg_rtx (mode);
8681 expand_fix (target, op0, unsignedp);
8682 return target;
8683
8684 case FLOAT_EXPR:
8685 op0 = expand_normal (treeop0);
8686 if (target == 0 || modifier == EXPAND_STACK_PARM)
8687 target = gen_reg_rtx (mode);
8688 /* expand_float can't figure out what to do if FROM has VOIDmode.
8689 So give it the correct mode. With -O, cse will optimize this. */
8690 if (GET_MODE (op0) == VOIDmode)
8691 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8692 op0);
8693 expand_float (target, op0,
8694 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8695 return target;
8696
8697 case NEGATE_EXPR:
8698 op0 = expand_expr (treeop0, subtarget,
8699 VOIDmode, EXPAND_NORMAL);
8700 if (modifier == EXPAND_STACK_PARM)
8701 target = 0;
8702 temp = expand_unop (mode,
8703 optab_for_tree_code (NEGATE_EXPR, type,
8704 optab_default),
8705 op0, target, 0);
8706 gcc_assert (temp);
8707 return REDUCE_BIT_FIELD (temp);
8708
8709 case ABS_EXPR:
8710 op0 = expand_expr (treeop0, subtarget,
8711 VOIDmode, EXPAND_NORMAL);
8712 if (modifier == EXPAND_STACK_PARM)
8713 target = 0;
8714
8715 /* ABS_EXPR is not valid for complex arguments. */
8716 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8717 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8718
8719 /* Unsigned abs is simply the operand. Testing here means we don't
8720 risk generating incorrect code below. */
8721 if (TYPE_UNSIGNED (type))
8722 return op0;
8723
8724 return expand_abs (mode, op0, target, unsignedp,
8725 safe_from_p (target, treeop0, 1));
8726
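/* Expand MIN/MAX by first trying a direct min/max instruction, then a
   conditional move if the target has one, and finally a compare and
   conditional jump.  */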
8727 case MAX_EXPR:
8728 case MIN_EXPR:
8729 target = original_target;
8730 if (target == 0
8731 || modifier == EXPAND_STACK_PARM
8732 || (MEM_P (target) && MEM_VOLATILE_P (target))
8733 || GET_MODE (target) != mode
8734 || (REG_P (target)
8735 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8736 target = gen_reg_rtx (mode);
8737 expand_operands (treeop0, treeop1,
8738 target, &op0, &op1, EXPAND_NORMAL);
8739
8740 /* First try to do it with a special MIN or MAX instruction.
8741 If that does not win, use a conditional jump to select the proper
8742 value. */
8743 this_optab = optab_for_tree_code (code, type, optab_default);
8744 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8745 OPTAB_WIDEN);
8746 if (temp != 0)
8747 return temp;
8748
8749 /* At this point, a MEM target is no longer useful; we will get better
8750 code without it. */
8751
8752 if (! REG_P (target))
8753 target = gen_reg_rtx (mode);
8754
8755 /* If op1 was placed in target, swap op0 and op1. */
8756 if (target != op0 && target == op1)
8757 {
8758 temp = op0;
8759 op0 = op1;
8760 op1 = temp;
8761 }
8762
8763 /* We generate better code and avoid problems with op1 mentioning
8764 target by forcing op1 into a pseudo if it isn't a constant. */
8765 if (! CONSTANT_P (op1))
8766 op1 = force_reg (mode, op1);
8767
8768 {
8769 enum rtx_code comparison_code;
8770 rtx cmpop1 = op1;
8771
8772 if (code == MAX_EXPR)
8773 comparison_code = unsignedp ? GEU : GE;
8774 else
8775 comparison_code = unsignedp ? LEU : LE;
8776
8777 /* Canonicalize to comparisons against 0. */
8778 if (op1 == const1_rtx)
8779 {
8780 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8781 or (a != 0 ? a : 1) for unsigned.
8782 For MIN we are safe converting (a <= 1 ? a : 1)
8783 into (a <= 0 ? a : 1) */
8784 cmpop1 = const0_rtx;
8785 if (code == MAX_EXPR)
8786 comparison_code = unsignedp ? NE : GT;
8787 }
8788 if (op1 == constm1_rtx && !unsignedp)
8789 {
8790 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8791 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8792 cmpop1 = const0_rtx;
8793 if (code == MIN_EXPR)
8794 comparison_code = LT;
8795 }
8796 #ifdef HAVE_conditional_move
8797 /* Use a conditional move if possible. */
8798 if (can_conditionally_move_p (mode))
8799 {
8800 rtx insn;
8801
8802 start_sequence ();
8803
8804 /* Try to emit the conditional move. */
8805 insn = emit_conditional_move (target, comparison_code,
8806 op0, cmpop1, mode,
8807 op0, op1, mode,
8808 unsignedp);
8809
8810 /* If we could do the conditional move, emit the sequence,
8811 and return. */
8812 if (insn)
8813 {
8814 rtx seq = get_insns ();
8815 end_sequence ();
8816 emit_insn (seq);
8817 return target;
8818 }
8819
8820 /* Otherwise discard the sequence and fall back to code with
8821 branches. */
8822 end_sequence ();
8823 }
8824 #endif
8825 if (target != op0)
8826 emit_move_insn (target, op0);
8827
8828 temp = gen_label_rtx ();
8829 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8830 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8831 -1);
8832 }
8833 emit_move_insn (target, op1);
8834 emit_label (temp);
8835 return target;
8836
8837 case BIT_NOT_EXPR:
8838 op0 = expand_expr (treeop0, subtarget,
8839 VOIDmode, EXPAND_NORMAL);
8840 if (modifier == EXPAND_STACK_PARM)
8841 target = 0;
8842 /* In case we have to reduce the result to bitfield precision
8843 for an unsigned bitfield, expand this as an XOR with the proper
8844 mask constant instead. */
8845 if (reduce_bit_field && TYPE_UNSIGNED (type))
8846 {
8847 wide_int mask = wi::mask (TYPE_PRECISION (type),
8848 false, GET_MODE_PRECISION (mode));
8849
8850 temp = expand_binop (mode, xor_optab, op0,
8851 immed_wide_int_const (mask, mode),
8852 target, 1, OPTAB_LIB_WIDEN);
8853 }
8854 else
8855 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8856 gcc_assert (temp);
8857 return temp;
8858
8859 /* ??? Can optimize bitwise operations with one arg constant.
8860 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8861 and (a bitwise1 b) bitwise2 b (etc)
8862 but that is probably not worthwhile. */
8863
8864 case BIT_AND_EXPR:
8865 case BIT_IOR_EXPR:
8866 case BIT_XOR_EXPR:
8867 goto binop;
8868
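/* Rotates are only valid when the rotation width equals the mode's
   precision (vector modes aside); rotating a value narrower than its
   mode would bring padding bits into the result, hence the assert.  */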
8869 case LROTATE_EXPR:
8870 case RROTATE_EXPR:
8871 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8872 || (GET_MODE_PRECISION (TYPE_MODE (type))
8873 == TYPE_PRECISION (type)));
8874 /* fall through */
8875
8876 case LSHIFT_EXPR:
8877 case RSHIFT_EXPR:
8878 /* If this is a fixed-point operation, then we cannot use the code
8879 below because "expand_shift" doesn't support sat/no-sat fixed-point
8880 shifts. */
8881 if (ALL_FIXED_POINT_MODE_P (mode))
8882 goto binop;
8883
8884 if (! safe_from_p (subtarget, treeop1, 1))
8885 subtarget = 0;
8886 if (modifier == EXPAND_STACK_PARM)
8887 target = 0;
8888 op0 = expand_expr (treeop0, subtarget,
8889 VOIDmode, EXPAND_NORMAL);
8890 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8891 unsignedp);
8892 if (code == LSHIFT_EXPR)
8893 temp = REDUCE_BIT_FIELD (temp);
8894 return temp;
8895
8896 /* Could determine the answer when only additive constants differ. Also,
8897 the addition of one can be handled by changing the condition. */
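/* Comparisons: first try to produce the 0/1 result directly with
   do_store_flag; if that fails, store 0, jump past the following store
   when the comparison is false, then store 1 (or -1 for a signed
   one-bit type).  */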
8898 case LT_EXPR:
8899 case LE_EXPR:
8900 case GT_EXPR:
8901 case GE_EXPR:
8902 case EQ_EXPR:
8903 case NE_EXPR:
8904 case UNORDERED_EXPR:
8905 case ORDERED_EXPR:
8906 case UNLT_EXPR:
8907 case UNLE_EXPR:
8908 case UNGT_EXPR:
8909 case UNGE_EXPR:
8910 case UNEQ_EXPR:
8911 case LTGT_EXPR:
8912 temp = do_store_flag (ops,
8913 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8914 tmode != VOIDmode ? tmode : mode);
8915 if (temp)
8916 return temp;
8917
8918 /* Use a compare and a jump for BLKmode comparisons, or for function
8919 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8920
8921 if ((target == 0
8922 || modifier == EXPAND_STACK_PARM
8923 || ! safe_from_p (target, treeop0, 1)
8924 || ! safe_from_p (target, treeop1, 1)
8925 /* Make sure we don't have a hard reg (such as function's return
8926 value) live across basic blocks, if not optimizing. */
8927 || (!optimize && REG_P (target)
8928 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8929 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8930
8931 emit_move_insn (target, const0_rtx);
8932
8933 op1 = gen_label_rtx ();
8934 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8935
8936 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8937 emit_move_insn (target, constm1_rtx);
8938 else
8939 emit_move_insn (target, const1_rtx);
8940
8941 emit_label (op1);
8942 return target;
8943
8944 case COMPLEX_EXPR:
8945 /* Get the rtx for the operands. */
8946 op0 = expand_normal (treeop0);
8947 op1 = expand_normal (treeop1);
8948
8949 if (!target)
8950 target = gen_reg_rtx (TYPE_MODE (type));
8951 else
8952 /* If target overlaps with op1, then either we need to force
8953 op1 into a pseudo (if target also overlaps with op0),
8954 or write the complex parts in reverse order. */
8955 switch (GET_CODE (target))
8956 {
8957 case CONCAT:
8958 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8959 {
8960 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8961 {
8962 complex_expr_force_op1:
8963 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8964 emit_move_insn (temp, op1);
8965 op1 = temp;
8966 break;
8967 }
8968 complex_expr_swap_order:
8969 /* Move the imaginary (op1) and real (op0) parts to their
8970 location. */
8971 write_complex_part (target, op1, true);
8972 write_complex_part (target, op0, false);
8973
8974 return target;
8975 }
8976 break;
8977 case MEM:
8978 temp = adjust_address_nv (target,
8979 GET_MODE_INNER (GET_MODE (target)), 0);
8980 if (reg_overlap_mentioned_p (temp, op1))
8981 {
8982 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8983 temp = adjust_address_nv (target, imode,
8984 GET_MODE_SIZE (imode));
8985 if (reg_overlap_mentioned_p (temp, op0))
8986 goto complex_expr_force_op1;
8987 goto complex_expr_swap_order;
8988 }
8989 break;
8990 default:
8991 if (reg_overlap_mentioned_p (target, op1))
8992 {
8993 if (reg_overlap_mentioned_p (target, op0))
8994 goto complex_expr_force_op1;
8995 goto complex_expr_swap_order;
8996 }
8997 break;
8998 }
8999
9000 /* Move the real (op0) and imaginary (op1) parts to their location. */
9001 write_complex_part (target, op0, false);
9002 write_complex_part (target, op1, true);
9003
9004 return target;
9005
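/* Widening summation: operand 0 is the narrow input and operand 1 the
   wide accumulator; expand through the target's widening-sum pattern.  */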
9006 case WIDEN_SUM_EXPR:
9007 {
9008 tree oprnd0 = treeop0;
9009 tree oprnd1 = treeop1;
9010
9011 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9012 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9013 target, unsignedp);
9014 return target;
9015 }
9016
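/* Reductions across all elements of a vector operand, expanded through
   the corresponding reduction optab.  */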
9017 case REDUC_MAX_EXPR:
9018 case REDUC_MIN_EXPR:
9019 case REDUC_PLUS_EXPR:
9020 {
9021 op0 = expand_normal (treeop0);
9022 this_optab = optab_for_tree_code (code, type, optab_default);
9023 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9024 gcc_assert (temp);
9025 return temp;
9026 }
9027
9028 case VEC_LSHIFT_EXPR:
9029 case VEC_RSHIFT_EXPR:
9030 {
9031 target = expand_vec_shift_expr (ops, target);
9032 return target;
9033 }
9034
9035 case VEC_UNPACK_HI_EXPR:
9036 case VEC_UNPACK_LO_EXPR:
9037 {
9038 op0 = expand_normal (treeop0);
9039 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9040 target, unsignedp);
9041 gcc_assert (temp);
9042 return temp;
9043 }
9044
9045 case VEC_UNPACK_FLOAT_HI_EXPR:
9046 case VEC_UNPACK_FLOAT_LO_EXPR:
9047 {
9048 op0 = expand_normal (treeop0);
9049 /* The signedness is determined from the input operand. */
9050 temp = expand_widen_pattern_expr
9051 (ops, op0, NULL_RTX, NULL_RTX,
9052 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9053
9054 gcc_assert (temp);
9055 return temp;
9056 }
9057
9058 case VEC_WIDEN_MULT_HI_EXPR:
9059 case VEC_WIDEN_MULT_LO_EXPR:
9060 case VEC_WIDEN_MULT_EVEN_EXPR:
9061 case VEC_WIDEN_MULT_ODD_EXPR:
9062 case VEC_WIDEN_LSHIFT_HI_EXPR:
9063 case VEC_WIDEN_LSHIFT_LO_EXPR:
9064 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9065 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9066 target, unsignedp);
9067 gcc_assert (target);
9068 return target;
9069
9070 case VEC_PACK_TRUNC_EXPR:
9071 case VEC_PACK_SAT_EXPR:
9072 case VEC_PACK_FIX_TRUNC_EXPR:
9073 mode = TYPE_MODE (TREE_TYPE (treeop0));
9074 goto binop;
9075
9076 case VEC_PERM_EXPR:
9077 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9078 op2 = expand_normal (treeop2);
9079
9080 /* Careful here: if the target doesn't support integral vector modes,
9081 a constant selection vector could wind up smooshed into a normal
9082 integral constant. */
9083 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9084 {
9085 tree sel_type = TREE_TYPE (treeop2);
9086 enum machine_mode vmode
9087 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9088 TYPE_VECTOR_SUBPARTS (sel_type));
9089 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9090 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9091 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9092 }
9093 else
9094 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9095
9096 temp = expand_vec_perm (mode, op0, op1, op2, target);
9097 gcc_assert (temp);
9098 return temp;
9099
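/* Dot product and (below) sum of absolute differences: operands 0 and 1
   are the narrow vector inputs and operand 2 is the wide accumulator;
   both expand through a widening pattern.  */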
9100 case DOT_PROD_EXPR:
9101 {
9102 tree oprnd0 = treeop0;
9103 tree oprnd1 = treeop1;
9104 tree oprnd2 = treeop2;
9105 rtx op2;
9106
9107 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9108 op2 = expand_normal (oprnd2);
9109 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9110 target, unsignedp);
9111 return target;
9112 }
9113
9114 case SAD_EXPR:
9115 {
9116 tree oprnd0 = treeop0;
9117 tree oprnd1 = treeop1;
9118 tree oprnd2 = treeop2;
9119 rtx op2;
9120
9121 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9122 op2 = expand_normal (oprnd2);
9123 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9124 target, unsignedp);
9125 return target;
9126 }
9127
9128 case REALIGN_LOAD_EXPR:
9129 {
9130 tree oprnd0 = treeop0;
9131 tree oprnd1 = treeop1;
9132 tree oprnd2 = treeop2;
9133 rtx op2;
9134
9135 this_optab = optab_for_tree_code (code, type, optab_default);
9136 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9137 op2 = expand_normal (oprnd2);
9138 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9139 target, unsignedp);
9140 gcc_assert (temp);
9141 return temp;
9142 }
9143
9144 case COND_EXPR:
9145 /* A COND_EXPR with its type being VOID_TYPE represents a
9146 conditional jump and is handled in
9147 expand_gimple_cond_expr. */
9148 gcc_assert (!VOID_TYPE_P (type));
9149
9150 /* Note that COND_EXPRs whose type is a structure or union
9151 are required to be constructed to contain assignments of
9152 a temporary variable, so that we can evaluate them here
9153 for side effect only. If type is void, we must do likewise. */
9154
9155 gcc_assert (!TREE_ADDRESSABLE (type)
9156 && !ignore
9157 && TREE_TYPE (treeop1) != void_type_node
9158 && TREE_TYPE (treeop2) != void_type_node);
9159
9160 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9161 if (temp)
9162 return temp;
9163
9164 /* If we are not to produce a result, we have no target. Otherwise,
9165 if a target was specified use it; it will not be used as an
9166 intermediate target unless it is safe. If no target, use a
9167 temporary. */
9168
9169 if (modifier != EXPAND_STACK_PARM
9170 && original_target
9171 && safe_from_p (original_target, treeop0, 1)
9172 && GET_MODE (original_target) == mode
9173 && !MEM_P (original_target))
9174 temp = original_target;
9175 else
9176 temp = assign_temp (type, 0, 1);
9177
9178 do_pending_stack_adjust ();
9179 NO_DEFER_POP;
9180 op0 = gen_label_rtx ();
9181 op1 = gen_label_rtx ();
9182 jumpifnot (treeop0, op0, -1);
9183 store_expr (treeop1, temp,
9184 modifier == EXPAND_STACK_PARM,
9185 false);
9186
9187 emit_jump_insn (gen_jump (op1));
9188 emit_barrier ();
9189 emit_label (op0);
9190 store_expr (treeop2, temp,
9191 modifier == EXPAND_STACK_PARM,
9192 false);
9193
9194 emit_label (op1);
9195 OK_DEFER_POP;
9196 return temp;
9197
9198 case VEC_COND_EXPR:
9199 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9200 return target;
9201
9202 default:
9203 gcc_unreachable ();
9204 }
9205
9206 /* Here to do an ordinary binary operator. */
9207 binop:
9208 expand_operands (treeop0, treeop1,
9209 subtarget, &op0, &op1, EXPAND_NORMAL);
9210 binop2:
9211 this_optab = optab_for_tree_code (code, type, optab_default);
9212 binop3:
9213 if (modifier == EXPAND_STACK_PARM)
9214 target = 0;
9215 temp = expand_binop (mode, this_optab, op0, op1, target,
9216 unsignedp, OPTAB_LIB_WIDEN);
9217 gcc_assert (temp);
9218 /* Bitwise operations do not need bitfield reduction as we expect their
9219 operands to be properly truncated. */
9220 if (code == BIT_XOR_EXPR
9221 || code == BIT_AND_EXPR
9222 || code == BIT_IOR_EXPR)
9223 return temp;
9224 return REDUCE_BIT_FIELD (temp);
9225 }
9226 #undef REDUCE_BIT_FIELD
9227
9228 /* Return TRUE if the value of SSA is known to be both zero and sign
9229 extended to the wider mode MODE, using stored value range information.
9230 Return FALSE otherwise.
9231 This is used to check whether a SUBREG is zero and sign extended and
9232 to set its promoted mode to SRP_SIGNED_AND_UNSIGNED. */
9233
9234 bool
9235 promoted_for_signed_and_unsigned_p (tree ssa, enum machine_mode mode)
9236 {
9237 wide_int min, max;
9238
9239 if (ssa == NULL_TREE
9240 || TREE_CODE (ssa) != SSA_NAME
9241 || !INTEGRAL_TYPE_P (TREE_TYPE (ssa))
9242 || (TYPE_PRECISION (TREE_TYPE (ssa)) != GET_MODE_PRECISION (mode)))
9243 return false;
9244
9245 /* Return FALSE if value_range is not recorded for SSA. */
9246 if (get_range_info (ssa, &min, &max) != VR_RANGE)
9247 return false;
9248
9249 /* Return true (to set SRP_SIGNED_AND_UNSIGNED to SUBREG) if MSB of the
9250 smaller mode is not set (i.e. MSB of ssa is not set). */
9251 if (!wi::neg_p (min, SIGNED) && !wi::neg_p (max, SIGNED))
9252 return true;
9253 else
9254 return false;
9255
9256 }
9257
9258 /* Return TRUE if expression STMT is suitable for replacement.
9259 Never consider memory loads as replaceable, because those don't ever lead
9260 into constant expressions. */
9261
9262 static bool
9263 stmt_is_replaceable_p (gimple stmt)
9264 {
9265 if (ssa_is_replaceable_p (stmt))
9266 {
9267 /* Don't move around loads. */
9268 if (!gimple_assign_single_p (stmt)
9269 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9270 return true;
9271 }
9272 return false;
9273 }
9274
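/* Expand the tree expression EXP into rtl on behalf of expand_expr_real.
   TARGET, TMODE, MODIFIER, ALT_RTL and INNER_REFERENCE_P are passed
   through from expand_expr_real.  */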
9275 rtx
9276 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9277 enum expand_modifier modifier, rtx *alt_rtl,
9278 bool inner_reference_p)
9279 {
9280 rtx op0, op1, temp, decl_rtl;
9281 tree type;
9282 int unsignedp;
9283 enum machine_mode mode;
9284 enum tree_code code = TREE_CODE (exp);
9285 rtx subtarget, original_target;
9286 int ignore;
9287 tree context;
9288 bool reduce_bit_field;
9289 location_t loc = EXPR_LOCATION (exp);
9290 struct separate_ops ops;
9291 tree treeop0, treeop1, treeop2;
9292 tree ssa_name = NULL_TREE;
9293 gimple g;
9294
9295 type = TREE_TYPE (exp);
9296 mode = TYPE_MODE (type);
9297 unsignedp = TYPE_UNSIGNED (type);
9298
9299 treeop0 = treeop1 = treeop2 = NULL_TREE;
9300 if (!VL_EXP_CLASS_P (exp))
9301 switch (TREE_CODE_LENGTH (code))
9302 {
9303 default:
9304 case 3: treeop2 = TREE_OPERAND (exp, 2);
9305 case 2: treeop1 = TREE_OPERAND (exp, 1);
9306 case 1: treeop0 = TREE_OPERAND (exp, 0);
9307 case 0: break;
9308 }
9309 ops.code = code;
9310 ops.type = type;
9311 ops.op0 = treeop0;
9312 ops.op1 = treeop1;
9313 ops.op2 = treeop2;
9314 ops.location = loc;
9315
9316 ignore = (target == const0_rtx
9317 || ((CONVERT_EXPR_CODE_P (code)
9318 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9319 && TREE_CODE (type) == VOID_TYPE));
9320
9321 /* An operation in what may be a bit-field type needs the
9322 result to be reduced to the precision of the bit-field type,
9323 which is narrower than that of the type's mode. */
9324 reduce_bit_field = (!ignore
9325 && INTEGRAL_TYPE_P (type)
9326 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9327
9328 /* If we are going to ignore this result, we need only do something
9329 if there is a side-effect somewhere in the expression. If there
9330 is, short-circuit the most common cases here. Note that we must
9331 not call expand_expr with anything but const0_rtx in case this
9332 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9333
9334 if (ignore)
9335 {
9336 if (! TREE_SIDE_EFFECTS (exp))
9337 return const0_rtx;
9338
9339 /* Ensure we reference a volatile object even if value is ignored, but
9340 don't do this if all we are doing is taking its address. */
9341 if (TREE_THIS_VOLATILE (exp)
9342 && TREE_CODE (exp) != FUNCTION_DECL
9343 && mode != VOIDmode && mode != BLKmode
9344 && modifier != EXPAND_CONST_ADDRESS)
9345 {
9346 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9347 if (MEM_P (temp))
9348 copy_to_reg (temp);
9349 return const0_rtx;
9350 }
9351
9352 if (TREE_CODE_CLASS (code) == tcc_unary
9353 || code == BIT_FIELD_REF
9354 || code == COMPONENT_REF
9355 || code == INDIRECT_REF)
9356 return expand_expr (treeop0, const0_rtx, VOIDmode,
9357 modifier);
9358
9359 else if (TREE_CODE_CLASS (code) == tcc_binary
9360 || TREE_CODE_CLASS (code) == tcc_comparison
9361 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9362 {
9363 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9364 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9365 return const0_rtx;
9366 }
9367
9368 target = 0;
9369 }
9370
9371 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9372 target = 0;
9373
9374 /* Use subtarget as the target for operand 0 of a binary operation. */
9375 subtarget = get_subtarget (target);
9376 original_target = target;
9377
9378 switch (code)
9379 {
9380 case LABEL_DECL:
9381 {
9382 tree function = decl_function_context (exp);
9383
9384 temp = label_rtx (exp);
9385 temp = gen_rtx_LABEL_REF (Pmode, temp);
9386
9387 if (function != current_function_decl
9388 && function != 0)
9389 LABEL_REF_NONLOCAL_P (temp) = 1;
9390
9391 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9392 return temp;
9393 }
9394
9395 case SSA_NAME:
9396 /* ??? ivopts calls the expander without any preparation from
9397 out-of-ssa, so fake instructions as if this were an access to the
9398 base variable. This unnecessarily allocates a pseudo; see whether we
9399 can reuse it if partition base vars have it set already. */
9400 if (!currently_expanding_to_rtl)
9401 {
9402 tree var = SSA_NAME_VAR (exp);
9403 if (var && DECL_RTL_SET_P (var))
9404 return DECL_RTL (var);
9405 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9406 LAST_VIRTUAL_REGISTER + 1);
9407 }
9408
9409 g = get_gimple_for_ssa_name (exp);
9410 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9411 if (g == NULL
9412 && modifier == EXPAND_INITIALIZER
9413 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9414 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9415 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9416 g = SSA_NAME_DEF_STMT (exp);
9417 if (g)
9418 {
9419 rtx r;
9420 ops.code = gimple_assign_rhs_code (g);
9421 switch (get_gimple_rhs_class (ops.code))
9422 {
9423 case GIMPLE_TERNARY_RHS:
9424 ops.op2 = gimple_assign_rhs3 (g);
9425 /* Fallthru */
9426 case GIMPLE_BINARY_RHS:
9427 ops.op1 = gimple_assign_rhs2 (g);
9428 /* Fallthru */
9429 case GIMPLE_UNARY_RHS:
9430 ops.op0 = gimple_assign_rhs1 (g);
9431 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9432 ops.location = gimple_location (g);
9433 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9434 break;
9435 case GIMPLE_SINGLE_RHS:
9436 {
9437 location_t saved_loc = curr_insn_location ();
9438 set_curr_insn_location (gimple_location (g));
9439 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9440 tmode, modifier, NULL, inner_reference_p);
9441 set_curr_insn_location (saved_loc);
9442 break;
9443 }
9444 default:
9445 gcc_unreachable ();
9446 }
9447 if (REG_P (r) && !REG_EXPR (r))
9448 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9449 return r;
9450 }
9451
9452 ssa_name = exp;
9453 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9454 exp = SSA_NAME_VAR (ssa_name);
9455 goto expand_decl_rtl;
9456
9457 case PARM_DECL:
9458 case VAR_DECL:
9459 /* If a static var's type was incomplete when the decl was written,
9460 but the type is complete now, lay out the decl now. */
9461 if (DECL_SIZE (exp) == 0
9462 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9463 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9464 layout_decl (exp, 0);
9465
9466 /* ... fall through ... */
9467
9468 case FUNCTION_DECL:
9469 case RESULT_DECL:
9470 decl_rtl = DECL_RTL (exp);
9471 expand_decl_rtl:
9472 gcc_assert (decl_rtl);
9473 decl_rtl = copy_rtx (decl_rtl);
9474 /* Record writes to register variables. */
9475 if (modifier == EXPAND_WRITE
9476 && REG_P (decl_rtl)
9477 && HARD_REGISTER_P (decl_rtl))
9478 add_to_hard_reg_set (&crtl->asm_clobbers,
9479 GET_MODE (decl_rtl), REGNO (decl_rtl));
9480
9481 /* Ensure the variable is marked as used even if it doesn't go through
9482 a parser. If it hasn't been used yet, write out an external
9483 definition. */
9484 TREE_USED (exp) = 1;
9485
9486 /* Show we haven't gotten RTL for this yet. */
9487 temp = 0;
9488
9489 /* Variables inherited from containing functions should have
9490 been lowered by this point. */
9491 context = decl_function_context (exp);
9492 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9493 || context == current_function_decl
9494 || TREE_STATIC (exp)
9495 || DECL_EXTERNAL (exp)
9496 /* ??? C++ creates functions that are not TREE_STATIC. */
9497 || TREE_CODE (exp) == FUNCTION_DECL);
9498
9499 /* This is the case of an array whose size is to be determined
9500 from its initializer, while the initializer is still being parsed.
9501 ??? We aren't parsing while expanding anymore. */
9502
9503 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9504 temp = validize_mem (decl_rtl);
9505
9506 /* If DECL_RTL is memory, we are in the normal case and the
9507 address is not valid, get the address into a register. */
9508
9509 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9510 {
9511 if (alt_rtl)
9512 *alt_rtl = decl_rtl;
9513 decl_rtl = use_anchored_address (decl_rtl);
9514 if (modifier != EXPAND_CONST_ADDRESS
9515 && modifier != EXPAND_SUM
9516 && !memory_address_addr_space_p (DECL_MODE (exp),
9517 XEXP (decl_rtl, 0),
9518 MEM_ADDR_SPACE (decl_rtl)))
9519 temp = replace_equiv_address (decl_rtl,
9520 copy_rtx (XEXP (decl_rtl, 0)));
9521 }
9522
9523 /* If we got something, return it. But first, set the alignment
9524 if the address is a register. */
9525 if (temp != 0)
9526 {
9527 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9528 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9529
9530 return temp;
9531 }
9532
9533 /* If the mode of DECL_RTL does not match that of the decl,
9534 there are two cases: we are dealing with a BLKmode value
9535 that is returned in a register, or we are dealing with
9536 a promoted value. In the latter case, return a SUBREG
9537 of the wanted mode, but mark it so that we know that it
9538 was already extended. */
9539 if (REG_P (decl_rtl)
9540 && DECL_MODE (exp) != BLKmode
9541 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9542 {
9543 enum machine_mode pmode;
9544
9545 /* Get the signedness to be used for this variable. Ensure we get
9546 the same mode we got when the variable was declared. */
9547 if (code == SSA_NAME
9548 && (g = SSA_NAME_DEF_STMT (ssa_name))
9549 && gimple_code (g) == GIMPLE_CALL
9550 && !gimple_call_internal_p (g))
9551 pmode = promote_function_mode (type, mode, &unsignedp,
9552 gimple_call_fntype (g),
9553 2);
9554 else
9555 pmode = promote_decl_mode (exp, &unsignedp);
9556 gcc_assert (GET_MODE (decl_rtl) == pmode);
9557
9558 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9559 SUBREG_PROMOTED_VAR_P (temp) = 1;
9560 if (promoted_for_signed_and_unsigned_p (ssa_name, mode))
9561 SUBREG_PROMOTED_SET (temp, SRP_SIGNED_AND_UNSIGNED);
9562 else
9563 SUBREG_PROMOTED_SET (temp, unsignedp);
9564 return temp;
9565 }
9566
9567 return decl_rtl;
9568
9569 case INTEGER_CST:
9570 /* Given that TYPE_PRECISION (type) is not always equal to
9571 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9572 the former to the latter according to the signedness of the
9573 type. */
9574 temp = immed_wide_int_const (wide_int::from
9575 (exp,
9576 GET_MODE_PRECISION (TYPE_MODE (type)),
9577 TYPE_SIGN (type)),
9578 TYPE_MODE (type));
9579 return temp;
9580
9581 case VECTOR_CST:
9582 {
9583 tree tmp = NULL_TREE;
9584 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9585 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9586 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9587 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9588 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9589 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9590 return const_vector_from_tree (exp);
9591 if (GET_MODE_CLASS (mode) == MODE_INT)
9592 {
9593 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9594 if (type_for_mode)
9595 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9596 }
9597 if (!tmp)
9598 {
9599 vec<constructor_elt, va_gc> *v;
9600 unsigned i;
9601 vec_alloc (v, VECTOR_CST_NELTS (exp));
9602 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9603 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9604 tmp = build_constructor (type, v);
9605 }
9606 return expand_expr (tmp, ignore ? const0_rtx : target,
9607 tmode, modifier);
9608 }
9609
9610 case CONST_DECL:
9611 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9612
9613 case REAL_CST:
9614 /* If optimized, generate immediate CONST_DOUBLE
9615 which will be turned into memory by reload if necessary.
9616
9617 We used to force a register so that loop.c could see it. But
9618 this does not allow gen_* patterns to perform optimizations with
9619 the constants. It also produces two insns in cases like "x = 1.0;".
9620 On most machines, floating-point constants are not permitted in
9621 many insns, so we'd end up copying it to a register in any case.
9622
9623 Now, we do the copying in expand_binop, if appropriate. */
9624 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9625 TYPE_MODE (TREE_TYPE (exp)));
9626
9627 case FIXED_CST:
9628 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9629 TYPE_MODE (TREE_TYPE (exp)));
9630
9631 case COMPLEX_CST:
9632 /* Handle evaluating a complex constant in a CONCAT target. */
9633 if (original_target && GET_CODE (original_target) == CONCAT)
9634 {
9635 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9636 rtx rtarg, itarg;
9637
9638 rtarg = XEXP (original_target, 0);
9639 itarg = XEXP (original_target, 1);
9640
9641 /* Move the real and imaginary parts separately. */
9642 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9643 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9644
9645 if (op0 != rtarg)
9646 emit_move_insn (rtarg, op0);
9647 if (op1 != itarg)
9648 emit_move_insn (itarg, op1);
9649
9650 return original_target;
9651 }
9652
9653 /* ... fall through ... */
9654
9655 case STRING_CST:
9656 temp = expand_expr_constant (exp, 1, modifier);
9657
9658 /* temp contains a constant address.
9659 On RISC machines where a constant address isn't valid,
9660 make some insns to get that address into a register. */
9661 if (modifier != EXPAND_CONST_ADDRESS
9662 && modifier != EXPAND_INITIALIZER
9663 && modifier != EXPAND_SUM
9664 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9665 MEM_ADDR_SPACE (temp)))
9666 return replace_equiv_address (temp,
9667 copy_rtx (XEXP (temp, 0)));
9668 return temp;
9669
9670 case SAVE_EXPR:
9671 {
9672 tree val = treeop0;
9673 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9674 inner_reference_p);
9675
9676 if (!SAVE_EXPR_RESOLVED_P (exp))
9677 {
9678 /* We can indeed still hit this case, typically via builtin
9679 expanders calling save_expr immediately before expanding
9680 something. Assume this means that we only have to deal
9681 with non-BLKmode values. */
9682 gcc_assert (GET_MODE (ret) != BLKmode);
9683
9684 val = build_decl (curr_insn_location (),
9685 VAR_DECL, NULL, TREE_TYPE (exp));
9686 DECL_ARTIFICIAL (val) = 1;
9687 DECL_IGNORED_P (val) = 1;
9688 treeop0 = val;
9689 TREE_OPERAND (exp, 0) = treeop0;
9690 SAVE_EXPR_RESOLVED_P (exp) = 1;
9691
9692 if (!CONSTANT_P (ret))
9693 ret = copy_to_reg (ret);
9694 SET_DECL_RTL (val, ret);
9695 }
9696
9697 return ret;
9698 }
9699
9700
9701 case CONSTRUCTOR:
9702 /* If we don't need the result, just ensure we evaluate any
9703 subexpressions. */
9704 if (ignore)
9705 {
9706 unsigned HOST_WIDE_INT idx;
9707 tree value;
9708
9709 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9710 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9711
9712 return const0_rtx;
9713 }
9714
9715 return expand_constructor (exp, target, modifier, false);
9716
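/* A memory reference already in the target's preferred addressing form.
   Build the address in the reference's address space, wrap it in a MEM,
   and use a movmisalign pattern if the access is under-aligned for its
   mode and the target provides one.  */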
9717 case TARGET_MEM_REF:
9718 {
9719 addr_space_t as
9720 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9721 enum insn_code icode;
9722 unsigned int align;
9723
9724 op0 = addr_for_mem_ref (exp, as, true);
9725 op0 = memory_address_addr_space (mode, op0, as);
9726 temp = gen_rtx_MEM (mode, op0);
9727 set_mem_attributes (temp, exp, 0);
9728 set_mem_addr_space (temp, as);
9729 align = get_object_alignment (exp);
9730 if (modifier != EXPAND_WRITE
9731 && modifier != EXPAND_MEMORY
9732 && mode != BLKmode
9733 && align < GET_MODE_ALIGNMENT (mode)
9734 /* If the target does not have special handling for unaligned
9735 loads of mode then it can use regular moves for them. */
9736 && ((icode = optab_handler (movmisalign_optab, mode))
9737 != CODE_FOR_nothing))
9738 {
9739 struct expand_operand ops[2];
9740
9741 /* We've already validated the memory, and we're creating a
9742 new pseudo destination. The predicates really can't fail,
9743 nor can the generator. */
9744 create_output_operand (&ops[0], NULL_RTX, mode);
9745 create_fixed_operand (&ops[1], temp);
9746 expand_insn (icode, 2, ops);
9747 temp = ops[0].value;
9748 }
9749 return temp;
9750 }
9751
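/* A memory reference through a pointer plus constant offset.  If the
   base object is not really in memory, rewrite the access as a
   VIEW_CONVERT_EXPR or BIT_FIELD_REF of the base (spilling to a stack
   temporary for BLKmode); otherwise form the address, add the offset,
   and cope with under-aligned accesses via movmisalign or a bit-field
   extraction.  */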
9752 case MEM_REF:
9753 {
9754 addr_space_t as
9755 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9756 enum machine_mode address_mode;
9757 tree base = TREE_OPERAND (exp, 0);
9758 gimple def_stmt;
9759 enum insn_code icode;
9760 unsigned align;
9761 /* Handle expansion of non-aliased memory with non-BLKmode. That
9762 might end up in a register. */
9763 if (mem_ref_refers_to_non_mem_p (exp))
9764 {
9765 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9766 base = TREE_OPERAND (base, 0);
9767 if (offset == 0
9768 && tree_fits_uhwi_p (TYPE_SIZE (type))
9769 && (GET_MODE_BITSIZE (DECL_MODE (base))
9770 == tree_to_uhwi (TYPE_SIZE (type))))
9771 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9772 target, tmode, modifier);
9773 if (TYPE_MODE (type) == BLKmode)
9774 {
9775 temp = assign_stack_temp (DECL_MODE (base),
9776 GET_MODE_SIZE (DECL_MODE (base)));
9777 store_expr (base, temp, 0, false);
9778 temp = adjust_address (temp, BLKmode, offset);
9779 set_mem_size (temp, int_size_in_bytes (type));
9780 return temp;
9781 }
9782 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9783 bitsize_int (offset * BITS_PER_UNIT));
9784 return expand_expr (exp, target, tmode, modifier);
9785 }
9786 address_mode = targetm.addr_space.address_mode (as);
9787 base = TREE_OPERAND (exp, 0);
9788 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9789 {
9790 tree mask = gimple_assign_rhs2 (def_stmt);
9791 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9792 gimple_assign_rhs1 (def_stmt), mask);
9793 TREE_OPERAND (exp, 0) = base;
9794 }
9795 align = get_object_alignment (exp);
9796 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9797 op0 = memory_address_addr_space (mode, op0, as);
9798 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9799 {
9800 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9801 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9802 op0 = memory_address_addr_space (mode, op0, as);
9803 }
9804 temp = gen_rtx_MEM (mode, op0);
9805 set_mem_attributes (temp, exp, 0);
9806 set_mem_addr_space (temp, as);
9807 if (TREE_THIS_VOLATILE (exp))
9808 MEM_VOLATILE_P (temp) = 1;
9809 if (modifier != EXPAND_WRITE
9810 && modifier != EXPAND_MEMORY
9811 && !inner_reference_p
9812 && mode != BLKmode
9813 && align < GET_MODE_ALIGNMENT (mode))
9814 {
9815 if ((icode = optab_handler (movmisalign_optab, mode))
9816 != CODE_FOR_nothing)
9817 {
9818 struct expand_operand ops[2];
9819
9820 /* We've already validated the memory, and we're creating a
9821 new pseudo destination. The predicates really can't fail,
9822 nor can the generator. */
9823 create_output_operand (&ops[0], NULL_RTX, mode);
9824 create_fixed_operand (&ops[1], temp);
9825 expand_insn (icode, 2, ops);
9826 temp = ops[0].value;
9827 }
9828 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9829 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9830 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9831 (modifier == EXPAND_STACK_PARM
9832 ? NULL_RTX : target),
9833 mode, mode);
9834 }
9835 return temp;
9836 }
9837
9838 case ARRAY_REF:
9839
9840 {
9841 tree array = treeop0;
9842 tree index = treeop1;
9843 tree init;
9844
9845 /* Fold an expression like: "foo"[2].
9846 This is not done in fold so it won't happen inside &.
9847 Don't fold if this is for wide characters since it's too
9848 difficult to do correctly and this is a very rare case. */
9849
9850 if (modifier != EXPAND_CONST_ADDRESS
9851 && modifier != EXPAND_INITIALIZER
9852 && modifier != EXPAND_MEMORY)
9853 {
9854 tree t = fold_read_from_constant_string (exp);
9855
9856 if (t)
9857 return expand_expr (t, target, tmode, modifier);
9858 }
9859
9860 /* If this is a constant index into a constant array,
9861 just get the value from the array. Handle both the cases when
9862 we have an explicit constructor and when our operand is a variable
9863 that was declared const. */
9864
9865 if (modifier != EXPAND_CONST_ADDRESS
9866 && modifier != EXPAND_INITIALIZER
9867 && modifier != EXPAND_MEMORY
9868 && TREE_CODE (array) == CONSTRUCTOR
9869 && ! TREE_SIDE_EFFECTS (array)
9870 && TREE_CODE (index) == INTEGER_CST)
9871 {
9872 unsigned HOST_WIDE_INT ix;
9873 tree field, value;
9874
9875 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9876 field, value)
9877 if (tree_int_cst_equal (field, index))
9878 {
9879 if (!TREE_SIDE_EFFECTS (value))
9880 return expand_expr (fold (value), target, tmode, modifier);
9881 break;
9882 }
9883 }
9884
9885 else if (optimize >= 1
9886 && modifier != EXPAND_CONST_ADDRESS
9887 && modifier != EXPAND_INITIALIZER
9888 && modifier != EXPAND_MEMORY
9889 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9890 && TREE_CODE (index) == INTEGER_CST
9891 && (TREE_CODE (array) == VAR_DECL
9892 || TREE_CODE (array) == CONST_DECL)
9893 && (init = ctor_for_folding (array)) != error_mark_node)
9894 {
9895 if (init == NULL_TREE)
9896 {
9897 tree value = build_zero_cst (type);
9898 if (TREE_CODE (value) == CONSTRUCTOR)
9899 {
9900 /* If VALUE is a CONSTRUCTOR, this optimization is only
9901 useful if this doesn't store the CONSTRUCTOR into
9902 memory. If it does, it is more efficient to just
9903 load the data from the array directly. */
9904 rtx ret = expand_constructor (value, target,
9905 modifier, true);
9906 if (ret == NULL_RTX)
9907 value = NULL_TREE;
9908 }
9909
9910 if (value)
9911 return expand_expr (value, target, tmode, modifier);
9912 }
9913 else if (TREE_CODE (init) == CONSTRUCTOR)
9914 {
9915 unsigned HOST_WIDE_INT ix;
9916 tree field, value;
9917
9918 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9919 field, value)
9920 if (tree_int_cst_equal (field, index))
9921 {
9922 if (TREE_SIDE_EFFECTS (value))
9923 break;
9924
9925 if (TREE_CODE (value) == CONSTRUCTOR)
9926 {
9927 /* If VALUE is a CONSTRUCTOR, this
9928 optimization is only useful if
9929 this doesn't store the CONSTRUCTOR
9930 into memory. If it does, it is more
9931 efficient to just load the data from
9932 the array directly. */
9933 rtx ret = expand_constructor (value, target,
9934 modifier, true);
9935 if (ret == NULL_RTX)
9936 break;
9937 }
9938
9939 return
9940 expand_expr (fold (value), target, tmode, modifier);
9941 }
9942 }
9943 else if (TREE_CODE (init) == STRING_CST)
9944 {
9945 tree low_bound = array_ref_low_bound (exp);
9946 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9947
9948 /* Optimize the special case of a zero lower bound.
9949
9950 We convert the lower bound to sizetype to avoid problems
9951 with constant folding. E.g. suppose the lower bound is
9952 1 and its mode is QI. Without the conversion
9953 (ARRAY + (INDEX - (unsigned char)1))
9954 becomes
9955 (ARRAY + (-(unsigned char)1) + INDEX)
9956 which becomes
9957 (ARRAY + 255 + INDEX). Oops! */
9958 if (!integer_zerop (low_bound))
9959 index1 = size_diffop_loc (loc, index1,
9960 fold_convert_loc (loc, sizetype,
9961 low_bound));
9962
9963 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9964 {
9965 tree type = TREE_TYPE (TREE_TYPE (init));
9966 enum machine_mode mode = TYPE_MODE (type);
9967
9968 if (GET_MODE_CLASS (mode) == MODE_INT
9969 && GET_MODE_SIZE (mode) == 1)
9970 return gen_int_mode (TREE_STRING_POINTER (init)
9971 [TREE_INT_CST_LOW (index1)],
9972 mode);
9973 }
9974 }
9975 }
9976 }
9977 goto normal_inner_ref;
9978
9979 case COMPONENT_REF:
9980 /* If the operand is a CONSTRUCTOR, we can just extract the
9981 appropriate field if it is present. */
9982 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9983 {
9984 unsigned HOST_WIDE_INT idx;
9985 tree field, value;
9986
9987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9988 idx, field, value)
9989 if (field == treeop1
9990 /* We can normally use the value of the field in the
9991 CONSTRUCTOR. However, if this is a bitfield in
9992 an integral mode that we can fit in a HOST_WIDE_INT,
9993 we must mask only the number of bits in the bitfield,
9994 since this is done implicitly by the constructor. If
9995 the bitfield does not meet either of those conditions,
9996 we can't do this optimization. */
9997 && (! DECL_BIT_FIELD (field)
9998 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9999 && (GET_MODE_PRECISION (DECL_MODE (field))
10000 <= HOST_BITS_PER_WIDE_INT))))
10001 {
10002 if (DECL_BIT_FIELD (field)
10003 && modifier == EXPAND_STACK_PARM)
10004 target = 0;
10005 op0 = expand_expr (value, target, tmode, modifier);
10006 if (DECL_BIT_FIELD (field))
10007 {
10008 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10009 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10010
10011 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10012 {
10013 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10014 imode);
10015 op0 = expand_and (imode, op0, op1, target);
10016 }
10017 else
10018 {
10019 int count = GET_MODE_PRECISION (imode) - bitsize;
10020
10021 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10022 target, 0);
10023 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10024 target, 0);
10025 }
10026 }
10027
10028 return op0;
10029 }
10030 }
10031 goto normal_inner_ref;
10032
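/* Component-like references.  Decompose the reference into a base
   object plus bit position and size with get_inner_reference, expand
   the base, then either address the piece directly or fall back to a
   bit-field extraction.  */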
10033 case BIT_FIELD_REF:
10034 case ARRAY_RANGE_REF:
10035 normal_inner_ref:
10036 {
10037 enum machine_mode mode1, mode2;
10038 HOST_WIDE_INT bitsize, bitpos;
10039 tree offset;
10040 int volatilep = 0, must_force_mem;
10041 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10042 &mode1, &unsignedp, &volatilep, true);
10043 rtx orig_op0, memloc;
10044 bool mem_attrs_from_type = false;
10045
10046 /* If we got back the original object, something is wrong. Perhaps
10047 we are evaluating an expression too early. In any event, don't
10048 infinitely recurse. */
10049 gcc_assert (tem != exp);
10050
10051 /* If TEM's type is a union of variable size, pass TARGET to the inner
10052 computation, since it will need a temporary and TARGET is known
10053 to be suitable. This occurs in unchecked conversion in Ada. */
10054 orig_op0 = op0
10055 = expand_expr_real (tem,
10056 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10057 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10058 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10059 != INTEGER_CST)
10060 && modifier != EXPAND_STACK_PARM
10061 ? target : NULL_RTX),
10062 VOIDmode,
10063 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10064 NULL, true);
10065
10066 /* If the field has a mode, we want to access it in the
10067 field's mode, not the computed mode.
10068 If a MEM has VOIDmode (external with incomplete type),
10069 use BLKmode for it instead. */
10070 if (MEM_P (op0))
10071 {
10072 if (mode1 != VOIDmode)
10073 op0 = adjust_address (op0, mode1, 0);
10074 else if (GET_MODE (op0) == VOIDmode)
10075 op0 = adjust_address (op0, BLKmode, 0);
10076 }
10077
10078 mode2
10079 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10080
10081 /* If we have either an offset, a BLKmode result, or a reference
10082 outside the underlying object, we must force it to memory.
10083 Such a case can occur in Ada if we have unchecked conversion
10084 of an expression from a scalar type to an aggregate type or
10085 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10086 passed a partially uninitialized object or a view-conversion
10087 to a larger size. */
10088 must_force_mem = (offset
10089 || mode1 == BLKmode
10090 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10091
10092 /* Handle CONCAT first. */
10093 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10094 {
10095 if (bitpos == 0
10096 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10097 return op0;
10098 if (bitpos == 0
10099 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10100 && bitsize)
10101 {
10102 op0 = XEXP (op0, 0);
10103 mode2 = GET_MODE (op0);
10104 }
10105 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10106 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10107 && bitpos
10108 && bitsize)
10109 {
10110 op0 = XEXP (op0, 1);
10111 bitpos = 0;
10112 mode2 = GET_MODE (op0);
10113 }
10114 else
10115 /* Otherwise force into memory. */
10116 must_force_mem = 1;
10117 }
10118
10119 /* If this is a constant, put it in a register if it is a legitimate
10120 constant and we don't need a memory reference. */
10121 if (CONSTANT_P (op0)
10122 && mode2 != BLKmode
10123 && targetm.legitimate_constant_p (mode2, op0)
10124 && !must_force_mem)
10125 op0 = force_reg (mode2, op0);
10126
10127 /* Otherwise, if this is a constant, try to force it to the constant
10128 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10129 is a legitimate constant. */
10130 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10131 op0 = validize_mem (memloc);
10132
10133 /* Otherwise, if this is a constant or the object is not in memory
10134 and needs to be, put it there. */
10135 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10136 {
10137 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10138 emit_move_insn (memloc, op0);
10139 op0 = memloc;
10140 mem_attrs_from_type = true;
10141 }
10142
10143 if (offset)
10144 {
10145 enum machine_mode address_mode;
10146 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10147 EXPAND_SUM);
10148
10149 gcc_assert (MEM_P (op0));
10150
10151 address_mode = get_address_mode (op0);
10152 if (GET_MODE (offset_rtx) != address_mode)
10153 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10154
10155 /* See the comment in expand_assignment for the rationale. */
10156 if (mode1 != VOIDmode
10157 && bitpos != 0
10158 && bitsize > 0
10159 && (bitpos % bitsize) == 0
10160 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10161 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10162 {
10163 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10164 bitpos = 0;
10165 }
10166
10167 op0 = offset_address (op0, offset_rtx,
10168 highest_pow2_factor (offset));
10169 }
10170
10171 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10172 record its alignment as BIGGEST_ALIGNMENT. */
10173 if (MEM_P (op0) && bitpos == 0 && offset != 0
10174 && is_aligning_offset (offset, tem))
10175 set_mem_align (op0, BIGGEST_ALIGNMENT);
10176
10177 /* Don't forget about volatility even if this is a bitfield. */
10178 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10179 {
10180 if (op0 == orig_op0)
10181 op0 = copy_rtx (op0);
10182
10183 MEM_VOLATILE_P (op0) = 1;
10184 }
10185
10186 /* In cases where an aligned union has an unaligned object
10187 as a field, we might be extracting a BLKmode value from
10188 an integer-mode (e.g., SImode) object. Handle this case
10189 by doing the extract into an object as wide as the field
10190 (which we know to be the width of a basic mode), then
10191 storing into memory, and changing the mode to BLKmode. */
10192 if (mode1 == VOIDmode
10193 || REG_P (op0) || GET_CODE (op0) == SUBREG
10194 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10195 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10196 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10197 && modifier != EXPAND_CONST_ADDRESS
10198 && modifier != EXPAND_INITIALIZER
10199 && modifier != EXPAND_MEMORY)
10200 /* If the bitfield is volatile and the bitsize
10201 is narrower than the access size of the bitfield,
10202 we need to extract bitfields from the access. */
10203 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10204 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10205 && mode1 != BLKmode
10206 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10207 /* If the field isn't aligned enough to fetch as a memref,
10208 fetch it as a bit field. */
10209 || (mode1 != BLKmode
10210 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10211 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10212 || (MEM_P (op0)
10213 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10214 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10215 && modifier != EXPAND_MEMORY
10216 && ((modifier == EXPAND_CONST_ADDRESS
10217 || modifier == EXPAND_INITIALIZER)
10218 ? STRICT_ALIGNMENT
10219 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10220 || (bitpos % BITS_PER_UNIT != 0)))
10221 /* If the type and the field are a constant size and the
10222 size of the type isn't the same size as the bitfield,
10223 we must use bitfield operations. */
10224 || (bitsize >= 0
10225 && TYPE_SIZE (TREE_TYPE (exp))
10226 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10227 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10228 bitsize)))
10229 {
10230 enum machine_mode ext_mode = mode;
10231
10232 if (ext_mode == BLKmode
10233 && ! (target != 0 && MEM_P (op0)
10234 && MEM_P (target)
10235 && bitpos % BITS_PER_UNIT == 0))
10236 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10237
10238 if (ext_mode == BLKmode)
10239 {
10240 if (target == 0)
10241 target = assign_temp (type, 1, 1);
10242
10243 /* ??? Unlike the similar test a few lines below, this one is
10244 very likely obsolete. */
10245 if (bitsize == 0)
10246 return target;
10247
10248 /* In this case, BITPOS must start at a byte boundary and
10249 TARGET, if specified, must be a MEM. */
10250 gcc_assert (MEM_P (op0)
10251 && (!target || MEM_P (target))
10252 && !(bitpos % BITS_PER_UNIT));
10253
10254 emit_block_move (target,
10255 adjust_address (op0, VOIDmode,
10256 bitpos / BITS_PER_UNIT),
10257 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10258 / BITS_PER_UNIT),
10259 (modifier == EXPAND_STACK_PARM
10260 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10261
10262 return target;
10263 }
10264
10265 /* If we have nothing to extract, the result will be 0 for targets
10266 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10267 return 0 for the sake of consistency, as reading a zero-sized
10268 bitfield is valid in Ada and the value is fully specified. */
10269 if (bitsize == 0)
10270 return const0_rtx;
10271
10272 op0 = validize_mem (op0);
10273
10274 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10275 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10276
10277 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10278 (modifier == EXPAND_STACK_PARM
10279 ? NULL_RTX : target),
10280 ext_mode, ext_mode);
10281
10282 /* If the result is a record type and BITSIZE is narrower than
10283 the mode of OP0, an integral mode, and this is a big endian
10284 machine, we must put the field into the high-order bits. */
10285 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10286 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10287 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10288 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10289 GET_MODE_BITSIZE (GET_MODE (op0))
10290 - bitsize, op0, 1);
10291
10292 /* If the result type is BLKmode, store the data into a temporary
10293 of the appropriate type, but with the mode corresponding to the
10294 mode for the data we have (op0's mode). */
10295 if (mode == BLKmode)
10296 {
10297 rtx new_rtx
10298 = assign_stack_temp_for_type (ext_mode,
10299 GET_MODE_BITSIZE (ext_mode),
10300 type);
10301 emit_move_insn (new_rtx, op0);
10302 op0 = copy_rtx (new_rtx);
10303 PUT_MODE (op0, BLKmode);
10304 }
10305
10306 return op0;
10307 }
10308
10309 /* If the result is BLKmode, use that to access the object
10310 now as well. */
10311 if (mode == BLKmode)
10312 mode1 = BLKmode;
10313
10314 /* Get a reference to just this component. */
10315 if (modifier == EXPAND_CONST_ADDRESS
10316 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10317 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10318 else
10319 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10320
10321 if (op0 == orig_op0)
10322 op0 = copy_rtx (op0);
10323
10324 /* If op0 is a temporary because of forcing to memory, pass only the
10325 type to set_mem_attributes so that the original expression is never
10326 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10327 if (mem_attrs_from_type)
10328 set_mem_attributes (op0, type, 0);
10329 else
10330 set_mem_attributes (op0, exp, 0);
10331
10332 if (REG_P (XEXP (op0, 0)))
10333 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10334
10335 MEM_VOLATILE_P (op0) |= volatilep;
10336 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10337 || modifier == EXPAND_CONST_ADDRESS
10338 || modifier == EXPAND_INITIALIZER)
10339 return op0;
10340
10341 if (target == 0)
10342 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10343
10344 convert_move (target, op0, unsignedp);
10345 return target;
10346 }
10347
10348 case OBJ_TYPE_REF:
10349 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10350
10351 case CALL_EXPR:
10352 /* All valid uses of __builtin_va_arg_pack () are removed during
10353 inlining. */
10354 if (CALL_EXPR_VA_ARG_PACK (exp))
10355 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10356 {
10357 tree fndecl = get_callee_fndecl (exp), attr;
10358
10359 if (fndecl
10360 && (attr = lookup_attribute ("error",
10361 DECL_ATTRIBUTES (fndecl))) != NULL)
10362 error ("%Kcall to %qs declared with attribute error: %s",
10363 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10364 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10365 if (fndecl
10366 && (attr = lookup_attribute ("warning",
10367 DECL_ATTRIBUTES (fndecl))) != NULL)
10368 warning_at (tree_nonartificial_location (exp),
10369 0, "%Kcall to %qs declared with attribute warning: %s",
10370 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10371 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10372
10373 /* Check for a built-in function. */
10374 if (fndecl && DECL_BUILT_IN (fndecl))
10375 {
10376 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10377 return expand_builtin (exp, target, subtarget, tmode, ignore);
10378 }
10379 }
10380 return expand_call (exp, target, ignore);
10381
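/* VIEW_CONVERT_EXPR reinterprets the operand in another type.  Try a
   direct inner memory reference for BLKmode results, gen_lowpart for
   same-sized non-BLK modes, convert_modes for integral types, a
   bit-field extraction for bit-field results, and as a last resort
   spill the operand to memory and reload it in the new mode.  */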
10382 case VIEW_CONVERT_EXPR:
10383 op0 = NULL_RTX;
10384
10385 /* If we are converting to BLKmode, try to avoid an intermediate
10386 temporary by fetching an inner memory reference. */
10387 if (mode == BLKmode
10388 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10389 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10390 && handled_component_p (treeop0))
10391 {
10392 enum machine_mode mode1;
10393 HOST_WIDE_INT bitsize, bitpos;
10394 tree offset;
10395 int unsignedp;
10396 int volatilep = 0;
10397 tree tem
10398 = get_inner_reference (treeop0, &bitsize, &bitpos,
10399 &offset, &mode1, &unsignedp, &volatilep,
10400 true);
10401 rtx orig_op0;
10402
10403 /* ??? We should work harder and deal with non-zero offsets. */
10404 if (!offset
10405 && (bitpos % BITS_PER_UNIT) == 0
10406 && bitsize >= 0
10407 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10408 {
10409 /* See the normal_inner_ref case for the rationale. */
10410 orig_op0
10411 = expand_expr_real (tem,
10412 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10413 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10414 != INTEGER_CST)
10415 && modifier != EXPAND_STACK_PARM
10416 ? target : NULL_RTX),
10417 VOIDmode,
10418 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10419 NULL, true);
10420
10421 if (MEM_P (orig_op0))
10422 {
10423 op0 = orig_op0;
10424
10425 /* Get a reference to just this component. */
10426 if (modifier == EXPAND_CONST_ADDRESS
10427 || modifier == EXPAND_SUM
10428 || modifier == EXPAND_INITIALIZER)
10429 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10430 else
10431 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10432
10433 if (op0 == orig_op0)
10434 op0 = copy_rtx (op0);
10435
10436 set_mem_attributes (op0, treeop0, 0);
10437 if (REG_P (XEXP (op0, 0)))
10438 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10439
10440 MEM_VOLATILE_P (op0) |= volatilep;
10441 }
10442 }
10443 }
10444
10445 if (!op0)
10446 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10447 NULL, inner_reference_p);
10448
10449 /* If the input and output modes are both the same, we are done. */
10450 if (mode == GET_MODE (op0))
10451 ;
10452 /* If neither mode is BLKmode, and both modes are the same size
10453 then we can use gen_lowpart. */
10454 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10455 && (GET_MODE_PRECISION (mode)
10456 == GET_MODE_PRECISION (GET_MODE (op0)))
10457 && !COMPLEX_MODE_P (GET_MODE (op0)))
10458 {
10459 if (GET_CODE (op0) == SUBREG)
10460 op0 = force_reg (GET_MODE (op0), op0);
10461 temp = gen_lowpart_common (mode, op0);
10462 if (temp)
10463 op0 = temp;
10464 else
10465 {
10466 if (!REG_P (op0) && !MEM_P (op0))
10467 op0 = force_reg (GET_MODE (op0), op0);
10468 op0 = gen_lowpart (mode, op0);
10469 }
10470 }
10471 /* If both types are integral, convert from one mode to the other. */
10472 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10473 op0 = convert_modes (mode, GET_MODE (op0), op0,
10474 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10475 /* If the output type is a bit-field type, do an extraction. */
10476 else if (reduce_bit_field)
10477 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10478 TYPE_UNSIGNED (type), NULL_RTX,
10479 mode, mode);
10480 /* As a last resort, spill op0 to memory, and reload it in a
10481 different mode. */
10482 else if (!MEM_P (op0))
10483 {
10484 /* If the operand is not a MEM, force it into memory. Since we
10485 are going to be changing the mode of the MEM, don't call
10486 force_const_mem for constants because we don't allow pool
10487 constants to change mode. */
10488 tree inner_type = TREE_TYPE (treeop0);
10489
10490 gcc_assert (!TREE_ADDRESSABLE (exp));
10491
10492 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10493 target
10494 = assign_stack_temp_for_type
10495 (TYPE_MODE (inner_type),
10496 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10497
10498 emit_move_insn (target, op0);
10499 op0 = target;
10500 }
10501
10502 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10503 output type is such that the operand is known to be aligned, indicate
10504 that it is. Otherwise, we need only be concerned about alignment for
10505 non-BLKmode results. */
10506 if (MEM_P (op0))
10507 {
10508 enum insn_code icode;
10509
10510 if (TYPE_ALIGN_OK (type))
10511 {
10512 /* ??? Copying the MEM without substantially changing it might
10513 run afoul of the code handling volatile memory references in
10514 store_expr, which assumes that TARGET is returned unmodified
10515 if it has been used. */
10516 op0 = copy_rtx (op0);
10517 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10518 }
10519 else if (modifier != EXPAND_WRITE
10520 && modifier != EXPAND_MEMORY
10521 && !inner_reference_p
10522 && mode != BLKmode
10523 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10524 {
10525 /* If the target has special handling for unaligned
10526 loads of this mode, use it. */
10527 if ((icode = optab_handler (movmisalign_optab, mode))
10528 != CODE_FOR_nothing)
10529 {
10530 rtx reg, insn;
10531
10532 op0 = adjust_address (op0, mode, 0);
10533 /* We've already validated the memory, and we're creating a
10534 new pseudo destination. The predicates really can't
10535 fail. */
10536 reg = gen_reg_rtx (mode);
10537
10538 /* Nor can the insn generator. */
10539 insn = GEN_FCN (icode) (reg, op0);
10540 emit_insn (insn);
10541 return reg;
10542 }
10543 else if (STRICT_ALIGNMENT)
10544 {
10545 tree inner_type = TREE_TYPE (treeop0);
10546 HOST_WIDE_INT temp_size
10547 = MAX (int_size_in_bytes (inner_type),
10548 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10549 rtx new_rtx
10550 = assign_stack_temp_for_type (mode, temp_size, type);
10551 rtx new_with_op0_mode
10552 = adjust_address (new_rtx, GET_MODE (op0), 0);
10553
10554 gcc_assert (!TREE_ADDRESSABLE (exp));
10555
10556 if (GET_MODE (op0) == BLKmode)
10557 emit_block_move (new_with_op0_mode, op0,
10558 GEN_INT (GET_MODE_SIZE (mode)),
10559 (modifier == EXPAND_STACK_PARM
10560 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10561 else
10562 emit_move_insn (new_with_op0_mode, op0);
10563
10564 op0 = new_rtx;
10565 }
10566 }
10567
10568 op0 = adjust_address (op0, mode, 0);
10569 }
10570
10571 return op0;
10572
10573 case MODIFY_EXPR:
10574 {
10575 tree lhs = treeop0;
10576 tree rhs = treeop1;
10577 gcc_assert (ignore);
10578
10579 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10580 of size 1. In this case, unless we need the result of the
10581 assignment, we can do this more efficiently with a
10582 test followed by an assignment, if necessary.
10583
10584 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10585 things change so we do, this code should be enhanced to
10586 support it. */
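/* Illustrative sketch of the transformation (field names are hypothetical):
   with 1-bit bitfields x and y in struct s,
     s.x |= s.y;  is expanded as  if (s.y) s.x = 1;
     s.x &= s.y;  is expanded as  if (!s.y) s.x = 0;
   so no read-modify-write of the left-hand bitfield is needed. */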
10587 if (TREE_CODE (lhs) == COMPONENT_REF
10588 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10589 || TREE_CODE (rhs) == BIT_AND_EXPR)
10590 && TREE_OPERAND (rhs, 0) == lhs
10591 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10592 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10593 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10594 {
10595 rtx label = gen_label_rtx ();
10596 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10597 do_jump (TREE_OPERAND (rhs, 1),
10598 value ? label : 0,
10599 value ? 0 : label, -1);
10600 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10601 false);
10602 do_pending_stack_adjust ();
10603 emit_label (label);
10604 return const0_rtx;
10605 }
10606
10607 expand_assignment (lhs, rhs, false);
10608 return const0_rtx;
10609 }
10610
10611 case ADDR_EXPR:
10612 return expand_expr_addr_expr (exp, target, tmode, modifier);
10613
10614 case REALPART_EXPR:
10615 op0 = expand_normal (treeop0);
10616 return read_complex_part (op0, false);
10617
10618 case IMAGPART_EXPR:
10619 op0 = expand_normal (treeop0);
10620 return read_complex_part (op0, true);
10621
10622 case RETURN_EXPR:
10623 case LABEL_EXPR:
10624 case GOTO_EXPR:
10625 case SWITCH_EXPR:
10626 case ASM_EXPR:
10627 /* Expanded in cfgexpand.c. */
10628 gcc_unreachable ();
10629
10630 case TRY_CATCH_EXPR:
10631 case CATCH_EXPR:
10632 case EH_FILTER_EXPR:
10633 case TRY_FINALLY_EXPR:
10634 /* Lowered by tree-eh.c. */
10635 gcc_unreachable ();
10636
10637 case WITH_CLEANUP_EXPR:
10638 case CLEANUP_POINT_EXPR:
10639 case TARGET_EXPR:
10640 case CASE_LABEL_EXPR:
10641 case VA_ARG_EXPR:
10642 case BIND_EXPR:
10643 case INIT_EXPR:
10644 case CONJ_EXPR:
10645 case COMPOUND_EXPR:
10646 case PREINCREMENT_EXPR:
10647 case PREDECREMENT_EXPR:
10648 case POSTINCREMENT_EXPR:
10649 case POSTDECREMENT_EXPR:
10650 case LOOP_EXPR:
10651 case EXIT_EXPR:
10652 case COMPOUND_LITERAL_EXPR:
10653 /* Lowered by gimplify.c. */
10654 gcc_unreachable ();
10655
10656 case FDESC_EXPR:
10657 /* Function descriptors are not valid except as
10658 initialization constants, and should not be expanded. */
10659 gcc_unreachable ();
10660
10661 case WITH_SIZE_EXPR:
10662 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10663 have pulled out the size to use in whatever context it needed. */
10664 return expand_expr_real (treeop0, original_target, tmode,
10665 modifier, alt_rtl, inner_reference_p);
10666
10667 default:
10668 return expand_expr_real_2 (&ops, target, tmode, modifier);
10669 }
10670 }
10671 \f
10672 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10673 signedness of TYPE), possibly returning the result in TARGET. */
10674 static rtx
10675 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10676 {
10677 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10678 if (target && GET_MODE (target) != GET_MODE (exp))
10679 target = 0;
10680 /* For constant values, reduce using build_int_cst_type. */
10681 if (CONST_INT_P (exp))
10682 {
10683 HOST_WIDE_INT value = INTVAL (exp);
10684 tree t = build_int_cst_type (type, value);
10685 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10686 }
10687 else if (TYPE_UNSIGNED (type))
10688 {
10689 enum machine_mode mode = GET_MODE (exp);
10690 rtx mask = immed_wide_int_const
10691 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10692 return expand_and (mode, exp, mask, target);
10693 }
10694 else
10695 {
10696 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10697 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10698 exp, count, target, 0);
10699 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10700 exp, count, target, 0);
10701 }
10702 }
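/* Worked example (a sketch, assuming a 32-bit mode and PREC == 3):
   for an unsigned type the value is simply masked with (1 << 3) - 1 = 7;
   for a signed type the value 5 (binary 101) is shifted left by 29 and
   then arithmetically shifted right by 29, yielding -3, which is the
   correct 3-bit signed interpretation of the bit pattern 101. */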
10703 \f
10704 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
10705 when added to the address of EXP, produces an address known to be
10706 aligned to more than BIGGEST_ALIGNMENT. */
10707
10708 static int
10709 is_aligning_offset (const_tree offset, const_tree exp)
10710 {
10711 /* Strip off any conversions. */
10712 while (CONVERT_EXPR_P (offset))
10713 offset = TREE_OPERAND (offset, 0);
10714
10715 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10716 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
10717 if (TREE_CODE (offset) != BIT_AND_EXPR
10718 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10719 || compare_tree_int (TREE_OPERAND (offset, 1),
10720 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10721 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10722 return 0;
10723
10724 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10725 It must be NEGATE_EXPR. Then strip any more conversions. */
10726 offset = TREE_OPERAND (offset, 0);
10727 while (CONVERT_EXPR_P (offset))
10728 offset = TREE_OPERAND (offset, 0);
10729
10730 if (TREE_CODE (offset) != NEGATE_EXPR)
10731 return 0;
10732
10733 offset = TREE_OPERAND (offset, 0);
10734 while (CONVERT_EXPR_P (offset))
10735 offset = TREE_OPERAND (offset, 0);
10736
10737 /* This must now be the address of EXP. */
10738 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10739 }
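/* Illustrative example of an offset this function recognizes (buf is a
   hypothetical name): the expression
     (- (uintptr_t) &buf) & 31
   where the BIT_AND constant 31 is one less than the power of 2 32 and
   exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding that offset to &buf
   rounds the address up to a 32-byte boundary. */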
10740 \f
10741 /* Return the tree node if ARG corresponds to a string constant, or zero
10742 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10743 in bytes within the string that ARG is accessing. The type of the
10744 offset will be `sizetype'. */
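/* For example (a sketch): for ARG equal to &"hello"[2], or to the
   equivalent POINTER_PLUS_EXPR form "hello" + 2, this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to (sizetype) 2. */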
10745
10746 tree
10747 string_constant (tree arg, tree *ptr_offset)
10748 {
10749 tree array, offset, lower_bound;
10750 STRIP_NOPS (arg);
10751
10752 if (TREE_CODE (arg) == ADDR_EXPR)
10753 {
10754 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10755 {
10756 *ptr_offset = size_zero_node;
10757 return TREE_OPERAND (arg, 0);
10758 }
10759 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10760 {
10761 array = TREE_OPERAND (arg, 0);
10762 offset = size_zero_node;
10763 }
10764 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10765 {
10766 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10767 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10768 if (TREE_CODE (array) != STRING_CST
10769 && TREE_CODE (array) != VAR_DECL)
10770 return 0;
10771
10772 /* Check if the array has a nonzero lower bound. */
10773 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10774 if (!integer_zerop (lower_bound))
10775 {
10776 /* If the offset and base aren't both constants, return 0. */
10777 if (TREE_CODE (lower_bound) != INTEGER_CST)
10778 return 0;
10779 if (TREE_CODE (offset) != INTEGER_CST)
10780 return 0;
10781 /* Adjust offset by the lower bound. */
10782 offset = size_diffop (fold_convert (sizetype, offset),
10783 fold_convert (sizetype, lower_bound));
10784 }
10785 }
10786 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10787 {
10788 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10789 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10790 if (TREE_CODE (array) != ADDR_EXPR)
10791 return 0;
10792 array = TREE_OPERAND (array, 0);
10793 if (TREE_CODE (array) != STRING_CST
10794 && TREE_CODE (array) != VAR_DECL)
10795 return 0;
10796 }
10797 else
10798 return 0;
10799 }
10800 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10801 {
10802 tree arg0 = TREE_OPERAND (arg, 0);
10803 tree arg1 = TREE_OPERAND (arg, 1);
10804
10805 STRIP_NOPS (arg0);
10806 STRIP_NOPS (arg1);
10807
10808 if (TREE_CODE (arg0) == ADDR_EXPR
10809 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10810 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10811 {
10812 array = TREE_OPERAND (arg0, 0);
10813 offset = arg1;
10814 }
10815 else if (TREE_CODE (arg1) == ADDR_EXPR
10816 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10817 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10818 {
10819 array = TREE_OPERAND (arg1, 0);
10820 offset = arg0;
10821 }
10822 else
10823 return 0;
10824 }
10825 else
10826 return 0;
10827
10828 if (TREE_CODE (array) == STRING_CST)
10829 {
10830 *ptr_offset = fold_convert (sizetype, offset);
10831 return array;
10832 }
10833 else if (TREE_CODE (array) == VAR_DECL
10834 || TREE_CODE (array) == CONST_DECL)
10835 {
10836 int length;
10837 tree init = ctor_for_folding (array);
10838
10839 /* Variables initialized to string literals can be handled too. */
10840 if (init == error_mark_node
10841 || !init
10842 || TREE_CODE (init) != STRING_CST)
10843 return 0;
10844
10845 /* Avoid const char foo[4] = "abcde"; */
10846 if (DECL_SIZE_UNIT (array) == NULL_TREE
10847 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10848 || (length = TREE_STRING_LENGTH (init)) <= 0
10849 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10850 return 0;
10851
10852 /* If the variable is bigger than the string literal, OFFSET must be constant
10853 and within the bounds of the string literal. */
10854 offset = fold_convert (sizetype, offset);
10855 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10856 && (! tree_fits_uhwi_p (offset)
10857 || compare_tree_int (offset, length) >= 0))
10858 return 0;
10859
10860 *ptr_offset = offset;
10861 return init;
10862 }
10863
10864 return 0;
10865 }
10866 \f
10867 /* Generate code to calculate the exploded expression OPS
10868 using a store-flag instruction, and return an rtx for the result.
10869 OPS reflects a comparison.
10870
10871 If TARGET is nonzero, store the result there if convenient.
10872
10873 Return zero if there is no suitable set-flag instruction
10874 available on this machine.
10875
10876 Once expand_expr has been called on the arguments of the comparison,
10877 we are committed to doing the store flag, since it is not safe to
10878 re-evaluate the expression. We emit the store-flag insn by calling
10879 emit_store_flag, but only expand the arguments if we have a reason
10880 to believe that emit_store_flag will be successful. If we think that
10881 it will, but it isn't, we have to simulate the store-flag with a
10882 set/jump/set sequence. */
10883
10884 static rtx
10885 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10886 {
10887 enum rtx_code code;
10888 tree arg0, arg1, type;
10889 tree tem;
10890 enum machine_mode operand_mode;
10891 int unsignedp;
10892 rtx op0, op1;
10893 rtx subtarget = target;
10894 location_t loc = ops->location;
10895
10896 arg0 = ops->op0;
10897 arg1 = ops->op1;
10898
10899 /* Don't crash if the comparison was erroneous. */
10900 if (arg0 == error_mark_node || arg1 == error_mark_node)
10901 return const0_rtx;
10902
10903 type = TREE_TYPE (arg0);
10904 operand_mode = TYPE_MODE (type);
10905 unsignedp = TYPE_UNSIGNED (type);
10906
10907 /* We won't bother with BLKmode store-flag operations because it would mean
10908 passing a lot of information to emit_store_flag. */
10909 if (operand_mode == BLKmode)
10910 return 0;
10911
10912 /* We won't bother with store-flag operations involving function pointers
10913 when function pointers must be canonicalized before comparisons. */
10914 #ifdef HAVE_canonicalize_funcptr_for_compare
10915 if (HAVE_canonicalize_funcptr_for_compare
10916 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10917 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10918 == FUNCTION_TYPE))
10919 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10920 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10921 == FUNCTION_TYPE))))
10922 return 0;
10923 #endif
10924
10925 STRIP_NOPS (arg0);
10926 STRIP_NOPS (arg1);
10927
10928 /* For vector typed comparisons emit code to generate the desired
10929 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10930 expander for this. */
10931 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10932 {
10933 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10934 tree if_true = constant_boolean_node (true, ops->type);
10935 tree if_false = constant_boolean_node (false, ops->type);
10936 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10937 }
10938
10939 /* Get the rtx comparison code to use. We know that OPS describes a
10940 comparison operation of some type. Some comparisons against 1 and -1 can be
10941 converted to comparisons with zero. Do so here so that the tests
10942 below will be aware that we have a comparison with zero. These
10943 tests will not catch constants in the first operand, but constants
10944 are rarely passed as the first operand. */
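/* For example, with signed operands:  x < 1  becomes  x <= 0,
   x >= 1  becomes  x > 0,  x <= -1  becomes  x < 0,  and
   x > -1  becomes  x >= 0. */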
10945
10946 switch (ops->code)
10947 {
10948 case EQ_EXPR:
10949 code = EQ;
10950 break;
10951 case NE_EXPR:
10952 code = NE;
10953 break;
10954 case LT_EXPR:
10955 if (integer_onep (arg1))
10956 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10957 else
10958 code = unsignedp ? LTU : LT;
10959 break;
10960 case LE_EXPR:
10961 if (! unsignedp && integer_all_onesp (arg1))
10962 arg1 = integer_zero_node, code = LT;
10963 else
10964 code = unsignedp ? LEU : LE;
10965 break;
10966 case GT_EXPR:
10967 if (! unsignedp && integer_all_onesp (arg1))
10968 arg1 = integer_zero_node, code = GE;
10969 else
10970 code = unsignedp ? GTU : GT;
10971 break;
10972 case GE_EXPR:
10973 if (integer_onep (arg1))
10974 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10975 else
10976 code = unsignedp ? GEU : GE;
10977 break;
10978
10979 case UNORDERED_EXPR:
10980 code = UNORDERED;
10981 break;
10982 case ORDERED_EXPR:
10983 code = ORDERED;
10984 break;
10985 case UNLT_EXPR:
10986 code = UNLT;
10987 break;
10988 case UNLE_EXPR:
10989 code = UNLE;
10990 break;
10991 case UNGT_EXPR:
10992 code = UNGT;
10993 break;
10994 case UNGE_EXPR:
10995 code = UNGE;
10996 break;
10997 case UNEQ_EXPR:
10998 code = UNEQ;
10999 break;
11000 case LTGT_EXPR:
11001 code = LTGT;
11002 break;
11003
11004 default:
11005 gcc_unreachable ();
11006 }
11007
11008 /* Put a constant second. */
11009 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11010 || TREE_CODE (arg0) == FIXED_CST)
11011 {
11012 tem = arg0; arg0 = arg1; arg1 = tem;
11013 code = swap_condition (code);
11014 }
11015
11016 /* If this is an equality or inequality test of a single bit, we can
11017 do this by shifting the bit being tested to the low-order bit and
11018 masking the result with the constant 1. If the condition was EQ,
11019 we xor it with 1. This does not require an scc insn and is faster
11020 than an scc insn even if we have it.
11021
11022 The code to make this transformation was moved into fold_single_bit_test,
11023 so we just call into the folder and expand its result. */
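/* For example (a sketch):  (x & 4) != 0  is expanded as  (x >> 2) & 1,
   and  (x & 4) == 0  as  ((x >> 2) & 1) ^ 1. */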
11024
11025 if ((code == NE || code == EQ)
11026 && integer_zerop (arg1)
11027 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11028 {
11029 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11030 if (srcstmt
11031 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11032 {
11033 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11034 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11035 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11036 gimple_assign_rhs1 (srcstmt),
11037 gimple_assign_rhs2 (srcstmt));
11038 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11039 if (temp)
11040 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11041 }
11042 }
11043
11044 if (! get_subtarget (target)
11045 || GET_MODE (subtarget) != operand_mode)
11046 subtarget = 0;
11047
11048 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11049
11050 if (target == 0)
11051 target = gen_reg_rtx (mode);
11052
11053 /* Try a cstore if possible. */
11054 return emit_store_flag_force (target, code, op0, op1,
11055 operand_mode, unsignedp,
11056 (TYPE_PRECISION (ops->type) == 1
11057 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11058 }
11059 \f
11060
11061 /* Stubs in case we haven't got a casesi insn. */
11062 #ifndef HAVE_casesi
11063 # define HAVE_casesi 0
11064 # define gen_casesi(a, b, c, d, e) (0)
11065 # define CODE_FOR_casesi CODE_FOR_nothing
11066 #endif
11067
11068 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11069 0 otherwise (i.e. if there is no casesi instruction).
11070
11071 DEFAULT_PROBABILITY is the probability of jumping to the default
11072 label. */
11073 int
11074 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11075 rtx table_label, rtx default_label, rtx fallback_label,
11076 int default_probability)
11077 {
11078 struct expand_operand ops[5];
11079 enum machine_mode index_mode = SImode;
11080 rtx op1, op2, index;
11081
11082 if (! HAVE_casesi)
11083 return 0;
11084
11085 /* Convert the index to SImode. */
11086 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11087 {
11088 enum machine_mode omode = TYPE_MODE (index_type);
11089 rtx rangertx = expand_normal (range);
11090
11091 /* We must handle the endpoints in the original mode. */
11092 index_expr = build2 (MINUS_EXPR, index_type,
11093 index_expr, minval);
11094 minval = integer_zero_node;
11095 index = expand_normal (index_expr);
11096 if (default_label)
11097 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11098 omode, 1, default_label,
11099 default_probability);
11100 /* Now we can safely truncate. */
11101 index = convert_to_mode (index_mode, index, 0);
11102 }
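/* Note: because the index mode is wider than SImode here, the range
   check above (when a default label exists) is performed in the
   original mode before the value is truncated to SImode, so an
   out-of-range index is dispatched to the default label rather than
   being silently wrapped. */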
11103 else
11104 {
11105 if (TYPE_MODE (index_type) != index_mode)
11106 {
11107 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11108 index_expr = fold_convert (index_type, index_expr);
11109 }
11110
11111 index = expand_normal (index_expr);
11112 }
11113
11114 do_pending_stack_adjust ();
11115
11116 op1 = expand_normal (minval);
11117 op2 = expand_normal (range);
11118
11119 create_input_operand (&ops[0], index, index_mode);
11120 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11121 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11122 create_fixed_operand (&ops[3], table_label);
11123 create_fixed_operand (&ops[4], (default_label
11124 ? default_label
11125 : fallback_label));
11126 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11127 return 1;
11128 }
11129
11130 /* Attempt to generate a tablejump instruction; same concept. */
11131 #ifndef HAVE_tablejump
11132 #define HAVE_tablejump 0
11133 #define gen_tablejump(x, y) (0)
11134 #endif
11135
11136 /* Subroutine of the next function.
11137
11138 INDEX is the value being switched on, with the lowest value
11139 in the table already subtracted.
11140 MODE is its expected mode (needed if INDEX is constant).
11141 RANGE is the length of the jump table.
11142 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11143
11144 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11145 index value is out of range.
11146 DEFAULT_PROBABILITY is the probability of jumping to
11147 the default label. */
11148
11149 static void
11150 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11151 rtx default_label, int default_probability)
11152 {
11153 rtx temp, vector;
11154
11155 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11156 cfun->cfg->max_jumptable_ents = INTVAL (range);
11157
11158 /* Do an unsigned comparison (in the proper mode) between the index
11159 expression and the value which represents the length of the range.
11160 Since we just finished subtracting the lower bound of the range
11161 from the index expression, this comparison allows us to simultaneously
11162 check that the original index expression value is both greater than
11163 or equal to the minimum value of the range and less than or equal to
11164 the maximum value of the range. */
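/* For example, for case values 5 .. 12 the lower bound 5 has already
   been subtracted, so the single unsigned test  index > 7  rejects both
   original values below 5 (which wrap to large unsigned numbers) and
   values above 12. */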
11165
11166 if (default_label)
11167 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11168 default_label, default_probability);
11169
11170
11171 /* If index is in range, it must fit in Pmode.
11172 Convert to Pmode so we can index with it. */
11173 if (mode != Pmode)
11174 index = convert_to_mode (Pmode, index, 1);
11175
11176 /* Don't let a MEM slip through, because then INDEX that comes
11177 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11178 and break_out_memory_refs will go to work on it and mess it up. */
11179 #ifdef PIC_CASE_VECTOR_ADDRESS
11180 if (flag_pic && !REG_P (index))
11181 index = copy_to_mode_reg (Pmode, index);
11182 #endif
11183
11184 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11185 GET_MODE_SIZE, because this indicates how large insns are. The other
11186 uses should all be Pmode, because they are addresses. This code
11187 could fail if addresses and insns are not the same size. */
11188 index = simplify_gen_binary (MULT, Pmode, index,
11189 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11190 Pmode));
11191 index = simplify_gen_binary (PLUS, Pmode, index,
11192 gen_rtx_LABEL_REF (Pmode, table_label));
11193
11194 #ifdef PIC_CASE_VECTOR_ADDRESS
11195 if (flag_pic)
11196 index = PIC_CASE_VECTOR_ADDRESS (index);
11197 else
11198 #endif
11199 index = memory_address (CASE_VECTOR_MODE, index);
11200 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11201 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11202 convert_move (temp, vector, 0);
11203
11204 emit_jump_insn (gen_tablejump (temp, table_label));
11205
11206 /* If we are generating PIC code or if the table is PC-relative, the
11207 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11208 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11209 emit_barrier ();
11210 }
11211
11212 int
11213 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11214 rtx table_label, rtx default_label, int default_probability)
11215 {
11216 rtx index;
11217
11218 if (! HAVE_tablejump)
11219 return 0;
11220
11221 index_expr = fold_build2 (MINUS_EXPR, index_type,
11222 fold_convert (index_type, index_expr),
11223 fold_convert (index_type, minval));
11224 index = expand_normal (index_expr);
11225 do_pending_stack_adjust ();
11226
11227 do_tablejump (index, TYPE_MODE (index_type),
11228 convert_modes (TYPE_MODE (index_type),
11229 TYPE_MODE (TREE_TYPE (range)),
11230 expand_normal (range),
11231 TYPE_UNSIGNED (TREE_TYPE (range))),
11232 table_label, default_label, default_probability);
11233 return 1;
11234 }
11235
11236 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11237 static rtx
11238 const_vector_from_tree (tree exp)
11239 {
11240 rtvec v;
11241 unsigned i;
11242 int units;
11243 tree elt;
11244 enum machine_mode inner, mode;
11245
11246 mode = TYPE_MODE (TREE_TYPE (exp));
11247
11248 if (initializer_zerop (exp))
11249 return CONST0_RTX (mode);
11250
11251 units = GET_MODE_NUNITS (mode);
11252 inner = GET_MODE_INNER (mode);
11253
11254 v = rtvec_alloc (units);
11255
11256 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11257 {
11258 elt = VECTOR_CST_ELT (exp, i);
11259
11260 if (TREE_CODE (elt) == REAL_CST)
11261 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11262 inner);
11263 else if (TREE_CODE (elt) == FIXED_CST)
11264 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11265 inner);
11266 else
11267 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11268 }
11269
11270 return gen_rtx_CONST_VECTOR (mode, v);
11271 }
11272
11273 /* Build a decl for a personality function given a language prefix. */
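/* For example, for the prefix "gxx" this yields "__gxx_personality_v0"
   with DWARF2 or target unwinding and "__gxx_personality_sj0" with
   SJLJ unwinding. */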
11274
11275 tree
11276 build_personality_function (const char *lang)
11277 {
11278 const char *unwind_and_version;
11279 tree decl, type;
11280 char *name;
11281
11282 switch (targetm_common.except_unwind_info (&global_options))
11283 {
11284 case UI_NONE:
11285 return NULL;
11286 case UI_SJLJ:
11287 unwind_and_version = "_sj0";
11288 break;
11289 case UI_DWARF2:
11290 case UI_TARGET:
11291 unwind_and_version = "_v0";
11292 break;
11293 case UI_SEH:
11294 unwind_and_version = "_seh0";
11295 break;
11296 default:
11297 gcc_unreachable ();
11298 }
11299
11300 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11301
11302 type = build_function_type_list (integer_type_node, integer_type_node,
11303 long_long_unsigned_type_node,
11304 ptr_type_node, ptr_type_node, NULL_TREE);
11305 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11306 get_identifier (name), type);
11307 DECL_ARTIFICIAL (decl) = 1;
11308 DECL_EXTERNAL (decl) = 1;
11309 TREE_PUBLIC (decl) = 1;
11310
11311 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11312 are the flags assigned by targetm.encode_section_info. */
11313 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11314
11315 return decl;
11316 }
11317
11318 /* Extracts the personality function of DECL and returns the corresponding
11319 libfunc. */
11320
11321 rtx
11322 get_personality_function (tree decl)
11323 {
11324 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11325 enum eh_personality_kind pk;
11326
11327 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11328 if (pk == eh_personality_none)
11329 return NULL;
11330
11331 if (!personality
11332 && pk == eh_personality_any)
11333 personality = lang_hooks.eh_personality ();
11334
11335 if (pk == eh_personality_lang)
11336 gcc_assert (personality != NULL_TREE);
11337
11338 return XEXP (DECL_RTL (personality), 0);
11339 }
11340
11341 #include "gt-expr.h"