1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70
71 /* Decide whether a function's arguments should be processed
72 from first to last or from last to first.
73
74 They should be processed from last to first if the stack and args
75 grow in opposite directions, but only if we have push insns. */
76
77 #ifdef PUSH_ROUNDING
78
79 #ifndef PUSH_ARGS_REVERSED
80 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
81 #define PUSH_ARGS_REVERSED /* If it's last to first. */
82 #endif
83 #endif
84
85 #endif
86
87 #ifndef STACK_PUSH_CODE
88 #ifdef STACK_GROWS_DOWNWARD
89 #define STACK_PUSH_CODE PRE_DEC
90 #else
91 #define STACK_PUSH_CODE PRE_INC
92 #endif
93 #endif
94
95
96 /* If this is nonzero, we do not bother generating VOLATILE
97 around volatile memory references, and we are willing to
98 output indirect addresses. If cse is to follow, we reject
99 indirect addresses so a useful potential cse is generated;
100 if it is used only once, instruction combination will produce
101 the same indirect address eventually. */
102 int cse_not_expected;
103
104 /* This structure is used by move_by_pieces to describe the move to
105 be performed. */
106 struct move_by_pieces_d
107 {
108 rtx to;
109 rtx to_addr;
110 int autinc_to;
111 int explicit_inc_to;
112 rtx from;
113 rtx from_addr;
114 int autinc_from;
115 int explicit_inc_from;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 int reverse;
119 };
120
121 /* This structure is used by store_by_pieces to describe the store to
122 be performed. */
123
124 struct store_by_pieces_d
125 {
126 rtx to;
127 rtx to_addr;
128 int autinc_to;
129 int explicit_inc_to;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
133 void *constfundata;
134 int reverse;
135 };
136
137 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
138 struct move_by_pieces_d *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
141 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
142 unsigned HOST_WIDE_INT);
143 static tree emit_block_move_libcall_fn (int);
144 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
145 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
146 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
147 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
148 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
149 struct store_by_pieces_d *);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
154 HOST_WIDE_INT, enum machine_mode,
155 tree, int, alias_set_type);
156 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
157 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
158 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
159 enum machine_mode, tree, alias_set_type, bool);
160
161 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
162
163 static int is_aligning_offset (const_tree, const_tree);
164 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
165 enum expand_modifier);
166 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
167 static rtx do_store_flag (sepops, rtx, enum machine_mode);
168 #ifdef PUSH_ROUNDING
169 static void emit_single_push_insn (enum machine_mode, rtx, tree);
170 #endif
171 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
172 static rtx const_vector_from_tree (tree);
173 static void write_complex_part (rtx, rtx, bool);
174
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
180 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
181 #endif
182
183 /* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
189 #endif
190
191 /* This macro is used to determine whether store_by_pieces should be
192 called to "memset" storage with byte values other than zero. */
193 #ifndef SET_BY_PIECES_P
194 #define SET_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
197 #endif
198
199 /* This macro is used to determine whether store_by_pieces should be
200 called to "memcpy" storage when the source is a constant string. */
201 #ifndef STORE_BY_PIECES_P
202 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
204 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
205 #endif
206 \f
207 /* This is run to set up which modes can be used
208 directly in memory and to initialize the block move optab. It is run
209 at the beginning of compilation and when the target is reinitialized. */
210
211 void
212 init_expr_target (void)
213 {
214 rtx insn, pat;
215 enum machine_mode mode;
216 int num_clobbers;
217 rtx mem, mem1;
218 rtx reg;
219
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225
226 /* A scratch register we can modify in-place below to avoid
227 useless RTL allocations. */
228 reg = gen_rtx_REG (VOIDmode, -1);
229
230 insn = rtx_alloc (INSN);
231 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
232 PATTERN (insn) = pat;
233
234 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235 mode = (enum machine_mode) ((int) mode + 1))
236 {
237 int regno;
238
239 direct_load[(int) mode] = direct_store[(int) mode] = 0;
240 PUT_MODE (mem, mode);
241 PUT_MODE (mem1, mode);
242 PUT_MODE (reg, mode);
243
244 /* See if there is some register that can be used in this mode and
245 directly loaded or stored from memory. */
246
247 if (mode != VOIDmode && mode != BLKmode)
248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 regno++)
251 {
252 if (! HARD_REGNO_MODE_OK (regno, mode))
253 continue;
254
255 SET_REGNO (reg, regno);
256
257 SET_SRC (pat) = mem;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = mem1;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem1;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276 }
277 }
278
279 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
280
281 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
282 mode = GET_MODE_WIDER_MODE (mode))
283 {
284 enum machine_mode srcmode;
285 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
286 srcmode = GET_MODE_WIDER_MODE (srcmode))
287 {
288 enum insn_code ic;
289
290 ic = can_extend_p (mode, srcmode, 0);
291 if (ic == CODE_FOR_nothing)
292 continue;
293
294 PUT_MODE (mem, srcmode);
295
296 if (insn_operand_matches (ic, 1, mem))
297 float_extend_from_mem[mode][srcmode] = true;
298 }
299 }
300 }
301
302 /* This is run at the start of compiling a function. */
303
304 void
305 init_expr (void)
306 {
307 memset (&crtl->expr, 0, sizeof (crtl->expr));
308 }
309 \f
310 /* Copy data from FROM to TO, where the machine modes are not the same.
311 Both modes may be integer, or both may be floating, or both may be
312 fixed-point.
313 UNSIGNEDP should be nonzero if FROM is an unsigned type.
314 This causes zero-extension instead of sign-extension. */
315
316 void
317 convert_move (rtx to, rtx from, int unsignedp)
318 {
319 enum machine_mode to_mode = GET_MODE (to);
320 enum machine_mode from_mode = GET_MODE (from);
321 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
322 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
323 enum insn_code code;
324 rtx libcall;
325
326 /* rtx code for making an equivalent value. */
327 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
328 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
329
330
331 gcc_assert (to_real == from_real);
332 gcc_assert (to_mode != BLKmode);
333 gcc_assert (from_mode != BLKmode);
334
335 /* If the source and destination are already the same, then there's
336 nothing to do. */
337 if (to == from)
338 return;
339
340 /* If FROM is a SUBREG that indicates that we have already done at least
341 the required extension, strip it. We don't handle such SUBREGs as
342 TO here. */
343
344 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
345 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
346 >= GET_MODE_PRECISION (to_mode))
347 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
348 from = gen_lowpart (to_mode, from), from_mode = to_mode;
349
350 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
351
352 if (to_mode == from_mode
353 || (from_mode == VOIDmode && CONSTANT_P (from)))
354 {
355 emit_move_insn (to, from);
356 return;
357 }
358
359 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
360 {
361 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
362
363 if (VECTOR_MODE_P (to_mode))
364 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
365 else
366 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
367
368 emit_move_insn (to, from);
369 return;
370 }
371
372 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
373 {
374 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
375 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
376 return;
377 }
378
379 if (to_real)
380 {
381 rtx value, insns;
382 convert_optab tab;
383
384 gcc_assert ((GET_MODE_PRECISION (from_mode)
385 != GET_MODE_PRECISION (to_mode))
386 || (DECIMAL_FLOAT_MODE_P (from_mode)
387 != DECIMAL_FLOAT_MODE_P (to_mode)));
388
389 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
390 /* Conversion between decimal float and binary float, same size. */
391 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
392 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else
395 tab = trunc_optab;
396
397 /* Try converting directly if the insn is supported. */
398
399 code = convert_optab_handler (tab, to_mode, from_mode);
400 if (code != CODE_FOR_nothing)
401 {
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 return;
405 }
406
407 /* Otherwise use a libcall. */
408 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
409
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
412
413 start_sequence ();
414 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 1, from, from_mode);
416 insns = get_insns ();
417 end_sequence ();
418 emit_libcall_block (insns, to, value,
419 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 from)
421 : gen_rtx_FLOAT_EXTEND (to_mode, from));
422 return;
423 }
424
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
429 {
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
432
433 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
434 != CODE_FOR_nothing);
435
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
439 to, from, UNKNOWN);
440 return;
441 }
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
443 {
444 rtx new_from;
445 enum machine_mode full_mode
446 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
448 enum insn_code icode;
449
450 icode = convert_optab_handler (ctab, full_mode, from_mode);
451 gcc_assert (icode != CODE_FOR_nothing);
452
453 if (to_mode == full_mode)
454 {
455 emit_unop_insn (icode, to, from, UNKNOWN);
456 return;
457 }
458
459 new_from = gen_reg_rtx (full_mode);
460 emit_unop_insn (icode, new_from, from, UNKNOWN);
461
462 /* else proceed to integer conversions below. */
463 from_mode = full_mode;
464 from = new_from;
465 }
466
467 /* Make sure both are fixed-point modes or both are not. */
468 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
469 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
470 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
471 {
472 /* If we widen from_mode to to_mode and they are in the same class,
473 we won't saturate the result.
474 Otherwise, always saturate the result to play it safe. */
475 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
476 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
477 expand_fixed_convert (to, from, 0, 0);
478 else
479 expand_fixed_convert (to, from, 0, 1);
480 return;
481 }
482
483 /* Now both modes are integers. */
484
485 /* Handle expanding beyond a word. */
486 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
487 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
488 {
489 rtx insns;
490 rtx lowpart;
491 rtx fill_value;
492 rtx lowfrom;
493 int i;
494 enum machine_mode lowpart_mode;
495 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
496
497 /* Try converting directly if the insn is supported. */
498 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
499 != CODE_FOR_nothing)
500 {
501 /* If FROM is a SUBREG, put it into a register. Do this
502 so that we always generate the same set of insns for
503 better cse'ing; if an intermediate assignment occurred,
504 we won't be doing the operation directly on the SUBREG. */
505 if (optimize > 0 && GET_CODE (from) == SUBREG)
506 from = force_reg (from_mode, from);
507 emit_unop_insn (code, to, from, equiv_code);
508 return;
509 }
510 /* Next, try converting via full word. */
511 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
512 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
513 != CODE_FOR_nothing))
514 {
515 rtx word_to = gen_reg_rtx (word_mode);
516 if (REG_P (to))
517 {
518 if (reg_overlap_mentioned_p (to, from))
519 from = force_reg (from_mode, from);
520 emit_clobber (to);
521 }
522 convert_move (word_to, from, unsignedp);
523 emit_unop_insn (code, to, word_to, equiv_code);
524 return;
525 }
526
527 /* No special multiword conversion insn; do it by hand. */
528 start_sequence ();
529
530 /* Since we will turn this into a no conflict block, we must ensure that
531 the source does not overlap the target, so force it into an isolated
532 register if it might. Likewise for any MEM input, since the
533 conversion sequence might require several references to it and we
534 must ensure we're getting the same value every time. */
535
536 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
537 from = force_reg (from_mode, from);
538
539 /* Get a copy of FROM widened to a word, if necessary. */
540 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
541 lowpart_mode = word_mode;
542 else
543 lowpart_mode = from_mode;
544
545 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
546
547 lowpart = gen_lowpart (lowpart_mode, to);
548 emit_move_insn (lowpart, lowfrom);
549
550 /* Compute the value to put in each remaining word. */
551 if (unsignedp)
552 fill_value = const0_rtx;
553 else
554 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
555 LT, lowfrom, const0_rtx,
556 VOIDmode, 0, -1);
557
558 /* Fill the remaining words. */
559 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
560 {
561 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
562 rtx subword = operand_subword (to, index, 1, to_mode);
563
564 gcc_assert (subword);
565
566 if (fill_value != subword)
567 emit_move_insn (subword, fill_value);
568 }
569
570 insns = get_insns ();
571 end_sequence ();
572
573 emit_insn (insns);
574 return;
575 }
576
577 /* Truncating multi-word to a word or less. */
578 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
579 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
580 {
581 if (!((MEM_P (from)
582 && ! MEM_VOLATILE_P (from)
583 && direct_load[(int) to_mode]
584 && ! mode_dependent_address_p (XEXP (from, 0),
585 MEM_ADDR_SPACE (from)))
586 || REG_P (from)
587 || GET_CODE (from) == SUBREG))
588 from = force_reg (from_mode, from);
589 convert_move (to, gen_lowpart (word_mode, from), 0);
590 return;
591 }
592
593 /* Now follow all the conversions between integers
594 no more than a word long. */
595
596 /* For truncation, usually we can just refer to FROM in a narrower mode. */
597 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
598 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
599 {
600 if (!((MEM_P (from)
601 && ! MEM_VOLATILE_P (from)
602 && direct_load[(int) to_mode]
603 && ! mode_dependent_address_p (XEXP (from, 0),
604 MEM_ADDR_SPACE (from)))
605 || REG_P (from)
606 || GET_CODE (from) == SUBREG))
607 from = force_reg (from_mode, from);
608 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
609 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
610 from = copy_to_reg (from);
611 emit_move_insn (to, gen_lowpart (to_mode, from));
612 return;
613 }
614
615 /* Handle extension. */
616 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
617 {
618 /* Convert directly if that works. */
619 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
620 != CODE_FOR_nothing)
621 {
622 emit_unop_insn (code, to, from, equiv_code);
623 return;
624 }
625 else
626 {
627 enum machine_mode intermediate;
628 rtx tmp;
629 int shift_amount;
630
631 /* Search for a mode to convert via. */
632 for (intermediate = from_mode; intermediate != VOIDmode;
633 intermediate = GET_MODE_WIDER_MODE (intermediate))
634 if (((can_extend_p (to_mode, intermediate, unsignedp)
635 != CODE_FOR_nothing)
636 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
637 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
638 && (can_extend_p (intermediate, from_mode, unsignedp)
639 != CODE_FOR_nothing))
640 {
641 convert_move (to, convert_to_mode (intermediate, from,
642 unsignedp), unsignedp);
643 return;
644 }
645
646 /* No suitable intermediate mode.
647 Generate what we need with shifts. */
648 shift_amount = (GET_MODE_PRECISION (to_mode)
649 - GET_MODE_PRECISION (from_mode));
650 from = gen_lowpart (to_mode, force_reg (from_mode, from));
651 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
652 to, unsignedp);
653 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
654 to, unsignedp);
655 if (tmp != to)
656 emit_move_insn (to, tmp);
657 return;
658 }
659 }
660
661 /* Support special truncate insns for certain modes. */
662 if (convert_optab_handler (trunc_optab, to_mode,
663 from_mode) != CODE_FOR_nothing)
664 {
665 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
666 to, from, UNKNOWN);
667 return;
668 }
669
670 /* Handle truncation of volatile memrefs, and so on;
671 the things that couldn't be truncated directly,
672 and for which there was no special instruction.
673
674 ??? Code above formerly short-circuited this, for most integer
675 mode pairs, with a force_reg in from_mode followed by a recursive
676 call to this routine. Appears always to have been wrong. */
677 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
678 {
679 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
680 emit_move_insn (to, temp);
681 return;
682 }
683
684 /* Mode combination is not recognized. */
685 gcc_unreachable ();
686 }
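
/* Illustrative sketch only; it is not referenced by the expansion code in
   this file.  The helper name and its operand VAL are made-up examples, and
   the sketch assumes it runs inside an active RTL expansion context (cfun
   and the instruction stream already set up).  */

static rtx ATTRIBUTE_UNUSED
example_widen_in_place (rtx val)
{
  /* Allocate an SImode pseudo and let convert_move emit whatever
     extension (or plain move) is needed to widen VAL, assumed here to
     carry an unsigned QImode value, into it.  */
  rtx dest = gen_reg_rtx (SImode);
  convert_move (dest, val, 1);
  return dest;
}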
687
688 /* Return an rtx for a value that would result
689 from converting X to mode MODE.
690 Both X and MODE may be floating, or both integer.
691 UNSIGNEDP is nonzero if X is an unsigned value.
692 This can be done by referring to a part of X in place
693 or by copying to a new temporary with conversion. */
694
695 rtx
696 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
697 {
698 return convert_modes (mode, VOIDmode, x, unsignedp);
699 }
700
701 /* Return an rtx for a value that would result
702 from converting X from mode OLDMODE to mode MODE.
703 Both modes may be floating, or both integer.
704 UNSIGNEDP is nonzero if X is an unsigned value.
705
706 This can be done by referring to a part of X in place
707 or by copying to a new temporary with conversion.
708
709 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
710
711 rtx
712 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
713 {
714 rtx temp;
715
716 /* If FROM is a SUBREG that indicates that we have already done at least
717 the required extension, strip it. */
718
719 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
720 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
721 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
722 x = gen_lowpart (mode, x);
723
724 if (GET_MODE (x) != VOIDmode)
725 oldmode = GET_MODE (x);
726
727 if (mode == oldmode)
728 return x;
729
730 /* There is one case that we must handle specially: If we are converting
731 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
732 we are to interpret the constant as unsigned, gen_lowpart will do
733 the wrong thing if the constant appears negative. What we want to do is
734 make the high-order word of the constant zero, not all ones. */
735
736 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
737 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
738 && CONST_INT_P (x) && INTVAL (x) < 0)
739 {
740 double_int val = double_int::from_uhwi (INTVAL (x));
741
742 /* We need to zero extend VAL. */
743 if (oldmode != VOIDmode)
744 val = val.zext (GET_MODE_BITSIZE (oldmode));
745
746 return immed_double_int_const (val, mode);
747 }
748
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
753
754 if ((CONST_INT_P (x)
755 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (CONST_DOUBLE_AS_INT_P (x)
759 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
762 || (REG_P (x)
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
766 GET_MODE (x))))))))
767 {
768 /* ?? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (CONST_INT_P (x) && oldmode != VOIDmode
772 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
773 {
774 HOST_WIDE_INT val = INTVAL (x);
775
776 /* We must sign or zero-extend in this case. Start by
777 zero-extending, then sign extend if we need to. */
778 val &= GET_MODE_MASK (oldmode);
779 if (! unsignedp
780 && val_signbit_known_set_p (oldmode, val))
781 val |= ~GET_MODE_MASK (oldmode);
782
783 return gen_int_mode (val, mode);
784 }
785
786 return gen_lowpart (mode, x);
787 }
788
789 /* Converting an integer constant into a vector MODE is always equivalent
790 to a subreg operation. */
791 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
792 {
793 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
794 return simplify_gen_subreg (mode, x, oldmode, 0);
795 }
796
797 temp = gen_reg_rtx (mode);
798 convert_move (temp, x, unsignedp);
799 return temp;
800 }
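
/* Illustrative sketch only: converting a constant, where OLDMODE must be
   supplied because a CONST_INT carries VOIDmode.  The helper name is made
   up; the behaviour described follows the code above.  */

static rtx ATTRIBUTE_UNUSED
example_convert_const (void)
{
  /* Treat 0xff as an unsigned QImode value and widen it to SImode;
     the result is (const_int 255).  With UNSIGNEDP == 0 the same call
     would sign-extend and yield (const_int -1).  */
  return convert_modes (SImode, QImode, GEN_INT (0xff), 1);
}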
801 \f
802 /* Return the largest alignment we can use for doing a move (or store)
803 of MAX_PIECES. ALIGN is the largest alignment we could use. */
804
805 static unsigned int
806 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
807 {
808 enum machine_mode tmode;
809
810 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
811 if (align >= GET_MODE_ALIGNMENT (tmode))
812 align = GET_MODE_ALIGNMENT (tmode);
813 else
814 {
815 enum machine_mode tmode, xmode;
816
817 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
818 tmode != VOIDmode;
819 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
820 if (GET_MODE_SIZE (tmode) > max_pieces
821 || SLOW_UNALIGNED_ACCESS (tmode, align))
822 break;
823
824 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
825 }
826
827 return align;
828 }
829
830 /* Return the widest integer mode no wider than SIZE. If no such mode
831 can be found, return VOIDmode. */
832
833 static enum machine_mode
834 widest_int_mode_for_size (unsigned int size)
835 {
836 enum machine_mode tmode, mode = VOIDmode;
837
838 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
839 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
840 if (GET_MODE_SIZE (tmode) < size)
841 mode = tmode;
842
843 return mode;
844 }
845
846 /* STORE_MAX_PIECES is the number of bytes at a time that we can
847 store efficiently. Due to internal GCC limitations, this is
848 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
849 for an immediate constant. */
850
851 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
852
853 /* Determine whether the LEN bytes can be moved by using several move
854 instructions. Return nonzero if a call to move_by_pieces should
855 succeed. */
856
857 int
858 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
859 unsigned int align ATTRIBUTE_UNUSED)
860 {
861 return MOVE_BY_PIECES_P (len, align);
862 }
863
864 /* Generate several move instructions to copy LEN bytes from block FROM to
865 block TO. (These are MEM rtx's with BLKmode).
866
867 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
868 used to push FROM to the stack.
869
870 ALIGN is maximum stack alignment we can assume.
871
872 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
873 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
874 stpcpy. */
875
876 rtx
877 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
878 unsigned int align, int endp)
879 {
880 struct move_by_pieces_d data;
881 enum machine_mode to_addr_mode;
882 enum machine_mode from_addr_mode = get_address_mode (from);
883 rtx to_addr, from_addr = XEXP (from, 0);
884 unsigned int max_size = MOVE_MAX_PIECES + 1;
885 enum insn_code icode;
886
887 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
888
889 data.offset = 0;
890 data.from_addr = from_addr;
891 if (to)
892 {
893 to_addr_mode = get_address_mode (to);
894 to_addr = XEXP (to, 0);
895 data.to = to;
896 data.autinc_to
897 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
898 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
899 data.reverse
900 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
901 }
902 else
903 {
904 to_addr_mode = VOIDmode;
905 to_addr = NULL_RTX;
906 data.to = NULL_RTX;
907 data.autinc_to = 1;
908 #ifdef STACK_GROWS_DOWNWARD
909 data.reverse = 1;
910 #else
911 data.reverse = 0;
912 #endif
913 }
914 data.to_addr = to_addr;
915 data.from = from;
916 data.autinc_from
917 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
918 || GET_CODE (from_addr) == POST_INC
919 || GET_CODE (from_addr) == POST_DEC);
920
921 data.explicit_inc_from = 0;
922 data.explicit_inc_to = 0;
923 if (data.reverse) data.offset = len;
924 data.len = len;
925
926 /* If copying requires more than two move insns,
927 copy addresses to registers (to make displacements shorter)
928 and use post-increment if available. */
929 if (!(data.autinc_from && data.autinc_to)
930 && move_by_pieces_ninsns (len, align, max_size) > 2)
931 {
932 /* Find the mode of the largest move...
933 MODE might not be used depending on the definitions of the
934 USE_* macros below. */
935 enum machine_mode mode ATTRIBUTE_UNUSED
936 = widest_int_mode_for_size (max_size);
937
938 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
939 {
940 data.from_addr = copy_to_mode_reg (from_addr_mode,
941 plus_constant (from_addr_mode,
942 from_addr, len));
943 data.autinc_from = 1;
944 data.explicit_inc_from = -1;
945 }
946 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
947 {
948 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
949 data.autinc_from = 1;
950 data.explicit_inc_from = 1;
951 }
952 if (!data.autinc_from && CONSTANT_P (from_addr))
953 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
954 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
955 {
956 data.to_addr = copy_to_mode_reg (to_addr_mode,
957 plus_constant (to_addr_mode,
958 to_addr, len));
959 data.autinc_to = 1;
960 data.explicit_inc_to = -1;
961 }
962 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
963 {
964 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
965 data.autinc_to = 1;
966 data.explicit_inc_to = 1;
967 }
968 if (!data.autinc_to && CONSTANT_P (to_addr))
969 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
970 }
971
972 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
973
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
976
977 while (max_size > 1 && data.len > 0)
978 {
979 enum machine_mode mode = widest_int_mode_for_size (max_size);
980
981 if (mode == VOIDmode)
982 break;
983
984 icode = optab_handler (mov_optab, mode);
985 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
986 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
987
988 max_size = GET_MODE_SIZE (mode);
989 }
990
991 /* The code above should have handled everything. */
992 gcc_assert (!data.len);
993
994 if (endp)
995 {
996 rtx to1;
997
998 gcc_assert (!data.reverse);
999 if (data.autinc_to)
1000 {
1001 if (endp == 2)
1002 {
1003 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1004 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1005 else
1006 data.to_addr = copy_to_mode_reg (to_addr_mode,
1007 plus_constant (to_addr_mode,
1008 data.to_addr,
1009 -1));
1010 }
1011 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1012 data.offset);
1013 }
1014 else
1015 {
1016 if (endp == 2)
1017 --data.offset;
1018 to1 = adjust_address (data.to, QImode, data.offset);
1019 }
1020 return to1;
1021 }
1022 else
1023 return data.to;
1024 }
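
/* Illustrative sketch only: a mempcpy-style piecewise copy.  The helper
   name and its operands are made up; TO and FROM are assumed to be BLKmode
   MEMs and ALIGN their alignment in bits.  */

static rtx ATTRIBUTE_UNUSED
example_copy_returning_end (rtx to, rtx from, unsigned HOST_WIDE_INT len,
			    unsigned int align)
{
  /* Callers are expected to check the target's cost cutoff first.  */
  gcc_assert (can_move_by_pieces (len, align));

  /* ENDP == 1 asks for the address just past the last byte written,
     a la mempcpy; 0 would return TO itself and 2 the end minus one.  */
  return move_by_pieces (to, from, len, align, 1);
}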
1025
1026 /* Return number of insns required to move L bytes by pieces.
1027 ALIGN (in bits) is maximum alignment we can assume. */
1028
1029 unsigned HOST_WIDE_INT
1030 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1031 unsigned int max_size)
1032 {
1033 unsigned HOST_WIDE_INT n_insns = 0;
1034
1035 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1036
1037 while (max_size > 1 && l > 0)
1038 {
1039 enum machine_mode mode;
1040 enum insn_code icode;
1041
1042 mode = widest_int_mode_for_size (max_size);
1043
1044 if (mode == VOIDmode)
1045 break;
1046
1047 icode = optab_handler (mov_optab, mode);
1048 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1049 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1050
1051 max_size = GET_MODE_SIZE (mode);
1052 }
1053
1054 gcc_assert (!l);
1055 return n_insns;
1056 }
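
/* A worked example, purely illustrative and assuming a 64-bit target where
   MOVE_MAX_PIECES is 8, the mov optabs for DImode, SImode, HImode and
   QImode all exist, and ALIGN is not a limiting factor: a call with L == 11
   and MAX_SIZE == MOVE_MAX_PIECES + 1 == 9 (as in MOVE_BY_PIECES_P above)
   counts one DImode move (8 bytes, 3 left), no SImode move, one HImode
   move (1 left) and one QImode move, returning 3.  */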
1057
1058 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1059 with move instructions for mode MODE. GENFUN is the gen_... function
1060 to make a move insn for that mode. DATA has all the other info. */
1061
1062 static void
1063 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1064 struct move_by_pieces_d *data)
1065 {
1066 unsigned int size = GET_MODE_SIZE (mode);
1067 rtx to1 = NULL_RTX, from1;
1068
1069 while (data->len >= size)
1070 {
1071 if (data->reverse)
1072 data->offset -= size;
1073
1074 if (data->to)
1075 {
1076 if (data->autinc_to)
1077 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1078 data->offset);
1079 else
1080 to1 = adjust_address (data->to, mode, data->offset);
1081 }
1082
1083 if (data->autinc_from)
1084 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1085 data->offset);
1086 else
1087 from1 = adjust_address (data->from, mode, data->offset);
1088
1089 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1090 emit_insn (gen_add2_insn (data->to_addr,
1091 gen_int_mode (-(HOST_WIDE_INT) size,
1092 GET_MODE (data->to_addr))));
1093 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1094 emit_insn (gen_add2_insn (data->from_addr,
1095 gen_int_mode (-(HOST_WIDE_INT) size,
1096 GET_MODE (data->from_addr))));
1097
1098 if (data->to)
1099 emit_insn ((*genfun) (to1, from1));
1100 else
1101 {
1102 #ifdef PUSH_ROUNDING
1103 emit_single_push_insn (mode, from1, NULL);
1104 #else
1105 gcc_unreachable ();
1106 #endif
1107 }
1108
1109 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 gen_int_mode (size,
1112 GET_MODE (data->to_addr))));
1113 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1114 emit_insn (gen_add2_insn (data->from_addr,
1115 gen_int_mode (size,
1116 GET_MODE (data->from_addr))));
1117
1118 if (! data->reverse)
1119 data->offset += size;
1120
1121 data->len -= size;
1122 }
1123 }
1124 \f
1125 /* Emit code to move a block Y to a block X. This may be done with
1126 string-move instructions, with multiple scalar move instructions,
1127 or with a library call.
1128
1129 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1130 SIZE is an rtx that says how long they are.
1131 ALIGN is the maximum alignment we can assume they have.
1132 METHOD describes what kind of copy this is, and what mechanisms may be used.
1133 MIN_SIZE is the minimal size of the block to move.
1134 MAX_SIZE is the maximal size of the block to move; if it cannot be represented
1135 in an unsigned HOST_WIDE_INT, it is a mask of all ones.
1136
1137 Return the address of the new block, if memcpy is called and returns it,
1138 0 otherwise. */
1139
1140 rtx
1141 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1142 unsigned int expected_align, HOST_WIDE_INT expected_size,
1143 unsigned HOST_WIDE_INT min_size,
1144 unsigned HOST_WIDE_INT max_size,
1145 unsigned HOST_WIDE_INT probable_max_size)
1146 {
1147 bool may_use_call;
1148 rtx retval = 0;
1149 unsigned int align;
1150
1151 gcc_assert (size);
1152 if (CONST_INT_P (size)
1153 && INTVAL (size) == 0)
1154 return 0;
1155
1156 switch (method)
1157 {
1158 case BLOCK_OP_NORMAL:
1159 case BLOCK_OP_TAILCALL:
1160 may_use_call = true;
1161 break;
1162
1163 case BLOCK_OP_CALL_PARM:
1164 may_use_call = block_move_libcall_safe_for_call_parm ();
1165
1166 /* Make inhibit_defer_pop nonzero around the library call
1167 to force it to pop the arguments right away. */
1168 NO_DEFER_POP;
1169 break;
1170
1171 case BLOCK_OP_NO_LIBCALL:
1172 may_use_call = false;
1173 break;
1174
1175 default:
1176 gcc_unreachable ();
1177 }
1178
1179 gcc_assert (MEM_P (x) && MEM_P (y));
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1181 gcc_assert (align >= BITS_PER_UNIT);
1182
1183 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1184 block copy is more efficient for other large modes, e.g. DCmode. */
1185 x = adjust_address (x, BLKmode, 0);
1186 y = adjust_address (y, BLKmode, 0);
1187
1188 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1189 can be incorrect is coming from __builtin_memcpy. */
1190 if (CONST_INT_P (size))
1191 {
1192 x = shallow_copy_rtx (x);
1193 y = shallow_copy_rtx (y);
1194 set_mem_size (x, INTVAL (size));
1195 set_mem_size (y, INTVAL (size));
1196 }
1197
1198 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1199 move_by_pieces (x, y, INTVAL (size), align, 0);
1200 else if (emit_block_move_via_movmem (x, y, size, align,
1201 expected_align, expected_size,
1202 min_size, max_size, probable_max_size))
1203 ;
1204 else if (may_use_call
1205 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1206 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1207 {
1208 /* Since x and y are passed to a libcall, mark the corresponding
1209 tree EXPR as addressable. */
1210 tree y_expr = MEM_EXPR (y);
1211 tree x_expr = MEM_EXPR (x);
1212 if (y_expr)
1213 mark_addressable (y_expr);
1214 if (x_expr)
1215 mark_addressable (x_expr);
1216 retval = emit_block_move_via_libcall (x, y, size,
1217 method == BLOCK_OP_TAILCALL);
1218 }
1219
1220 else
1221 emit_block_move_via_loop (x, y, size, align);
1222
1223 if (method == BLOCK_OP_CALL_PARM)
1224 OK_DEFER_POP;
1225
1226 return retval;
1227 }
1228
1229 rtx
1230 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1231 {
1232 unsigned HOST_WIDE_INT max, min = 0;
1233 if (GET_CODE (size) == CONST_INT)
1234 min = max = UINTVAL (size);
1235 else
1236 max = GET_MODE_MASK (GET_MODE (size));
1237 return emit_block_move_hints (x, y, size, method, 0, -1,
1238 min, max, max);
1239 }
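
/* Illustrative sketch only: the common fixed-size case.  DST and SRC are
   assumed to be BLKmode MEMs prepared by the caller; the helper name is
   made up.  */

static rtx ATTRIBUTE_UNUSED
example_copy_block (rtx dst, rtx src, HOST_WIDE_INT nbytes)
{
  /* With a CONST_INT size, the call may use move_by_pieces, a movmem
     pattern, a memcpy libcall or a byte-copy loop, in that order of
     preference; the memcpy return value (or 0) is returned.  */
  return emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}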
1240
1241 /* A subroutine of emit_block_move. Returns true if calling the
1242 block move libcall will not clobber any parameters which may have
1243 already been placed on the stack. */
1244
1245 static bool
1246 block_move_libcall_safe_for_call_parm (void)
1247 {
1248 #if defined (REG_PARM_STACK_SPACE)
1249 tree fn;
1250 #endif
1251
1252 /* If arguments are pushed on the stack, then they're safe. */
1253 if (PUSH_ARGS)
1254 return true;
1255
1256 /* If registers go on the stack anyway, any argument is sure to clobber
1257 an outgoing argument. */
1258 #if defined (REG_PARM_STACK_SPACE)
1259 fn = emit_block_move_libcall_fn (false);
1260 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1261 depend on its argument. */
1262 (void) fn;
1263 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1264 && REG_PARM_STACK_SPACE (fn) != 0)
1265 return false;
1266 #endif
1267
1268 /* If any argument goes in memory, then it might clobber an outgoing
1269 argument. */
1270 {
1271 CUMULATIVE_ARGS args_so_far_v;
1272 cumulative_args_t args_so_far;
1273 tree fn, arg;
1274
1275 fn = emit_block_move_libcall_fn (false);
1276 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1277 args_so_far = pack_cumulative_args (&args_so_far_v);
1278
1279 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1280 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1281 {
1282 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1283 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1284 NULL_TREE, true);
1285 if (!tmp || !REG_P (tmp))
1286 return false;
1287 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1288 return false;
1289 targetm.calls.function_arg_advance (args_so_far, mode,
1290 NULL_TREE, true);
1291 }
1292 }
1293 return true;
1294 }
1295
1296 /* A subroutine of emit_block_move. Expand a movmem pattern;
1297 return true if successful. */
1298
1299 static bool
1300 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1301 unsigned int expected_align, HOST_WIDE_INT expected_size,
1302 unsigned HOST_WIDE_INT min_size,
1303 unsigned HOST_WIDE_INT max_size,
1304 unsigned HOST_WIDE_INT probable_max_size)
1305 {
1306 int save_volatile_ok = volatile_ok;
1307 enum machine_mode mode;
1308
1309 if (expected_align < align)
1310 expected_align = align;
1311 if (expected_size != -1)
1312 {
1313 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1314 expected_size = probable_max_size;
1315 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1316 expected_size = min_size;
1317 }
1318
1319 /* Since this is a move insn, we don't care about volatility. */
1320 volatile_ok = 1;
1321
1322 /* Try the most limited insn first, because there's no point
1323 including more than one in the machine description unless
1324 the more limited one has some advantage. */
1325
1326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1327 mode = GET_MODE_WIDER_MODE (mode))
1328 {
1329 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1330
1331 if (code != CODE_FOR_nothing
1332 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1333 here because if SIZE is less than the mode mask, as it is
1334 returned by the macro, it will definitely be less than the
1335 actual mode mask. Since SIZE is within the Pmode address
1336 space, we limit MODE to Pmode. */
1337 && ((CONST_INT_P (size)
1338 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1339 <= (GET_MODE_MASK (mode) >> 1)))
1340 || max_size <= (GET_MODE_MASK (mode) >> 1)
1341 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1342 {
1343 struct expand_operand ops[9];
1344 unsigned int nops;
1345
1346 /* ??? When called via emit_block_move_for_call, it'd be
1347 nice if there were some way to inform the backend, so
1348 that it doesn't fail the expansion because it thinks
1349 emitting the libcall would be more efficient. */
1350 nops = insn_data[(int) code].n_generator_args;
1351 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1352
1353 create_fixed_operand (&ops[0], x);
1354 create_fixed_operand (&ops[1], y);
1355 /* The check above guarantees that this size conversion is valid. */
1356 create_convert_operand_to (&ops[2], size, mode, true);
1357 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1358 if (nops >= 6)
1359 {
1360 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1361 create_integer_operand (&ops[5], expected_size);
1362 }
1363 if (nops >= 8)
1364 {
1365 create_integer_operand (&ops[6], min_size);
1366 /* If we cannot represent the maximal size,
1367 make the parameter NULL. */
1368 if ((HOST_WIDE_INT) max_size != -1)
1369 create_integer_operand (&ops[7], max_size);
1370 else
1371 create_fixed_operand (&ops[7], NULL);
1372 }
1373 if (nops == 9)
1374 {
1375 /* If we cannot represent the maximal size,
1376 make the parameter NULL. */
1377 if ((HOST_WIDE_INT) probable_max_size != -1)
1378 create_integer_operand (&ops[8], probable_max_size);
1379 else
1380 create_fixed_operand (&ops[8], NULL);
1381 }
1382 if (maybe_expand_insn (code, nops, ops))
1383 {
1384 volatile_ok = save_volatile_ok;
1385 return true;
1386 }
1387 }
1388 }
1389
1390 volatile_ok = save_volatile_ok;
1391 return false;
1392 }
1393
1394 /* A subroutine of emit_block_move. Expand a call to memcpy.
1395 Return the return value from memcpy, 0 otherwise. */
1396
1397 rtx
1398 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1399 {
1400 rtx dst_addr, src_addr;
1401 tree call_expr, fn, src_tree, dst_tree, size_tree;
1402 enum machine_mode size_mode;
1403 rtx retval;
1404
1405 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1406 pseudos. We can then place those new pseudos into a VAR_DECL and
1407 use them later. */
1408
1409 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1410 src_addr = copy_addr_to_reg (XEXP (src, 0));
1411
1412 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1413 src_addr = convert_memory_address (ptr_mode, src_addr);
1414
1415 dst_tree = make_tree (ptr_type_node, dst_addr);
1416 src_tree = make_tree (ptr_type_node, src_addr);
1417
1418 size_mode = TYPE_MODE (sizetype);
1419
1420 size = convert_to_mode (size_mode, size, 1);
1421 size = copy_to_mode_reg (size_mode, size);
1422
1423 /* It is incorrect to use the libcall calling conventions to call
1424 memcpy in this context. This could be a user call to memcpy and
1425 the user may wish to examine the return value from memcpy. For
1426 targets where libcalls and normal calls have different conventions
1427 for returning pointers, we could end up generating incorrect code. */
1428
1429 size_tree = make_tree (sizetype, size);
1430
1431 fn = emit_block_move_libcall_fn (true);
1432 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1433 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1434
1435 retval = expand_normal (call_expr);
1436
1437 return retval;
1438 }
1439
1440 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1441 for the function we use for block copies. */
1442
1443 static GTY(()) tree block_move_fn;
1444
1445 void
1446 init_block_move_fn (const char *asmspec)
1447 {
1448 if (!block_move_fn)
1449 {
1450 tree args, fn, attrs, attr_args;
1451
1452 fn = get_identifier ("memcpy");
1453 args = build_function_type_list (ptr_type_node, ptr_type_node,
1454 const_ptr_type_node, sizetype,
1455 NULL_TREE);
1456
1457 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1458 DECL_EXTERNAL (fn) = 1;
1459 TREE_PUBLIC (fn) = 1;
1460 DECL_ARTIFICIAL (fn) = 1;
1461 TREE_NOTHROW (fn) = 1;
1462 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1463 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1464
1465 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1466 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1467
1468 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1469
1470 block_move_fn = fn;
1471 }
1472
1473 if (asmspec)
1474 set_user_assembler_name (block_move_fn, asmspec);
1475 }
1476
1477 static tree
1478 emit_block_move_libcall_fn (int for_call)
1479 {
1480 static bool emitted_extern;
1481
1482 if (!block_move_fn)
1483 init_block_move_fn (NULL);
1484
1485 if (for_call && !emitted_extern)
1486 {
1487 emitted_extern = true;
1488 make_decl_rtl (block_move_fn);
1489 }
1490
1491 return block_move_fn;
1492 }
1493
1494 /* A subroutine of emit_block_move. Copy the data via an explicit
1495 loop. This is used only when libcalls are forbidden. */
1496 /* ??? It'd be nice to copy in hunks larger than QImode. */
1497
1498 static void
1499 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1500 unsigned int align ATTRIBUTE_UNUSED)
1501 {
1502 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1503 enum machine_mode x_addr_mode = get_address_mode (x);
1504 enum machine_mode y_addr_mode = get_address_mode (y);
1505 enum machine_mode iter_mode;
1506
1507 iter_mode = GET_MODE (size);
1508 if (iter_mode == VOIDmode)
1509 iter_mode = word_mode;
1510
1511 top_label = gen_label_rtx ();
1512 cmp_label = gen_label_rtx ();
1513 iter = gen_reg_rtx (iter_mode);
1514
1515 emit_move_insn (iter, const0_rtx);
1516
1517 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1518 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1519 do_pending_stack_adjust ();
1520
1521 emit_jump (cmp_label);
1522 emit_label (top_label);
1523
1524 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1525 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1526
1527 if (x_addr_mode != y_addr_mode)
1528 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1529 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1530
1531 x = change_address (x, QImode, x_addr);
1532 y = change_address (y, QImode, y_addr);
1533
1534 emit_move_insn (x, y);
1535
1536 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1537 true, OPTAB_LIB_WIDEN);
1538 if (tmp != iter)
1539 emit_move_insn (iter, tmp);
1540
1541 emit_label (cmp_label);
1542
1543 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1544 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1545 }
1546 \f
1547 /* Copy all or part of a value X into registers starting at REGNO.
1548 The number of registers to be filled is NREGS. */
1549
1550 void
1551 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1552 {
1553 int i;
1554 #ifdef HAVE_load_multiple
1555 rtx pat;
1556 rtx last;
1557 #endif
1558
1559 if (nregs == 0)
1560 return;
1561
1562 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1563 x = validize_mem (force_const_mem (mode, x));
1564
1565 /* See if the machine can do this with a load multiple insn. */
1566 #ifdef HAVE_load_multiple
1567 if (HAVE_load_multiple)
1568 {
1569 last = get_last_insn ();
1570 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1571 GEN_INT (nregs));
1572 if (pat)
1573 {
1574 emit_insn (pat);
1575 return;
1576 }
1577 else
1578 delete_insns_since (last);
1579 }
1580 #endif
1581
1582 for (i = 0; i < nregs; i++)
1583 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1584 operand_subword_force (x, i, mode));
1585 }
1586
1587 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1588 The number of registers to be filled is NREGS. */
1589
1590 void
1591 move_block_from_reg (int regno, rtx x, int nregs)
1592 {
1593 int i;
1594
1595 if (nregs == 0)
1596 return;
1597
1598 /* See if the machine can do this with a store multiple insn. */
1599 #ifdef HAVE_store_multiple
1600 if (HAVE_store_multiple)
1601 {
1602 rtx last = get_last_insn ();
1603 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1604 GEN_INT (nregs));
1605 if (pat)
1606 {
1607 emit_insn (pat);
1608 return;
1609 }
1610 else
1611 delete_insns_since (last);
1612 }
1613 #endif
1614
1615 for (i = 0; i < nregs; i++)
1616 {
1617 rtx tem = operand_subword (x, i, 1, BLKmode);
1618
1619 gcc_assert (tem);
1620
1621 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1622 }
1623 }
1624
1625 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1626 ORIG, where ORIG is a non-consecutive group of registers represented by
1627 a PARALLEL. The clone is identical to the original except in that the
1628 original set of registers is replaced by a new set of pseudo registers.
1629 The new set has the same modes as the original set. */
1630
1631 rtx
1632 gen_group_rtx (rtx orig)
1633 {
1634 int i, length;
1635 rtx *tmps;
1636
1637 gcc_assert (GET_CODE (orig) == PARALLEL);
1638
1639 length = XVECLEN (orig, 0);
1640 tmps = XALLOCAVEC (rtx, length);
1641
1642 /* Skip a NULL entry in first slot. */
1643 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1644
1645 if (i)
1646 tmps[0] = 0;
1647
1648 for (; i < length; i++)
1649 {
1650 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1651 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1652
1653 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1654 }
1655
1656 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1657 }
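
/* Illustrative sketch only: what a register group looks like and how it is
   cloned.  The hard register numbers 0 and 1 and the use of DImode are
   arbitrary examples, not a statement about any target.  */

static rtx ATTRIBUTE_UNUSED
example_clone_group (void)
{
  /* A group returning a 16-byte aggregate in two DImode registers,
     at byte offsets 0 and 8.  */
  rtx group
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 0),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 1),
						      GEN_INT (8))));

  /* The clone keeps the modes and offsets but uses fresh pseudos.  */
  return gen_group_rtx (group);
}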
1658
1659 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1660 except that values are placed in TMPS[i], and must later be moved
1661 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1662
1663 static void
1664 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1665 {
1666 rtx src;
1667 int start, i;
1668 enum machine_mode m = GET_MODE (orig_src);
1669
1670 gcc_assert (GET_CODE (dst) == PARALLEL);
1671
1672 if (m != VOIDmode
1673 && !SCALAR_INT_MODE_P (m)
1674 && !MEM_P (orig_src)
1675 && GET_CODE (orig_src) != CONCAT)
1676 {
1677 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1678 if (imode == BLKmode)
1679 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1680 else
1681 src = gen_reg_rtx (imode);
1682 if (imode != BLKmode)
1683 src = gen_lowpart (GET_MODE (orig_src), src);
1684 emit_move_insn (src, orig_src);
1685 /* ...and back again. */
1686 if (imode != BLKmode)
1687 src = gen_lowpart (imode, src);
1688 emit_group_load_1 (tmps, dst, src, type, ssize);
1689 return;
1690 }
1691
1692 /* Check for a NULL entry, used to indicate that the parameter goes
1693 both on the stack and in registers. */
1694 if (XEXP (XVECEXP (dst, 0, 0), 0))
1695 start = 0;
1696 else
1697 start = 1;
1698
1699 /* Process the pieces. */
1700 for (i = start; i < XVECLEN (dst, 0); i++)
1701 {
1702 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1703 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1704 unsigned int bytelen = GET_MODE_SIZE (mode);
1705 int shift = 0;
1706
1707 /* Handle trailing fragments that run over the size of the struct. */
1708 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1709 {
1710 /* Arrange to shift the fragment to where it belongs.
1711 extract_bit_field loads to the lsb of the reg. */
1712 if (
1713 #ifdef BLOCK_REG_PADDING
1714 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1715 == (BYTES_BIG_ENDIAN ? upward : downward)
1716 #else
1717 BYTES_BIG_ENDIAN
1718 #endif
1719 )
1720 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1721 bytelen = ssize - bytepos;
1722 gcc_assert (bytelen > 0);
1723 }
1724
1725 /* If we won't be loading directly from memory, protect the real source
1726 from strange tricks we might play; but make sure that the source can
1727 be loaded directly into the destination. */
1728 src = orig_src;
1729 if (!MEM_P (orig_src)
1730 && (!CONSTANT_P (orig_src)
1731 || (GET_MODE (orig_src) != mode
1732 && GET_MODE (orig_src) != VOIDmode)))
1733 {
1734 if (GET_MODE (orig_src) == VOIDmode)
1735 src = gen_reg_rtx (mode);
1736 else
1737 src = gen_reg_rtx (GET_MODE (orig_src));
1738
1739 emit_move_insn (src, orig_src);
1740 }
1741
1742 /* Optimize the access just a bit. */
1743 if (MEM_P (src)
1744 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1745 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1746 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1747 && bytelen == GET_MODE_SIZE (mode))
1748 {
1749 tmps[i] = gen_reg_rtx (mode);
1750 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1751 }
1752 else if (COMPLEX_MODE_P (mode)
1753 && GET_MODE (src) == mode
1754 && bytelen == GET_MODE_SIZE (mode))
1755 /* Let emit_move_complex do the bulk of the work. */
1756 tmps[i] = src;
1757 else if (GET_CODE (src) == CONCAT)
1758 {
1759 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1760 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1761
1762 if ((bytepos == 0 && bytelen == slen0)
1763 || (bytepos != 0 && bytepos + bytelen <= slen))
1764 {
1765 /* The following assumes that the concatenated objects all
1766 have the same size. In this case, a simple calculation
1767 can be used to determine the object and the bit field
1768 to be extracted. */
1769 tmps[i] = XEXP (src, bytepos / slen0);
1770 if (! CONSTANT_P (tmps[i])
1771 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1772 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1773 (bytepos % slen0) * BITS_PER_UNIT,
1774 1, NULL_RTX, mode, mode);
1775 }
1776 else
1777 {
1778 rtx mem;
1779
1780 gcc_assert (!bytepos);
1781 mem = assign_stack_temp (GET_MODE (src), slen);
1782 emit_move_insn (mem, src);
1783 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1784 0, 1, NULL_RTX, mode, mode);
1785 }
1786 }
1787 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1788 SIMD register, which is currently broken. Until we get GCC
1789 to emit proper RTL for these cases, let's dump to memory. */
1790 else if (VECTOR_MODE_P (GET_MODE (dst))
1791 && REG_P (src))
1792 {
1793 int slen = GET_MODE_SIZE (GET_MODE (src));
1794 rtx mem;
1795
1796 mem = assign_stack_temp (GET_MODE (src), slen);
1797 emit_move_insn (mem, src);
1798 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1799 }
1800 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1801 && XVECLEN (dst, 0) > 1)
1802 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1803 else if (CONSTANT_P (src))
1804 {
1805 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1806
1807 if (len == ssize)
1808 tmps[i] = src;
1809 else
1810 {
1811 rtx first, second;
1812
1813 gcc_assert (2 * len == ssize);
1814 split_double (src, &first, &second);
1815 if (i)
1816 tmps[i] = second;
1817 else
1818 tmps[i] = first;
1819 }
1820 }
1821 else if (REG_P (src) && GET_MODE (src) == mode)
1822 tmps[i] = src;
1823 else
1824 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1825 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1826 mode, mode);
1827
1828 if (shift)
1829 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1830 shift, tmps[i], 0);
1831 }
1832 }
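/* Illustrative note (an addition, not part of the original source): the
   DST operand handled above is the register-group descriptor that back
   ends build for values split across several registers.  Assuming a
   hypothetical target that passes a 16-byte aggregate in two DImode
   registers, it would look roughly like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with the byte offset of the piece
   it receives, which is exactly what the XEXP/XVECEXP accesses in the
   loop above pull apart.  */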
1833
1834 /* Emit code to move a block SRC of type TYPE to a block DST,
1835 where DST is non-consecutive registers represented by a PARALLEL.
1836 SSIZE represents the total size of block SRC in bytes, or -1
1837 if not known. */
1838
1839 void
1840 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1841 {
1842 rtx *tmps;
1843 int i;
1844
1845 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1846 emit_group_load_1 (tmps, dst, src, type, ssize);
1847
1848 /* Copy the extracted pieces into the proper (probable) hard regs. */
1849 for (i = 0; i < XVECLEN (dst, 0); i++)
1850 {
1851 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1852 if (d == NULL)
1853 continue;
1854 emit_move_insn (d, tmps[i]);
1855 }
1856 }
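/* Illustrative sketch (hypothetical names, not taken from this file):
   given a PARALLEL REGS of the shape shown above and a BLKmode MEM
   SRCMEM holding the aggregate, a caller would typically write

     emit_group_load (regs, srcmem, type, int_size_in_bytes (type));

   after which each register listed in REGS holds its piece of the
   value.  */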
1857
1858 /* Similar, but load SRC into new pseudos in a format that looks like
1859 PARALLEL. This can later be fed to emit_group_move to get things
1860 in the right place. */
1861
1862 rtx
1863 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1864 {
1865 rtvec vec;
1866 int i;
1867
1868 vec = rtvec_alloc (XVECLEN (parallel, 0));
1869 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1870
1871 /* Convert the vector to look just like the original PARALLEL, except
1872 with the computed values. */
1873 for (i = 0; i < XVECLEN (parallel, 0); i++)
1874 {
1875 rtx e = XVECEXP (parallel, 0, i);
1876 rtx d = XEXP (e, 0);
1877
1878 if (d)
1879 {
1880 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1881 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1882 }
1883 RTVEC_ELT (vec, i) = e;
1884 }
1885
1886 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1887 }
1888
1889 /* Emit code to move a block SRC to block DST, where SRC and DST are
1890 non-consecutive groups of registers, each represented by a PARALLEL. */
1891
1892 void
1893 emit_group_move (rtx dst, rtx src)
1894 {
1895 int i;
1896
1897 gcc_assert (GET_CODE (src) == PARALLEL
1898 && GET_CODE (dst) == PARALLEL
1899 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1900
1901 /* Skip first entry if NULL. */
1902 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1903 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1904 XEXP (XVECEXP (src, 0, i), 0));
1905 }
1906
1907 /* Move a group of registers represented by a PARALLEL into pseudos. */
1908
1909 rtx
1910 emit_group_move_into_temps (rtx src)
1911 {
1912 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1913 int i;
1914
1915 for (i = 0; i < XVECLEN (src, 0); i++)
1916 {
1917 rtx e = XVECEXP (src, 0, i);
1918 rtx d = XEXP (e, 0);
1919
1920 if (d)
1921 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1922 RTVEC_ELT (vec, i) = e;
1923 }
1924
1925 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1926 }
1927
1928 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1929 where SRC is non-consecutive registers represented by a PARALLEL.
1930 SSIZE represents the total size of block ORIG_DST, or -1 if not
1931 known. */
1932
1933 void
1934 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1935 {
1936 rtx *tmps, dst;
1937 int start, finish, i;
1938 enum machine_mode m = GET_MODE (orig_dst);
1939
1940 gcc_assert (GET_CODE (src) == PARALLEL);
1941
1942 if (!SCALAR_INT_MODE_P (m)
1943 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1944 {
1945 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1946 if (imode == BLKmode)
1947 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1948 else
1949 dst = gen_reg_rtx (imode);
1950 emit_group_store (dst, src, type, ssize);
1951 if (imode != BLKmode)
1952 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1953 emit_move_insn (orig_dst, dst);
1954 return;
1955 }
1956
1957 /* Check for a NULL entry, used to indicate that the parameter goes
1958 both on the stack and in registers. */
1959 if (XEXP (XVECEXP (src, 0, 0), 0))
1960 start = 0;
1961 else
1962 start = 1;
1963 finish = XVECLEN (src, 0);
1964
1965 tmps = XALLOCAVEC (rtx, finish);
1966
1967 /* Copy the (probable) hard regs into pseudos. */
1968 for (i = start; i < finish; i++)
1969 {
1970 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1971 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1972 {
1973 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1974 emit_move_insn (tmps[i], reg);
1975 }
1976 else
1977 tmps[i] = reg;
1978 }
1979
1980 /* If we won't be storing directly into memory, protect the real destination
1981 from strange tricks we might play. */
1982 dst = orig_dst;
1983 if (GET_CODE (dst) == PARALLEL)
1984 {
1985 rtx temp;
1986
1987 /* We can get a PARALLEL dst if there is a conditional expression in
1988 a return statement. In that case, the dst and src are the same,
1989 so no action is necessary. */
1990 if (rtx_equal_p (dst, src))
1991 return;
1992
1993 /* It is unclear if we can ever reach here, but we may as well handle
1994 it. Allocate a temporary, and split this into a store/load to/from
1995 the temporary. */
1996
1997 temp = assign_stack_temp (GET_MODE (dst), ssize);
1998 emit_group_store (temp, src, type, ssize);
1999 emit_group_load (dst, temp, type, ssize);
2000 return;
2001 }
2002 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2003 {
2004 enum machine_mode outer = GET_MODE (dst);
2005 enum machine_mode inner;
2006 HOST_WIDE_INT bytepos;
2007 bool done = false;
2008 rtx temp;
2009
2010 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2011 dst = gen_reg_rtx (outer);
2012
2013 /* Make life a bit easier for combine. */
2014 /* If the first element of the vector is the low part
2015 of the destination mode, use a paradoxical subreg to
2016 initialize the destination. */
2017 if (start < finish)
2018 {
2019 inner = GET_MODE (tmps[start]);
2020 bytepos = subreg_lowpart_offset (inner, outer);
2021 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2022 {
2023 temp = simplify_gen_subreg (outer, tmps[start],
2024 inner, 0);
2025 if (temp)
2026 {
2027 emit_move_insn (dst, temp);
2028 done = true;
2029 start++;
2030 }
2031 }
2032 }
2033
2034 /* If the first element wasn't the low part, try the last. */
2035 if (!done
2036 && start < finish - 1)
2037 {
2038 inner = GET_MODE (tmps[finish - 1]);
2039 bytepos = subreg_lowpart_offset (inner, outer);
2040 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2041 {
2042 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2043 inner, 0);
2044 if (temp)
2045 {
2046 emit_move_insn (dst, temp);
2047 done = true;
2048 finish--;
2049 }
2050 }
2051 }
2052
2053 /* Otherwise, simply initialize the result to zero. */
2054 if (!done)
2055 emit_move_insn (dst, CONST0_RTX (outer));
2056 }
2057
2058 /* Process the pieces. */
2059 for (i = start; i < finish; i++)
2060 {
2061 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2062 enum machine_mode mode = GET_MODE (tmps[i]);
2063 unsigned int bytelen = GET_MODE_SIZE (mode);
2064 unsigned int adj_bytelen = bytelen;
2065 rtx dest = dst;
2066
2067 /* Handle trailing fragments that run over the size of the struct. */
2068 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2069 adj_bytelen = ssize - bytepos;
2070
2071 if (GET_CODE (dst) == CONCAT)
2072 {
2073 if (bytepos + adj_bytelen
2074 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2075 dest = XEXP (dst, 0);
2076 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2077 {
2078 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2079 dest = XEXP (dst, 1);
2080 }
2081 else
2082 {
2083 enum machine_mode dest_mode = GET_MODE (dest);
2084 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2085
2086 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2087
2088 if (GET_MODE_ALIGNMENT (dest_mode)
2089 >= GET_MODE_ALIGNMENT (tmp_mode))
2090 {
2091 dest = assign_stack_temp (dest_mode,
2092 GET_MODE_SIZE (dest_mode));
2093 emit_move_insn (adjust_address (dest,
2094 tmp_mode,
2095 bytepos),
2096 tmps[i]);
2097 dst = dest;
2098 }
2099 else
2100 {
2101 dest = assign_stack_temp (tmp_mode,
2102 GET_MODE_SIZE (tmp_mode));
2103 emit_move_insn (dest, tmps[i]);
2104 dst = adjust_address (dest, dest_mode, bytepos);
2105 }
2106 break;
2107 }
2108 }
2109
2110 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2111 {
2112 /* store_bit_field always takes its value from the lsb.
2113 Move the fragment to the lsb if it's not already there. */
2114 if (
2115 #ifdef BLOCK_REG_PADDING
2116 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2117 == (BYTES_BIG_ENDIAN ? upward : downward)
2118 #else
2119 BYTES_BIG_ENDIAN
2120 #endif
2121 )
2122 {
2123 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2124 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2125 shift, tmps[i], 0);
2126 }
2127 bytelen = adj_bytelen;
2128 }
2129
2130 /* Optimize the access just a bit. */
2131 if (MEM_P (dest)
2132 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2133 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2134 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2135 && bytelen == GET_MODE_SIZE (mode))
2136 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2137 else
2138 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2139 0, 0, mode, tmps[i]);
2140 }
2141
2142 /* Copy from the pseudo into the (probable) hard reg. */
2143 if (orig_dst != dst)
2144 emit_move_insn (orig_dst, dst);
2145 }
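/* Illustrative sketch (hypothetical names, not taken from this file):
   the inverse operation, spilling a multi-register value described by a
   PARALLEL REGS into a stack temporary, looks roughly like

     rtx temp = assign_stack_temp (BLKmode, int_size_in_bytes (type));
     emit_group_store (temp, regs, type, int_size_in_bytes (type));

   which is essentially what the PARALLEL-destination branch above does
   internally when it has to go through memory.  */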
2146
2147 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2148 of the value stored in X. */
2149
2150 rtx
2151 maybe_emit_group_store (rtx x, tree type)
2152 {
2153 enum machine_mode mode = TYPE_MODE (type);
2154 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2155 if (GET_CODE (x) == PARALLEL)
2156 {
2157 rtx result = gen_reg_rtx (mode);
2158 emit_group_store (result, x, type, int_size_in_bytes (type));
2159 return result;
2160 }
2161 return x;
2162 }
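/* Illustrative usage (an assumption about the caller, not from this
   file): code that receives a value X from a call expander, where X may
   or may not be a PARALLEL, can normalize it with

     x = maybe_emit_group_store (x, TREE_TYPE (exp));

   and from then on treat X as an ordinary value of TYPE_MODE.  */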
2163
2164 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2165
2166 This is used on targets that return BLKmode values in registers. */
2167
2168 void
2169 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2170 {
2171 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2172 rtx src = NULL, dst = NULL;
2173 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2174 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2175 enum machine_mode mode = GET_MODE (srcreg);
2176 enum machine_mode tmode = GET_MODE (target);
2177 enum machine_mode copy_mode;
2178
2179 /* BLKmode registers created in the back-end shouldn't have survived. */
2180 gcc_assert (mode != BLKmode);
2181
2182 /* If the structure doesn't take up a whole number of words, see whether
2183 SRCREG is padded on the left or on the right. If it's on the left,
2184 set PADDING_CORRECTION to the number of bits to skip.
2185
2186 In most ABIs, the structure will be returned at the least significant end of
2187 the register, which translates to right padding on little-endian
2188 targets and left padding on big-endian targets. The opposite
2189 holds if the structure is returned at the most significant
2190 end of the register. */
2191 if (bytes % UNITS_PER_WORD != 0
2192 && (targetm.calls.return_in_msb (type)
2193 ? !BYTES_BIG_ENDIAN
2194 : BYTES_BIG_ENDIAN))
2195 padding_correction
2196 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2197
2198 /* We can use a single move if we have an exact mode for the size. */
2199 else if (MEM_P (target)
2200 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2201 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2202 && bytes == GET_MODE_SIZE (mode))
2203 {
2204 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2205 return;
2206 }
2207
2208 /* And if we additionally have the same mode for a register. */
2209 else if (REG_P (target)
2210 && GET_MODE (target) == mode
2211 && bytes == GET_MODE_SIZE (mode))
2212 {
2213 emit_move_insn (target, srcreg);
2214 return;
2215 }
2216
2217 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2218 into a new pseudo which is a full word. */
2219 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2220 {
2221 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2222 mode = word_mode;
2223 }
2224
2225 /* Copy the structure BITSIZE bits at a time. If the target lives in
2226 memory, take care of not reading/writing past its end by selecting
2227 a copy mode suited to BITSIZE. This should always be possible given
2228 how it is computed.
2229
2230 If the target lives in a register, make sure not to select a copy mode
2231 larger than the mode of the register.
2232
2233 We could probably emit more efficient code for machines which do not use
2234 strict alignment, but it doesn't seem worth the effort at the current
2235 time. */
2236
2237 copy_mode = word_mode;
2238 if (MEM_P (target))
2239 {
2240 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2241 if (mem_mode != BLKmode)
2242 copy_mode = mem_mode;
2243 }
2244 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2245 copy_mode = tmode;
2246
2247 for (bitpos = 0, xbitpos = padding_correction;
2248 bitpos < bytes * BITS_PER_UNIT;
2249 bitpos += bitsize, xbitpos += bitsize)
2250 {
2251 /* We need a new source operand each time xbitpos is on a
2252 word boundary and when xbitpos == padding_correction
2253 (the first time through). */
2254 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2255 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2256
2257 /* We need a new destination operand each time bitpos is on
2258 a word boundary. */
2259 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2260 dst = target;
2261 else if (bitpos % BITS_PER_WORD == 0)
2262 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2263
2264 /* Use xbitpos for the source extraction (right justified) and
2265 bitpos for the destination store (left justified). */
2266 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2267 extract_bit_field (src, bitsize,
2268 xbitpos % BITS_PER_WORD, 1,
2269 NULL_RTX, copy_mode, copy_mode));
2270 }
2271 }
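/* Worked example for the padding correction above (illustrative only):
   with BITS_PER_WORD == 64 and UNITS_PER_WORD == 8, a 6-byte structure
   returned at the least significant end of the register on a big-endian
   target gives

     padding_correction = 64 - (6 % 8) * 8 = 16

   so the first extraction starts 16 bits into the source word while the
   destination store still starts at bit 0.  */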
2272
2273 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2274 register if it contains any data, otherwise return null.
2275
2276 This is used on targets that return BLKmode values in registers. */
2277
2278 rtx
2279 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2280 {
2281 int i, n_regs;
2282 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2283 unsigned int bitsize;
2284 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2285 enum machine_mode dst_mode;
2286
2287 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2288
2289 x = expand_normal (src);
2290
2291 bytes = int_size_in_bytes (TREE_TYPE (src));
2292 if (bytes == 0)
2293 return NULL_RTX;
2294
2295 /* If the structure doesn't take up a whole number of words, see
2296 whether the register value should be padded on the left or on
2297 the right. Set PADDING_CORRECTION to the number of padding
2298 bits needed on the left side.
2299
2300 In most ABIs, the structure will be returned at the least significant end of
2301 the register, which translates to right padding on little-endian
2302 targets and left padding on big-endian targets. The opposite
2303 holds if the structure is returned at the most significant
2304 end of the register. */
2305 if (bytes % UNITS_PER_WORD != 0
2306 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2307 ? !BYTES_BIG_ENDIAN
2308 : BYTES_BIG_ENDIAN))
2309 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2310 * BITS_PER_UNIT));
2311
2312 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2313 dst_words = XALLOCAVEC (rtx, n_regs);
2314 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2315
2316 /* Copy the structure BITSIZE bits at a time. */
2317 for (bitpos = 0, xbitpos = padding_correction;
2318 bitpos < bytes * BITS_PER_UNIT;
2319 bitpos += bitsize, xbitpos += bitsize)
2320 {
2321 /* We need a new destination pseudo each time xbitpos is
2322 on a word boundary and when xbitpos == padding_correction
2323 (the first time through). */
2324 if (xbitpos % BITS_PER_WORD == 0
2325 || xbitpos == padding_correction)
2326 {
2327 /* Generate an appropriate register. */
2328 dst_word = gen_reg_rtx (word_mode);
2329 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2330
2331 /* Clear the destination before we move anything into it. */
2332 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2333 }
2334
2335 /* We need a new source operand each time bitpos is on a word
2336 boundary. */
2337 if (bitpos % BITS_PER_WORD == 0)
2338 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2339
2340 /* Use bitpos for the source extraction (left justified) and
2341 xbitpos for the destination store (right justified). */
2342 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2343 0, 0, word_mode,
2344 extract_bit_field (src_word, bitsize,
2345 bitpos % BITS_PER_WORD, 1,
2346 NULL_RTX, word_mode, word_mode));
2347 }
2348
2349 if (mode == BLKmode)
2350 {
2351 /* Find the smallest integer mode large enough to hold the
2352 entire structure. */
2353 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2354 mode != VOIDmode;
2355 mode = GET_MODE_WIDER_MODE (mode))
2356 /* Have we found a large enough mode? */
2357 if (GET_MODE_SIZE (mode) >= bytes)
2358 break;
2359
2360 /* A suitable mode should have been found. */
2361 gcc_assert (mode != VOIDmode);
2362 }
2363
2364 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2365 dst_mode = word_mode;
2366 else
2367 dst_mode = mode;
2368 dst = gen_reg_rtx (dst_mode);
2369
2370 for (i = 0; i < n_regs; i++)
2371 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2372
2373 if (mode != dst_mode)
2374 dst = gen_lowpart (mode, dst);
2375
2376 return dst;
2377 }
2378
2379 /* Add a USE expression for REG to the (possibly empty) list pointed
2380 to by CALL_FUSAGE. REG must denote a hard register. */
2381
2382 void
2383 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2384 {
2385 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2386
2387 *call_fusage
2388 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2389 }
2390
2391 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2392 starting at REGNO. All of these registers must be hard registers. */
2393
2394 void
2395 use_regs (rtx *call_fusage, int regno, int nregs)
2396 {
2397 int i;
2398
2399 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2400
2401 for (i = 0; i < nregs; i++)
2402 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2403 }
2404
2405 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2406 PARALLEL REGS. This is for calls that pass values in multiple
2407 non-contiguous locations. The Irix 6 ABI has examples of this. */
2408
2409 void
2410 use_group_regs (rtx *call_fusage, rtx regs)
2411 {
2412 int i;
2413
2414 for (i = 0; i < XVECLEN (regs, 0); i++)
2415 {
2416 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2417
2418 /* A NULL entry means the parameter goes both on the stack and in
2419 registers. This can also be a MEM for targets that pass values
2420 partially on the stack and partially in registers. */
2421 if (reg != 0 && REG_P (reg))
2422 use_reg (call_fusage, reg);
2423 }
2424 }
2425
2426 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2427 assignment and the code of the expression on the RHS is CODE. Return
2428 NULL otherwise. */
2429
2430 static gimple
2431 get_def_for_expr (tree name, enum tree_code code)
2432 {
2433 gimple def_stmt;
2434
2435 if (TREE_CODE (name) != SSA_NAME)
2436 return NULL;
2437
2438 def_stmt = get_gimple_for_ssa_name (name);
2439 if (!def_stmt
2440 || gimple_assign_rhs_code (def_stmt) != code)
2441 return NULL;
2442
2443 return def_stmt;
2444 }
2445
2446 #ifdef HAVE_conditional_move
2447 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2448 assignment and the class of the expression on the RHS is CLASS. Return
2449 NULL otherwise. */
2450
2451 static gimple
2452 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2453 {
2454 gimple def_stmt;
2455
2456 if (TREE_CODE (name) != SSA_NAME)
2457 return NULL;
2458
2459 def_stmt = get_gimple_for_ssa_name (name);
2460 if (!def_stmt
2461 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2462 return NULL;
2463
2464 return def_stmt;
2465 }
2466 #endif
2467 \f
2468
2469 /* Determine whether the LEN bytes generated by CONSTFUN can be
2470 stored to memory using several move instructions. CONSTFUNDATA is
2471 a pointer which will be passed as argument in every CONSTFUN call.
2472 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2473 a memset operation and false if it's a copy of a constant string.
2474 Return nonzero if a call to store_by_pieces should succeed. */
2475
2476 int
2477 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2478 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2479 void *constfundata, unsigned int align, bool memsetp)
2480 {
2481 unsigned HOST_WIDE_INT l;
2482 unsigned int max_size;
2483 HOST_WIDE_INT offset = 0;
2484 enum machine_mode mode;
2485 enum insn_code icode;
2486 int reverse;
2487 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2488 rtx cst ATTRIBUTE_UNUSED;
2489
2490 if (len == 0)
2491 return 1;
2492
2493 if (! (memsetp
2494 ? SET_BY_PIECES_P (len, align)
2495 : STORE_BY_PIECES_P (len, align)))
2496 return 0;
2497
2498 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2499
2500 /* We would first store what we can in the largest integer mode, then go to
2501 successively smaller modes. */
2502
2503 for (reverse = 0;
2504 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2505 reverse++)
2506 {
2507 l = len;
2508 max_size = STORE_MAX_PIECES + 1;
2509 while (max_size > 1 && l > 0)
2510 {
2511 mode = widest_int_mode_for_size (max_size);
2512
2513 if (mode == VOIDmode)
2514 break;
2515
2516 icode = optab_handler (mov_optab, mode);
2517 if (icode != CODE_FOR_nothing
2518 && align >= GET_MODE_ALIGNMENT (mode))
2519 {
2520 unsigned int size = GET_MODE_SIZE (mode);
2521
2522 while (l >= size)
2523 {
2524 if (reverse)
2525 offset -= size;
2526
2527 cst = (*constfun) (constfundata, offset, mode);
2528 if (!targetm.legitimate_constant_p (mode, cst))
2529 return 0;
2530
2531 if (!reverse)
2532 offset += size;
2533
2534 l -= size;
2535 }
2536 }
2537
2538 max_size = GET_MODE_SIZE (mode);
2539 }
2540
2541 /* The code above should have handled everything. */
2542 gcc_assert (!l);
2543 }
2544
2545 return 1;
2546 }
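/* Illustrative sketch of a CONSTFUN callback (modelled on the string
   readers in builtins.c; treat the exact name as an assumption):

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset,
                       enum machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }

   can_store_by_pieces (len, example_read_str, (void *) str, align, false)
   then checks that every piece the loops above would emit is a constant
   the target accepts.  */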
2547
2548 /* Generate several move instructions to store LEN bytes generated by
2549 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2550 pointer which will be passed as argument in every CONSTFUN call.
2551 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2552 a memset operation and false if it's a copy of a constant string.
2553 If ENDP is 0, return TO; if ENDP is 1, return memory at the end a la
2554 mempcpy; and if ENDP is 2, return memory at the end minus one byte a la
2555 stpcpy. */
2556
2557 rtx
2558 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2559 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2560 void *constfundata, unsigned int align, bool memsetp, int endp)
2561 {
2562 enum machine_mode to_addr_mode = get_address_mode (to);
2563 struct store_by_pieces_d data;
2564
2565 if (len == 0)
2566 {
2567 gcc_assert (endp != 2);
2568 return to;
2569 }
2570
2571 gcc_assert (memsetp
2572 ? SET_BY_PIECES_P (len, align)
2573 : STORE_BY_PIECES_P (len, align));
2574 data.constfun = constfun;
2575 data.constfundata = constfundata;
2576 data.len = len;
2577 data.to = to;
2578 store_by_pieces_1 (&data, align);
2579 if (endp)
2580 {
2581 rtx to1;
2582
2583 gcc_assert (!data.reverse);
2584 if (data.autinc_to)
2585 {
2586 if (endp == 2)
2587 {
2588 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2589 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2590 else
2591 data.to_addr = copy_to_mode_reg (to_addr_mode,
2592 plus_constant (to_addr_mode,
2593 data.to_addr,
2594 -1));
2595 }
2596 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2597 data.offset);
2598 }
2599 else
2600 {
2601 if (endp == 2)
2602 --data.offset;
2603 to1 = adjust_address (data.to, QImode, data.offset);
2604 }
2605 return to1;
2606 }
2607 else
2608 return data.to;
2609 }
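/* Illustrative usage (hypothetical caller, mirroring how the string
   builtins drive this pair of functions):

     if (can_store_by_pieces (len, example_read_str, (void *) str,
                              align, false))
       dest = store_by_pieces (dest, len, example_read_str, (void *) str,
                               align, false, 0);

   With ENDP == 0 the original DEST is returned; 1 or 2 instead return
   the adjusted end address as described in the comment above.  */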
2610
2611 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2612 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2613
2614 static void
2615 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2616 {
2617 struct store_by_pieces_d data;
2618
2619 if (len == 0)
2620 return;
2621
2622 data.constfun = clear_by_pieces_1;
2623 data.constfundata = NULL;
2624 data.len = len;
2625 data.to = to;
2626 store_by_pieces_1 (&data, align);
2627 }
2628
2629 /* Callback routine for clear_by_pieces.
2630 Return const0_rtx unconditionally. */
2631
2632 static rtx
2633 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2634 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2635 enum machine_mode mode ATTRIBUTE_UNUSED)
2636 {
2637 return const0_rtx;
2638 }
2639
2640 /* Subroutine of clear_by_pieces and store_by_pieces.
2641 Generate several move instructions to store LEN bytes of block TO. (A MEM
2642 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2643
2644 static void
2645 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2646 unsigned int align ATTRIBUTE_UNUSED)
2647 {
2648 enum machine_mode to_addr_mode = get_address_mode (data->to);
2649 rtx to_addr = XEXP (data->to, 0);
2650 unsigned int max_size = STORE_MAX_PIECES + 1;
2651 enum insn_code icode;
2652
2653 data->offset = 0;
2654 data->to_addr = to_addr;
2655 data->autinc_to
2656 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2657 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2658
2659 data->explicit_inc_to = 0;
2660 data->reverse
2661 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2662 if (data->reverse)
2663 data->offset = data->len;
2664
2665 /* If storing requires more than two move insns,
2666 copy addresses to registers (to make displacements shorter)
2667 and use post-increment if available. */
2668 if (!data->autinc_to
2669 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2670 {
2671 /* Determine the main mode we'll be using.
2672 MODE might not be used depending on the definitions of the
2673 USE_* macros below. */
2674 enum machine_mode mode ATTRIBUTE_UNUSED
2675 = widest_int_mode_for_size (max_size);
2676
2677 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2678 {
2679 data->to_addr = copy_to_mode_reg (to_addr_mode,
2680 plus_constant (to_addr_mode,
2681 to_addr,
2682 data->len));
2683 data->autinc_to = 1;
2684 data->explicit_inc_to = -1;
2685 }
2686
2687 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2688 && ! data->autinc_to)
2689 {
2690 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2691 data->autinc_to = 1;
2692 data->explicit_inc_to = 1;
2693 }
2694
2695 if ( !data->autinc_to && CONSTANT_P (to_addr))
2696 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2697 }
2698
2699 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2700
2701 /* First store what we can in the largest integer mode, then go to
2702 successively smaller modes. */
2703
2704 while (max_size > 1 && data->len > 0)
2705 {
2706 enum machine_mode mode = widest_int_mode_for_size (max_size);
2707
2708 if (mode == VOIDmode)
2709 break;
2710
2711 icode = optab_handler (mov_optab, mode);
2712 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2713 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2714
2715 max_size = GET_MODE_SIZE (mode);
2716 }
2717
2718 /* The code above should have handled everything. */
2719 gcc_assert (!data->len);
2720 }
2721
2722 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2723 with move instructions for mode MODE. GENFUN is the gen_... function
2724 to make a move insn for that mode. DATA has all the other info. */
2725
2726 static void
2727 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2728 struct store_by_pieces_d *data)
2729 {
2730 unsigned int size = GET_MODE_SIZE (mode);
2731 rtx to1, cst;
2732
2733 while (data->len >= size)
2734 {
2735 if (data->reverse)
2736 data->offset -= size;
2737
2738 if (data->autinc_to)
2739 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2740 data->offset);
2741 else
2742 to1 = adjust_address (data->to, mode, data->offset);
2743
2744 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2745 emit_insn (gen_add2_insn (data->to_addr,
2746 gen_int_mode (-(HOST_WIDE_INT) size,
2747 GET_MODE (data->to_addr))));
2748
2749 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2750 emit_insn ((*genfun) (to1, cst));
2751
2752 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2753 emit_insn (gen_add2_insn (data->to_addr,
2754 gen_int_mode (size,
2755 GET_MODE (data->to_addr))));
2756
2757 if (! data->reverse)
2758 data->offset += size;
2759
2760 data->len -= size;
2761 }
2762 }
2763 \f
2764 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2765 its length in bytes. */
2766
2767 rtx
2768 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2769 unsigned int expected_align, HOST_WIDE_INT expected_size,
2770 unsigned HOST_WIDE_INT min_size,
2771 unsigned HOST_WIDE_INT max_size,
2772 unsigned HOST_WIDE_INT probable_max_size)
2773 {
2774 enum machine_mode mode = GET_MODE (object);
2775 unsigned int align;
2776
2777 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2778
2779 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2780 just move a zero. Otherwise, do this a piece at a time. */
2781 if (mode != BLKmode
2782 && CONST_INT_P (size)
2783 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2784 {
2785 rtx zero = CONST0_RTX (mode);
2786 if (zero != NULL)
2787 {
2788 emit_move_insn (object, zero);
2789 return NULL;
2790 }
2791
2792 if (COMPLEX_MODE_P (mode))
2793 {
2794 zero = CONST0_RTX (GET_MODE_INNER (mode));
2795 if (zero != NULL)
2796 {
2797 write_complex_part (object, zero, 0);
2798 write_complex_part (object, zero, 1);
2799 return NULL;
2800 }
2801 }
2802 }
2803
2804 if (size == const0_rtx)
2805 return NULL;
2806
2807 align = MEM_ALIGN (object);
2808
2809 if (CONST_INT_P (size)
2810 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2811 clear_by_pieces (object, INTVAL (size), align);
2812 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2813 expected_align, expected_size,
2814 min_size, max_size, probable_max_size))
2815 ;
2816 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2817 return set_storage_via_libcall (object, size, const0_rtx,
2818 method == BLOCK_OP_TAILCALL);
2819 else
2820 gcc_unreachable ();
2821
2822 return NULL;
2823 }
2824
2825 rtx
2826 clear_storage (rtx object, rtx size, enum block_op_methods method)
2827 {
2828 unsigned HOST_WIDE_INT max, min = 0;
2829 if (GET_CODE (size) == CONST_INT)
2830 min = max = UINTVAL (size);
2831 else
2832 max = GET_MODE_MASK (GET_MODE (size));
2833 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2834 }
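/* Illustrative usage (hypothetical MEM and size, not from this file):
   zeroing a BLKmode object of known byte size reduces to

     clear_storage (objmem, GEN_INT (size), BLOCK_OP_NORMAL);

   which picks clear_by_pieces, a setmem pattern or the memset libcall
   as decided by clear_storage_hints above.  */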
2835
2836
2837 /* A subroutine of clear_storage. Expand a call to memset.
2838 Return the return value of memset, 0 otherwise. */
2839
2840 rtx
2841 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2842 {
2843 tree call_expr, fn, object_tree, size_tree, val_tree;
2844 enum machine_mode size_mode;
2845 rtx retval;
2846
2847 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2848 place those pseudos into a VAR_DECL and use them later. */
2849
2850 object = copy_addr_to_reg (XEXP (object, 0));
2851
2852 size_mode = TYPE_MODE (sizetype);
2853 size = convert_to_mode (size_mode, size, 1);
2854 size = copy_to_mode_reg (size_mode, size);
2855
2856 /* It is incorrect to use the libcall calling conventions to call
2857 memset in this context. This could be a user call to memset and
2858 the user may wish to examine the return value from memset. For
2859 targets where libcalls and normal calls have different conventions
2860 for returning pointers, we could end up generating incorrect code. */
2861
2862 object_tree = make_tree (ptr_type_node, object);
2863 if (!CONST_INT_P (val))
2864 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2865 size_tree = make_tree (sizetype, size);
2866 val_tree = make_tree (integer_type_node, val);
2867
2868 fn = clear_storage_libcall_fn (true);
2869 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2870 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2871
2872 retval = expand_normal (call_expr);
2873
2874 return retval;
2875 }
2876
2877 /* A subroutine of set_storage_via_libcall. Create the tree node
2878 for the function we use for block clears. */
2879
2880 tree block_clear_fn;
2881
2882 void
2883 init_block_clear_fn (const char *asmspec)
2884 {
2885 if (!block_clear_fn)
2886 {
2887 tree fn, args;
2888
2889 fn = get_identifier ("memset");
2890 args = build_function_type_list (ptr_type_node, ptr_type_node,
2891 integer_type_node, sizetype,
2892 NULL_TREE);
2893
2894 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2895 DECL_EXTERNAL (fn) = 1;
2896 TREE_PUBLIC (fn) = 1;
2897 DECL_ARTIFICIAL (fn) = 1;
2898 TREE_NOTHROW (fn) = 1;
2899 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2900 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2901
2902 block_clear_fn = fn;
2903 }
2904
2905 if (asmspec)
2906 set_user_assembler_name (block_clear_fn, asmspec);
2907 }
2908
2909 static tree
2910 clear_storage_libcall_fn (int for_call)
2911 {
2912 static bool emitted_extern;
2913
2914 if (!block_clear_fn)
2915 init_block_clear_fn (NULL);
2916
2917 if (for_call && !emitted_extern)
2918 {
2919 emitted_extern = true;
2920 make_decl_rtl (block_clear_fn);
2921 }
2922
2923 return block_clear_fn;
2924 }
2925 \f
2926 /* Expand a setmem pattern; return true if successful. */
2927
2928 bool
2929 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2930 unsigned int expected_align, HOST_WIDE_INT expected_size,
2931 unsigned HOST_WIDE_INT min_size,
2932 unsigned HOST_WIDE_INT max_size,
2933 unsigned HOST_WIDE_INT probable_max_size)
2934 {
2935 /* Try the most limited insn first, because there's no point
2936 including more than one in the machine description unless
2937 the more limited one has some advantage. */
2938
2939 enum machine_mode mode;
2940
2941 if (expected_align < align)
2942 expected_align = align;
2943 if (expected_size != -1)
2944 {
2945 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2946 expected_size = max_size;
2947 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2948 expected_size = min_size;
2949 }
2950
2951 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2952 mode = GET_MODE_WIDER_MODE (mode))
2953 {
2954 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2955
2956 if (code != CODE_FOR_nothing
2957 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2958 here because if SIZE is less than the mode mask, as it is
2959 returned by the macro, it will definitely be less than the
2960 actual mode mask. Since SIZE is within the Pmode address
2961 space, we limit MODE to Pmode. */
2962 && ((CONST_INT_P (size)
2963 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2964 <= (GET_MODE_MASK (mode) >> 1)))
2965 || max_size <= (GET_MODE_MASK (mode) >> 1)
2966 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2967 {
2968 struct expand_operand ops[9];
2969 unsigned int nops;
2970
2971 nops = insn_data[(int) code].n_generator_args;
2972 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2973
2974 create_fixed_operand (&ops[0], object);
2975 /* The check above guarantees that this size conversion is valid. */
2976 create_convert_operand_to (&ops[1], size, mode, true);
2977 create_convert_operand_from (&ops[2], val, byte_mode, true);
2978 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2979 if (nops >= 6)
2980 {
2981 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2982 create_integer_operand (&ops[5], expected_size);
2983 }
2984 if (nops >= 8)
2985 {
2986 create_integer_operand (&ops[6], min_size);
2987 /* If we cannot represent the maximal size,
2988 make the parameter NULL. */
2989 if ((HOST_WIDE_INT) max_size != -1)
2990 create_integer_operand (&ops[7], max_size);
2991 else
2992 create_fixed_operand (&ops[7], NULL);
2993 }
2994 if (nops == 9)
2995 {
2996 /* If we cannot represent the maximal size,
2997 make the parameter NULL. */
2998 if ((HOST_WIDE_INT) probable_max_size != -1)
2999 create_integer_operand (&ops[8], probable_max_size);
3000 else
3001 create_fixed_operand (&ops[8], NULL);
3002 }
3003 if (maybe_expand_insn (code, nops, ops))
3004 return true;
3005 }
3006 }
3007
3008 return false;
3009 }
3010
3011 \f
3012 /* Write to one of the components of the complex value CPLX. Write VAL to
3013 the real part if IMAG_P is false, and the imaginary part if it's true. */
3014
3015 static void
3016 write_complex_part (rtx cplx, rtx val, bool imag_p)
3017 {
3018 enum machine_mode cmode;
3019 enum machine_mode imode;
3020 unsigned ibitsize;
3021
3022 if (GET_CODE (cplx) == CONCAT)
3023 {
3024 emit_move_insn (XEXP (cplx, imag_p), val);
3025 return;
3026 }
3027
3028 cmode = GET_MODE (cplx);
3029 imode = GET_MODE_INNER (cmode);
3030 ibitsize = GET_MODE_BITSIZE (imode);
3031
3032 /* For MEMs simplify_gen_subreg may generate an invalid new address
3033 because, e.g., the original address is considered mode-dependent
3034 by the target, which restricts simplify_subreg from invoking
3035 adjust_address_nv. Instead of preparing fallback support for an
3036 invalid address, we call adjust_address_nv directly. */
3037 if (MEM_P (cplx))
3038 {
3039 emit_move_insn (adjust_address_nv (cplx, imode,
3040 imag_p ? GET_MODE_SIZE (imode) : 0),
3041 val);
3042 return;
3043 }
3044
3045 /* If the sub-object is at least word sized, then we know that subregging
3046 will work. This special case is important, since store_bit_field
3047 wants to operate on integer modes, and there's rarely an OImode to
3048 correspond to TCmode. */
3049 if (ibitsize >= BITS_PER_WORD
3050 /* For hard regs we have exact predicates. Assume we can split
3051 the original object if it spans an even number of hard regs.
3052 This special case is important for SCmode on 64-bit platforms
3053 where the natural size of floating-point regs is 32-bit. */
3054 || (REG_P (cplx)
3055 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3056 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3057 {
3058 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3059 imag_p ? GET_MODE_SIZE (imode) : 0);
3060 if (part)
3061 {
3062 emit_move_insn (part, val);
3063 return;
3064 }
3065 else
3066 /* simplify_gen_subreg may fail for sub-word MEMs. */
3067 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3068 }
3069
3070 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3071 }
3072
3073 /* Extract one of the components of the complex value CPLX. Extract the
3074 real part if IMAG_P is false, and the imaginary part if it's true. */
3075
3076 static rtx
3077 read_complex_part (rtx cplx, bool imag_p)
3078 {
3079 enum machine_mode cmode, imode;
3080 unsigned ibitsize;
3081
3082 if (GET_CODE (cplx) == CONCAT)
3083 return XEXP (cplx, imag_p);
3084
3085 cmode = GET_MODE (cplx);
3086 imode = GET_MODE_INNER (cmode);
3087 ibitsize = GET_MODE_BITSIZE (imode);
3088
3089 /* Special case reads from complex constants that got spilled to memory. */
3090 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3091 {
3092 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3093 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3094 {
3095 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3096 if (CONSTANT_CLASS_P (part))
3097 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3098 }
3099 }
3100
3101 /* For MEMs simplify_gen_subreg may generate an invalid new address
3102 because, e.g., the original address is considered mode-dependent
3103 by the target, which restricts simplify_subreg from invoking
3104 adjust_address_nv. Instead of preparing fallback support for an
3105 invalid address, we call adjust_address_nv directly. */
3106 if (MEM_P (cplx))
3107 return adjust_address_nv (cplx, imode,
3108 imag_p ? GET_MODE_SIZE (imode) : 0);
3109
3110 /* If the sub-object is at least word sized, then we know that subregging
3111 will work. This special case is important, since extract_bit_field
3112 wants to operate on integer modes, and there's rarely an OImode to
3113 correspond to TCmode. */
3114 if (ibitsize >= BITS_PER_WORD
3115 /* For hard regs we have exact predicates. Assume we can split
3116 the original object if it spans an even number of hard regs.
3117 This special case is important for SCmode on 64-bit platforms
3118 where the natural size of floating-point regs is 32-bit. */
3119 || (REG_P (cplx)
3120 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3121 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3122 {
3123 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3124 imag_p ? GET_MODE_SIZE (imode) : 0);
3125 if (ret)
3126 return ret;
3127 else
3128 /* simplify_gen_subreg may fail for sub-word MEMs. */
3129 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3130 }
3131
3132 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3133 true, NULL_RTX, imode, imode);
3134 }
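/* Illustrative note (hypothetical register numbers): for a value
   represented as

     (concat:SC (reg:SF 100) (reg:SF 101))

   read_complex_part returns (reg:SF 100) for the real part and
   (reg:SF 101) for the imaginary part, and write_complex_part simply
   moves into the selected half; the bit-field paths above only matter
   when the complex value lives in a single register or in memory.  */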
3135 \f
3136 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3137 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3138 represented in NEW_MODE. If FORCE is true, this will never happen, as
3139 we'll force-create a SUBREG if needed. */
3140
3141 static rtx
3142 emit_move_change_mode (enum machine_mode new_mode,
3143 enum machine_mode old_mode, rtx x, bool force)
3144 {
3145 rtx ret;
3146
3147 if (push_operand (x, GET_MODE (x)))
3148 {
3149 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3150 MEM_COPY_ATTRIBUTES (ret, x);
3151 }
3152 else if (MEM_P (x))
3153 {
3154 /* We don't have to worry about changing the address since the
3155 size in bytes is supposed to be the same. */
3156 if (reload_in_progress)
3157 {
3158 /* Copy the MEM to change the mode and move any
3159 substitutions from the old MEM to the new one. */
3160 ret = adjust_address_nv (x, new_mode, 0);
3161 copy_replacements (x, ret);
3162 }
3163 else
3164 ret = adjust_address (x, new_mode, 0);
3165 }
3166 else
3167 {
3168 /* Note that we do want simplify_subreg's behavior of validating
3169 that the new mode is ok for a hard register. If we were to use
3170 simplify_gen_subreg, we would create the subreg, but would
3171 probably run into the target not being able to implement it. */
3172 /* Except, of course, when FORCE is true, when this is exactly what
3173 we want. Which is needed for CCmodes on some targets. */
3174 if (force)
3175 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3176 else
3177 ret = simplify_subreg (new_mode, x, old_mode, 0);
3178 }
3179
3180 return ret;
3181 }
3182
3183 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3184 an integer mode of the same size as MODE. Returns the instruction
3185 emitted, or NULL if such a move could not be generated. */
3186
3187 static rtx
3188 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3189 {
3190 enum machine_mode imode;
3191 enum insn_code code;
3192
3193 /* There must exist a mode of the exact size we require. */
3194 imode = int_mode_for_mode (mode);
3195 if (imode == BLKmode)
3196 return NULL_RTX;
3197
3198 /* The target must support moves in this mode. */
3199 code = optab_handler (mov_optab, imode);
3200 if (code == CODE_FOR_nothing)
3201 return NULL_RTX;
3202
3203 x = emit_move_change_mode (imode, mode, x, force);
3204 if (x == NULL_RTX)
3205 return NULL_RTX;
3206 y = emit_move_change_mode (imode, mode, y, force);
3207 if (y == NULL_RTX)
3208 return NULL_RTX;
3209 return emit_insn (GEN_FCN (code) (x, y));
3210 }
3211
3212 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3213 Return an equivalent MEM that does not use an auto-increment. */
3214
3215 static rtx
3216 emit_move_resolve_push (enum machine_mode mode, rtx x)
3217 {
3218 enum rtx_code code = GET_CODE (XEXP (x, 0));
3219 HOST_WIDE_INT adjust;
3220 rtx temp;
3221
3222 adjust = GET_MODE_SIZE (mode);
3223 #ifdef PUSH_ROUNDING
3224 adjust = PUSH_ROUNDING (adjust);
3225 #endif
3226 if (code == PRE_DEC || code == POST_DEC)
3227 adjust = -adjust;
3228 else if (code == PRE_MODIFY || code == POST_MODIFY)
3229 {
3230 rtx expr = XEXP (XEXP (x, 0), 1);
3231 HOST_WIDE_INT val;
3232
3233 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3234 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3235 val = INTVAL (XEXP (expr, 1));
3236 if (GET_CODE (expr) == MINUS)
3237 val = -val;
3238 gcc_assert (adjust == val || adjust == -val);
3239 adjust = val;
3240 }
3241
3242 /* Do not use anti_adjust_stack, since we don't want to update
3243 stack_pointer_delta. */
3244 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3245 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3246 0, OPTAB_LIB_WIDEN);
3247 if (temp != stack_pointer_rtx)
3248 emit_move_insn (stack_pointer_rtx, temp);
3249
3250 switch (code)
3251 {
3252 case PRE_INC:
3253 case PRE_DEC:
3254 case PRE_MODIFY:
3255 temp = stack_pointer_rtx;
3256 break;
3257 case POST_INC:
3258 case POST_DEC:
3259 case POST_MODIFY:
3260 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3261 break;
3262 default:
3263 gcc_unreachable ();
3264 }
3265
3266 return replace_equiv_address (x, temp);
3267 }
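/* Worked example (assuming a target whose stack grows downward and whose
   PUSH_ROUNDING is the identity): for X == (mem:SI (pre_dec (reg sp)))
   the code above computes ADJUST == -4, emits sp = sp + -4, and returns
   (mem:SI (reg sp)), so the caller can use an ordinary store instead of
   the auto-decrement.  */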
3268
3269 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3270 X is known to satisfy push_operand, and MODE is known to be complex.
3271 Returns the last instruction emitted. */
3272
3273 rtx
3274 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3275 {
3276 enum machine_mode submode = GET_MODE_INNER (mode);
3277 bool imag_first;
3278
3279 #ifdef PUSH_ROUNDING
3280 unsigned int submodesize = GET_MODE_SIZE (submode);
3281
3282 /* In case we output to the stack, but the size is smaller than the
3283 machine can push exactly, we need to use move instructions. */
3284 if (PUSH_ROUNDING (submodesize) != submodesize)
3285 {
3286 x = emit_move_resolve_push (mode, x);
3287 return emit_move_insn (x, y);
3288 }
3289 #endif
3290
3291 /* Note that the real part always precedes the imag part in memory
3292 regardless of machine's endianness. */
3293 switch (GET_CODE (XEXP (x, 0)))
3294 {
3295 case PRE_DEC:
3296 case POST_DEC:
3297 imag_first = true;
3298 break;
3299 case PRE_INC:
3300 case POST_INC:
3301 imag_first = false;
3302 break;
3303 default:
3304 gcc_unreachable ();
3305 }
3306
3307 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3308 read_complex_part (y, imag_first));
3309 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3310 read_complex_part (y, !imag_first));
3311 }
3312
3313 /* A subroutine of emit_move_complex. Perform the move from Y to X
3314 via two moves of the parts. Returns the last instruction emitted. */
3315
3316 rtx
3317 emit_move_complex_parts (rtx x, rtx y)
3318 {
3319 /* Show the output dies here. This is necessary for SUBREGs
3320 of pseudos since we cannot track their lifetimes correctly;
3321 hard regs shouldn't appear here except as return values. */
3322 if (!reload_completed && !reload_in_progress
3323 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3324 emit_clobber (x);
3325
3326 write_complex_part (x, read_complex_part (y, false), false);
3327 write_complex_part (x, read_complex_part (y, true), true);
3328
3329 return get_last_insn ();
3330 }
3331
3332 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3333 MODE is known to be complex. Returns the last instruction emitted. */
3334
3335 static rtx
3336 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3337 {
3338 bool try_int;
3339
3340 /* Need to take special care for pushes, to maintain proper ordering
3341 of the data, and possibly extra padding. */
3342 if (push_operand (x, mode))
3343 return emit_move_complex_push (mode, x, y);
3344
3345 /* See if we can coerce the target into moving both values at once, except
3346 for floating point where we favor moving as parts if this is easy. */
3347 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3348 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3349 && !(REG_P (x)
3350 && HARD_REGISTER_P (x)
3351 && hard_regno_nregs[REGNO (x)][mode] == 1)
3352 && !(REG_P (y)
3353 && HARD_REGISTER_P (y)
3354 && hard_regno_nregs[REGNO (y)][mode] == 1))
3355 try_int = false;
3356 /* Not possible if the values are inherently not adjacent. */
3357 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3358 try_int = false;
3359 /* Is possible if both are registers (or subregs of registers). */
3360 else if (register_operand (x, mode) && register_operand (y, mode))
3361 try_int = true;
3362 /* If one of the operands is a memory, and alignment constraints
3363 are friendly enough, we may be able to do combined memory operations.
3364 We do not attempt this if Y is a constant because that combination is
3365 usually better with the by-parts thing below. */
3366 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3367 && (!STRICT_ALIGNMENT
3368 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3369 try_int = true;
3370 else
3371 try_int = false;
3372
3373 if (try_int)
3374 {
3375 rtx ret;
3376
3377 /* For memory to memory moves, optimal behavior can be had with the
3378 existing block move logic. */
3379 if (MEM_P (x) && MEM_P (y))
3380 {
3381 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3382 BLOCK_OP_NO_LIBCALL);
3383 return get_last_insn ();
3384 }
3385
3386 ret = emit_move_via_integer (mode, x, y, true);
3387 if (ret)
3388 return ret;
3389 }
3390
3391 return emit_move_complex_parts (x, y);
3392 }
3393
3394 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3395 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3396
3397 static rtx
3398 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3399 {
3400 rtx ret;
3401
3402 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3403 if (mode != CCmode)
3404 {
3405 enum insn_code code = optab_handler (mov_optab, CCmode);
3406 if (code != CODE_FOR_nothing)
3407 {
3408 x = emit_move_change_mode (CCmode, mode, x, true);
3409 y = emit_move_change_mode (CCmode, mode, y, true);
3410 return emit_insn (GEN_FCN (code) (x, y));
3411 }
3412 }
3413
3414 /* Otherwise, find the MODE_INT mode of the same width. */
3415 ret = emit_move_via_integer (mode, x, y, false);
3416 gcc_assert (ret != NULL);
3417 return ret;
3418 }
3419
3420 /* Return true if word I of OP lies entirely in the
3421 undefined bits of a paradoxical subreg. */
3422
3423 static bool
3424 undefined_operand_subword_p (const_rtx op, int i)
3425 {
3426 enum machine_mode innermode, innermostmode;
3427 int offset;
3428 if (GET_CODE (op) != SUBREG)
3429 return false;
3430 innermode = GET_MODE (op);
3431 innermostmode = GET_MODE (SUBREG_REG (op));
3432 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3433 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3434 memory, except for a paradoxical subreg where we define
3435 SUBREG_BYTE to be 0; undo this exception as in
3436 simplify_subreg. */
3437 if (SUBREG_BYTE (op) == 0
3438 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3439 {
3440 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3441 if (WORDS_BIG_ENDIAN)
3442 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3443 if (BYTES_BIG_ENDIAN)
3444 offset += difference % UNITS_PER_WORD;
3445 }
3446 if (offset >= GET_MODE_SIZE (innermostmode)
3447 || offset <= -GET_MODE_SIZE (word_mode))
3448 return true;
3449 return false;
3450 }
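/* Worked example (hypothetical little-endian 64-bit target with
   UNITS_PER_WORD == 8): for the paradoxical subreg
   (subreg:TI (reg:DI 100) 0), word 1 starts at offset 8, which lies
   outside the 8-byte inner DImode value, so the function returns true
   and emit_move_multi_word below skips that word entirely.  */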
3451
3452 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3453 MODE is any multi-word or full-word mode that lacks a move_insn
3454 pattern. Note that you will get better code if you define such
3455 patterns, even if they must turn into multiple assembler instructions. */
3456
3457 static rtx
3458 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3459 {
3460 rtx last_insn = 0;
3461 rtx seq, inner;
3462 bool need_clobber;
3463 int i;
3464
3465 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3466
3467 /* If X is a push on the stack, do the push now and replace
3468 X with a reference to the stack pointer. */
3469 if (push_operand (x, mode))
3470 x = emit_move_resolve_push (mode, x);
3471
3472 /* If we are in reload, see if either operand is a MEM whose address
3473 is scheduled for replacement. */
3474 if (reload_in_progress && MEM_P (x)
3475 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3476 x = replace_equiv_address_nv (x, inner);
3477 if (reload_in_progress && MEM_P (y)
3478 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3479 y = replace_equiv_address_nv (y, inner);
3480
3481 start_sequence ();
3482
3483 need_clobber = false;
3484 for (i = 0;
3485 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3486 i++)
3487 {
3488 rtx xpart = operand_subword (x, i, 1, mode);
3489 rtx ypart;
3490
3491 /* Do not generate code for a move if it would come entirely
3492 from the undefined bits of a paradoxical subreg. */
3493 if (undefined_operand_subword_p (y, i))
3494 continue;
3495
3496 ypart = operand_subword (y, i, 1, mode);
3497
3498 /* If we can't get a part of Y, put Y into memory if it is a
3499 constant. Otherwise, force it into a register. Then we must
3500 be able to get a part of Y. */
3501 if (ypart == 0 && CONSTANT_P (y))
3502 {
3503 y = use_anchored_address (force_const_mem (mode, y));
3504 ypart = operand_subword (y, i, 1, mode);
3505 }
3506 else if (ypart == 0)
3507 ypart = operand_subword_force (y, i, mode);
3508
3509 gcc_assert (xpart && ypart);
3510
3511 need_clobber |= (GET_CODE (xpart) == SUBREG);
3512
3513 last_insn = emit_move_insn (xpart, ypart);
3514 }
3515
3516 seq = get_insns ();
3517 end_sequence ();
3518
3519 /* Show the output dies here. This is necessary for SUBREGs
3520 of pseudos since we cannot track their lifetimes correctly;
3521 hard regs shouldn't appear here except as return values.
3522 We never want to emit such a clobber after reload. */
3523 if (x != y
3524 && ! (reload_in_progress || reload_completed)
3525 && need_clobber != 0)
3526 emit_clobber (x);
3527
3528 emit_insn (seq);
3529
3530 return last_insn;
3531 }
3532
3533 /* Low level part of emit_move_insn.
3534 Called just like emit_move_insn, but assumes X and Y
3535 are basically valid. */
3536
3537 rtx
3538 emit_move_insn_1 (rtx x, rtx y)
3539 {
3540 enum machine_mode mode = GET_MODE (x);
3541 enum insn_code code;
3542
3543 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3544
3545 code = optab_handler (mov_optab, mode);
3546 if (code != CODE_FOR_nothing)
3547 return emit_insn (GEN_FCN (code) (x, y));
3548
3549 /* Expand complex moves by moving real part and imag part. */
3550 if (COMPLEX_MODE_P (mode))
3551 return emit_move_complex (mode, x, y);
3552
3553 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3554 || ALL_FIXED_POINT_MODE_P (mode))
3555 {
3556 rtx result = emit_move_via_integer (mode, x, y, true);
3557
3558 /* If we can't find an integer mode, use a multi-word move. */
3559 if (result)
3560 return result;
3561 else
3562 return emit_move_multi_word (mode, x, y);
3563 }
3564
3565 if (GET_MODE_CLASS (mode) == MODE_CC)
3566 return emit_move_ccmode (mode, x, y);
3567
3568 /* Try using a move pattern for the corresponding integer mode. This is
3569 only safe when simplify_subreg can convert MODE constants into integer
3570 constants. At present, it can only do this reliably if the value
3571 fits within a HOST_WIDE_INT. */
3572 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3573 {
3574 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3575
3576 if (ret)
3577 {
3578 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3579 return ret;
3580 }
3581 }
3582
3583 return emit_move_multi_word (mode, x, y);
3584 }
3585
3586 /* Generate code to copy Y into X.
3587 Both Y and X must have the same mode, except that
3588 Y can be a constant with VOIDmode.
3589 This mode cannot be BLKmode; use emit_block_move for that.
3590
3591 Return the last instruction emitted. */
3592
3593 rtx
3594 emit_move_insn (rtx x, rtx y)
3595 {
3596 enum machine_mode mode = GET_MODE (x);
3597 rtx y_cst = NULL_RTX;
3598 rtx last_insn, set;
3599
3600 gcc_assert (mode != BLKmode
3601 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3602
3603 if (CONSTANT_P (y))
3604 {
3605 if (optimize
3606 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3607 && (last_insn = compress_float_constant (x, y)))
3608 return last_insn;
3609
3610 y_cst = y;
3611
3612 if (!targetm.legitimate_constant_p (mode, y))
3613 {
3614 y = force_const_mem (mode, y);
3615
3616 /* If the target's cannot_force_const_mem prevented the spill,
3617 assume that the target's move expanders will also take care
3618 of the non-legitimate constant. */
3619 if (!y)
3620 y = y_cst;
3621 else
3622 y = use_anchored_address (y);
3623 }
3624 }
3625
3626 /* If X or Y are memory references, verify that their addresses are valid
3627 for the machine. */
3628 if (MEM_P (x)
3629 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3630 MEM_ADDR_SPACE (x))
3631 && ! push_operand (x, GET_MODE (x))))
3632 x = validize_mem (x);
3633
3634 if (MEM_P (y)
3635 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3636 MEM_ADDR_SPACE (y)))
3637 y = validize_mem (y);
3638
3639 gcc_assert (mode != BLKmode);
3640
3641 last_insn = emit_move_insn_1 (x, y);
3642
3643 if (y_cst && REG_P (x)
3644 && (set = single_set (last_insn)) != NULL_RTX
3645 && SET_DEST (set) == x
3646 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3647 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3648
3649 return last_insn;
3650 }
3651
3652 /* If Y is representable exactly in a narrower mode, and the target can
3653 perform the extension directly from constant or memory, then emit the
3654 move as an extension. */
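/* Illustrative example (added annotation, not from the original source):
   when expanding

     double d = 1.5;

   the DFmode constant 1.5 truncates exactly to SFmode, so on a target whose
   extendsfdf2 pattern can take its input directly from a constant or from
   memory, and when rtx costs say that form is no more expensive, the move
   is emitted as a FLOAT_EXTEND of the SFmode constant instead of loading
   the full DFmode constant pool entry.  */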
3655
3656 static rtx
3657 compress_float_constant (rtx x, rtx y)
3658 {
3659 enum machine_mode dstmode = GET_MODE (x);
3660 enum machine_mode orig_srcmode = GET_MODE (y);
3661 enum machine_mode srcmode;
3662 REAL_VALUE_TYPE r;
3663 int oldcost, newcost;
3664 bool speed = optimize_insn_for_speed_p ();
3665
3666 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3667
3668 if (targetm.legitimate_constant_p (dstmode, y))
3669 oldcost = set_src_cost (y, speed);
3670 else
3671 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3672
3673 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3674 srcmode != orig_srcmode;
3675 srcmode = GET_MODE_WIDER_MODE (srcmode))
3676 {
3677 enum insn_code ic;
3678 rtx trunc_y, last_insn;
3679
3680 /* Skip if the target can't extend this way. */
3681 ic = can_extend_p (dstmode, srcmode, 0);
3682 if (ic == CODE_FOR_nothing)
3683 continue;
3684
3685 /* Skip if the narrowed value isn't exact. */
3686 if (! exact_real_truncate (srcmode, &r))
3687 continue;
3688
3689 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3690
3691 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3692 {
3693 /* Skip if the target needs extra instructions to perform
3694 the extension. */
3695 if (!insn_operand_matches (ic, 1, trunc_y))
3696 continue;
3697 /* This is valid, but may not be cheaper than the original. */
3698 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3699 speed);
3700 if (oldcost < newcost)
3701 continue;
3702 }
3703 else if (float_extend_from_mem[dstmode][srcmode])
3704 {
3705 trunc_y = force_const_mem (srcmode, trunc_y);
3706 /* This is valid, but may not be cheaper than the original. */
3707 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3708 speed);
3709 if (oldcost < newcost)
3710 continue;
3711 trunc_y = validize_mem (trunc_y);
3712 }
3713 else
3714 continue;
3715
3716 /* For CSE's benefit, force the compressed constant pool entry
3717 into a new pseudo. This constant may be used in different modes,
3718 and if not, combine will put things back together for us. */
3719 trunc_y = force_reg (srcmode, trunc_y);
3720 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3721 last_insn = get_last_insn ();
3722
3723 if (REG_P (x))
3724 set_unique_reg_note (last_insn, REG_EQUAL, y);
3725
3726 return last_insn;
3727 }
3728
3729 return NULL_RTX;
3730 }
3731 \f
3732 /* Pushing data onto the stack. */
3733
3734 /* Push a block of length SIZE (perhaps variable)
3735 and return an rtx to address the beginning of the block.
3736 The value may be virtual_outgoing_args_rtx.
3737
3738 EXTRA is the number of bytes of padding to push in addition to SIZE.
3739 BELOW nonzero means this padding comes at low addresses;
3740 otherwise, the padding comes at high addresses. */
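/* Illustrative example (added annotation, not from the original source):
   on a STACK_GROWS_DOWNWARD target,

     rtx addr = push_block (GEN_INT (32), 0, 0);

   emits an anti_adjust_stack of 32 bytes and returns an address based on
   virtual_outgoing_args_rtx that points at the start of the newly
   allocated block.  */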
3741
3742 rtx
3743 push_block (rtx size, int extra, int below)
3744 {
3745 rtx temp;
3746
3747 size = convert_modes (Pmode, ptr_mode, size, 1);
3748 if (CONSTANT_P (size))
3749 anti_adjust_stack (plus_constant (Pmode, size, extra));
3750 else if (REG_P (size) && extra == 0)
3751 anti_adjust_stack (size);
3752 else
3753 {
3754 temp = copy_to_mode_reg (Pmode, size);
3755 if (extra != 0)
3756 temp = expand_binop (Pmode, add_optab, temp,
3757 gen_int_mode (extra, Pmode),
3758 temp, 0, OPTAB_LIB_WIDEN);
3759 anti_adjust_stack (temp);
3760 }
3761
3762 #ifndef STACK_GROWS_DOWNWARD
3763 if (0)
3764 #else
3765 if (1)
3766 #endif
3767 {
3768 temp = virtual_outgoing_args_rtx;
3769 if (extra != 0 && below)
3770 temp = plus_constant (Pmode, temp, extra);
3771 }
3772 else
3773 {
3774 if (CONST_INT_P (size))
3775 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3776 -INTVAL (size) - (below ? 0 : extra));
3777 else if (extra != 0 && !below)
3778 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3779 negate_rtx (Pmode, plus_constant (Pmode, size,
3780 extra)));
3781 else
3782 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3783 negate_rtx (Pmode, size));
3784 }
3785
3786 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3787 }
3788
3789 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3790
3791 static rtx
3792 mem_autoinc_base (rtx mem)
3793 {
3794 if (MEM_P (mem))
3795 {
3796 rtx addr = XEXP (mem, 0);
3797 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3798 return XEXP (addr, 0);
3799 }
3800 return NULL;
3801 }
3802
3803 /* A utility routine used here, in reload, and in try_split. The insns
3804 after PREV up to and including LAST are known to adjust the stack,
3805 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3806 placing notes as appropriate. PREV may be NULL, indicating the
3807 entire insn sequence prior to LAST should be scanned.
3808
3809 The set of allowed stack pointer modifications is small:
3810 (1) One or more auto-inc style memory references (aka pushes),
3811 (2) One or more addition/subtraction with the SP as destination,
3812 (3) A single move insn with the SP as destination,
3813 (4) A call_pop insn,
3814 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3815
3816 Insns in the sequence that do not modify the SP are ignored,
3817 except for noreturn calls.
3818
3819 The return value is the amount of adjustment that can be trivially
3820 verified, via immediate operand or auto-inc. If the adjustment
3821 cannot be trivially extracted, the return value is INT_MIN. */
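/* Illustrative examples of find_args_size_adjust's return value (added
   annotation, not from the original source), assuming a word-sized stack
   pointer and SImode pushes:

     (set (reg sp) (plus (reg sp) (const_int -16)))   -> -16
     (set (mem:SI (pre_dec (reg sp))) (reg r0))       -> -4
     (set (reg sp) (reg fp))                          -> HOST_WIDE_INT_MIN

   The last form modifies the stack pointer in a way that cannot be
   verified trivially, so it is reported as unknown.  */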
3822
3823 HOST_WIDE_INT
3824 find_args_size_adjust (rtx insn)
3825 {
3826 rtx dest, set, pat;
3827 int i;
3828
3829 pat = PATTERN (insn);
3830 set = NULL;
3831
3832 /* Look for a call_pop pattern. */
3833 if (CALL_P (insn))
3834 {
3835 /* We have to allow non-call_pop patterns for the case
3836 of emit_single_push_insn of a TLS address. */
3837 if (GET_CODE (pat) != PARALLEL)
3838 return 0;
3839
3840 /* All call_pop have a stack pointer adjust in the parallel.
3841 The call itself is always first, and the stack adjust is
3842 usually last, so search from the end. */
3843 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3844 {
3845 set = XVECEXP (pat, 0, i);
3846 if (GET_CODE (set) != SET)
3847 continue;
3848 dest = SET_DEST (set);
3849 if (dest == stack_pointer_rtx)
3850 break;
3851 }
3852 /* We'd better have found the stack pointer adjust. */
3853 if (i == 0)
3854 return 0;
3855 /* Fall through to process the extracted SET and DEST
3856 as if it were a standalone insn. */
3857 }
3858 else if (GET_CODE (pat) == SET)
3859 set = pat;
3860 else if ((set = single_set (insn)) != NULL)
3861 ;
3862 else if (GET_CODE (pat) == PARALLEL)
3863 {
3864 /* ??? Some older ports use a parallel with a stack adjust
3865 and a store for a PUSH_ROUNDING pattern, rather than a
3866 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3867 /* ??? See h8300 and m68k, pushqi1. */
3868 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3869 {
3870 set = XVECEXP (pat, 0, i);
3871 if (GET_CODE (set) != SET)
3872 continue;
3873 dest = SET_DEST (set);
3874 if (dest == stack_pointer_rtx)
3875 break;
3876
3877 /* We do not expect an auto-inc of the sp in the parallel. */
3878 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3879 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3880 != stack_pointer_rtx);
3881 }
3882 if (i < 0)
3883 return 0;
3884 }
3885 else
3886 return 0;
3887
3888 dest = SET_DEST (set);
3889
3890 /* Look for direct modifications of the stack pointer. */
3891 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3892 {
3893 /* Look for a trivial adjustment, otherwise assume nothing. */
3894 /* Note that the SPU restore_stack_block pattern refers to
3895 the stack pointer in V4SImode. Consider that non-trivial. */
3896 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3897 && GET_CODE (SET_SRC (set)) == PLUS
3898 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3899 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3900 return INTVAL (XEXP (SET_SRC (set), 1));
3901 /* ??? Reload can generate no-op moves, which will be cleaned
3902 up later. Recognize it and continue searching. */
3903 else if (rtx_equal_p (dest, SET_SRC (set)))
3904 return 0;
3905 else
3906 return HOST_WIDE_INT_MIN;
3907 }
3908 else
3909 {
3910 rtx mem, addr;
3911
3912 /* Otherwise only think about autoinc patterns. */
3913 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3914 {
3915 mem = dest;
3916 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3917 != stack_pointer_rtx);
3918 }
3919 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3920 mem = SET_SRC (set);
3921 else
3922 return 0;
3923
3924 addr = XEXP (mem, 0);
3925 switch (GET_CODE (addr))
3926 {
3927 case PRE_INC:
3928 case POST_INC:
3929 return GET_MODE_SIZE (GET_MODE (mem));
3930 case PRE_DEC:
3931 case POST_DEC:
3932 return -GET_MODE_SIZE (GET_MODE (mem));
3933 case PRE_MODIFY:
3934 case POST_MODIFY:
3935 addr = XEXP (addr, 1);
3936 gcc_assert (GET_CODE (addr) == PLUS);
3937 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3938 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3939 return INTVAL (XEXP (addr, 1));
3940 default:
3941 gcc_unreachable ();
3942 }
3943 }
3944 }
3945
3946 int
3947 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3948 {
3949 int args_size = end_args_size;
3950 bool saw_unknown = false;
3951 rtx insn;
3952
3953 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3954 {
3955 HOST_WIDE_INT this_delta;
3956
3957 if (!NONDEBUG_INSN_P (insn))
3958 continue;
3959
3960 this_delta = find_args_size_adjust (insn);
3961 if (this_delta == 0)
3962 {
3963 if (!CALL_P (insn)
3964 || ACCUMULATE_OUTGOING_ARGS
3965 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3966 continue;
3967 }
3968
3969 gcc_assert (!saw_unknown);
3970 if (this_delta == HOST_WIDE_INT_MIN)
3971 saw_unknown = true;
3972
3973 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3974 #ifdef STACK_GROWS_DOWNWARD
3975 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3976 #endif
3977 args_size -= this_delta;
3978 }
3979
3980 return saw_unknown ? INT_MIN : args_size;
3981 }
3982
3983 #ifdef PUSH_ROUNDING
3984 /* Emit single push insn. */
3985
3986 static void
3987 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3988 {
3989 rtx dest_addr;
3990 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3991 rtx dest;
3992 enum insn_code icode;
3993
3994 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3995 /* If there is a push pattern, use it. Otherwise try the old way of
3996 throwing a MEM representing the push operation to the move expander. */
3997 icode = optab_handler (push_optab, mode);
3998 if (icode != CODE_FOR_nothing)
3999 {
4000 struct expand_operand ops[1];
4001
4002 create_input_operand (&ops[0], x, mode);
4003 if (maybe_expand_insn (icode, 1, ops))
4004 return;
4005 }
4006 if (GET_MODE_SIZE (mode) == rounded_size)
4007 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4008 /* If we are to pad downward, adjust the stack pointer first and
4009 then store X into the stack location using an offset. This is
4010 because emit_move_insn does not know how to pad; it does not have
4011 access to type. */
4012 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4013 {
4014 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4015 HOST_WIDE_INT offset;
4016
4017 emit_move_insn (stack_pointer_rtx,
4018 expand_binop (Pmode,
4019 #ifdef STACK_GROWS_DOWNWARD
4020 sub_optab,
4021 #else
4022 add_optab,
4023 #endif
4024 stack_pointer_rtx,
4025 gen_int_mode (rounded_size, Pmode),
4026 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4027
4028 offset = (HOST_WIDE_INT) padding_size;
4029 #ifdef STACK_GROWS_DOWNWARD
4030 if (STACK_PUSH_CODE == POST_DEC)
4031 /* We have already decremented the stack pointer, so get the
4032 previous value. */
4033 offset += (HOST_WIDE_INT) rounded_size;
4034 #else
4035 if (STACK_PUSH_CODE == POST_INC)
4036 /* We have already incremented the stack pointer, so get the
4037 previous value. */
4038 offset -= (HOST_WIDE_INT) rounded_size;
4039 #endif
4040 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4041 gen_int_mode (offset, Pmode));
4042 }
4043 else
4044 {
4045 #ifdef STACK_GROWS_DOWNWARD
4046 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4047 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4048 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4049 Pmode));
4050 #else
4051 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4052 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4053 gen_int_mode (rounded_size, Pmode));
4054 #endif
4055 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4056 }
4057
4058 dest = gen_rtx_MEM (mode, dest_addr);
4059
4060 if (type != 0)
4061 {
4062 set_mem_attributes (dest, type, 1);
4063
4064 if (flag_optimize_sibling_calls)
4065 /* Function incoming arguments may overlap with sibling call
4066 outgoing arguments and we cannot allow reordering of reads
4067 from function arguments with stores to outgoing arguments
4068 of sibling calls. */
4069 set_mem_alias_set (dest, 0);
4070 }
4071 emit_move_insn (dest, x);
4072 }
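/* Illustrative example of the downward-padding branch above (added
   annotation, not from the original source): pushing a QImode argument
   when PUSH_ROUNDING rounds 1 byte up to 4 and FUNCTION_ARG_PADDING says
   downward, on a STACK_GROWS_DOWNWARD target whose STACK_PUSH_CODE is
   PRE_DEC, first subtracts 4 from the stack pointer and then stores the
   byte at (plus (reg sp) (const_int 3)), i.e. at the high-address end of
   the slot.  */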
4073
4074 /* Emit and annotate a single push insn. */
4075
4076 static void
4077 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4078 {
4079 int delta, old_delta = stack_pointer_delta;
4080 rtx prev = get_last_insn ();
4081 rtx last;
4082
4083 emit_single_push_insn_1 (mode, x, type);
4084
4085 last = get_last_insn ();
4086
4087 /* Notice the common case where we emitted exactly one insn. */
4088 if (PREV_INSN (last) == prev)
4089 {
4090 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4091 return;
4092 }
4093
4094 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4095 gcc_assert (delta == INT_MIN || delta == old_delta);
4096 }
4097 #endif
4098
4099 /* Generate code to push X onto the stack, assuming it has mode MODE and
4100 type TYPE.
4101 MODE is redundant except when X is a CONST_INT (since they don't
4102 carry mode info).
4103 SIZE is an rtx for the size of data to be copied (in bytes),
4104 needed only if X is BLKmode.
4105
4106 ALIGN (in bits) is maximum alignment we can assume.
4107
4108 If PARTIAL and REG are both nonzero, then copy that many of the first
4109 bytes of X into registers starting with REG, and push the rest of X.
4110 The amount of space pushed is decreased by PARTIAL bytes.
4111 REG must be a hard register in this case.
4112 If REG is zero but PARTIAL is not, take all other actions for an
4113 argument partially in registers, but do not actually load any
4114 registers.
4115
4116 EXTRA is the amount in bytes of extra space to leave next to this arg.
4117 This is ignored if an argument block has already been allocated.
4118
4119 On a machine that lacks real push insns, ARGS_ADDR is the address of
4120 the bottom of the argument block for this call. We use indexing off there
4121 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4122 argument block has not been preallocated.
4123
4124 ARGS_SO_FAR is the size of args previously pushed for this call.
4125
4126 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4127 for arguments passed in registers. If nonzero, it will be the number
4128 of bytes required. */
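/* Hypothetical call shape for orientation (added annotation, not from the
   original source): on a target with push insns (PUSH_ARGS nonzero),
   pushing a plain SImode argument with no partial-register part and no
   preallocated argument block might look like

     emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
                     GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);

   which reaches the scalar branch below and ends up in
   emit_single_push_insn.  */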
4129
4130 void
4131 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4132 unsigned int align, int partial, rtx reg, int extra,
4133 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4134 rtx alignment_pad)
4135 {
4136 rtx xinner;
4137 enum direction stack_direction
4138 #ifdef STACK_GROWS_DOWNWARD
4139 = downward;
4140 #else
4141 = upward;
4142 #endif
4143
4144 /* Decide where to pad the argument: `downward' for below,
4145 `upward' for above, or `none' for don't pad it.
4146 Default is below for small data on big-endian machines; else above. */
4147 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4148
4149 /* Invert direction if stack is post-decrement.
4150 FIXME: why? */
4151 if (STACK_PUSH_CODE == POST_DEC)
4152 if (where_pad != none)
4153 where_pad = (where_pad == downward ? upward : downward);
4154
4155 xinner = x;
4156
4157 if (mode == BLKmode
4158 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4159 {
4160 /* Copy a block into the stack, entirely or partially. */
4161
4162 rtx temp;
4163 int used;
4164 int offset;
4165 int skip;
4166
4167 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4168 used = partial - offset;
4169
4170 if (mode != BLKmode)
4171 {
4172 /* A value is to be stored in an insufficiently aligned
4173 stack slot; copy via a suitably aligned slot if
4174 necessary. */
4175 size = GEN_INT (GET_MODE_SIZE (mode));
4176 if (!MEM_P (xinner))
4177 {
4178 temp = assign_temp (type, 1, 1);
4179 emit_move_insn (temp, xinner);
4180 xinner = temp;
4181 }
4182 }
4183
4184 gcc_assert (size);
4185
4186 /* USED is now the # of bytes we need not copy to the stack
4187 because registers will take care of them. */
4188
4189 if (partial != 0)
4190 xinner = adjust_address (xinner, BLKmode, used);
4191
4192 /* If the partial register-part of the arg counts in its stack size,
4193 skip the part of stack space corresponding to the registers.
4194 Otherwise, start copying to the beginning of the stack space,
4195 by setting SKIP to 0. */
4196 skip = (reg_parm_stack_space == 0) ? 0 : used;
4197
4198 #ifdef PUSH_ROUNDING
4199 /* Do it with several push insns if that doesn't take lots of insns
4200 and if there is no difficulty with push insns that skip bytes
4201 on the stack for alignment purposes. */
4202 if (args_addr == 0
4203 && PUSH_ARGS
4204 && CONST_INT_P (size)
4205 && skip == 0
4206 && MEM_ALIGN (xinner) >= align
4207 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4208 /* Here we avoid the case of a structure whose weak alignment
4209 forces many pushes of a small amount of data,
4210 and such small pushes do rounding that causes trouble. */
4211 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4212 || align >= BIGGEST_ALIGNMENT
4213 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4214 == (align / BITS_PER_UNIT)))
4215 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4216 {
4217 /* Push padding now if padding above and stack grows down,
4218 or if padding below and stack grows up.
4219 But if space already allocated, this has already been done. */
4220 if (extra && args_addr == 0
4221 && where_pad != none && where_pad != stack_direction)
4222 anti_adjust_stack (GEN_INT (extra));
4223
4224 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4225 }
4226 else
4227 #endif /* PUSH_ROUNDING */
4228 {
4229 rtx target;
4230
4231 /* Otherwise make space on the stack and copy the data
4232 to the address of that space. */
4233
4234 /* Deduct words put into registers from the size we must copy. */
4235 if (partial != 0)
4236 {
4237 if (CONST_INT_P (size))
4238 size = GEN_INT (INTVAL (size) - used);
4239 else
4240 size = expand_binop (GET_MODE (size), sub_optab, size,
4241 gen_int_mode (used, GET_MODE (size)),
4242 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4243 }
4244
4245 /* Get the address of the stack space.
4246 In this case, we do not deal with EXTRA separately.
4247 A single stack adjust will do. */
4248 if (! args_addr)
4249 {
4250 temp = push_block (size, extra, where_pad == downward);
4251 extra = 0;
4252 }
4253 else if (CONST_INT_P (args_so_far))
4254 temp = memory_address (BLKmode,
4255 plus_constant (Pmode, args_addr,
4256 skip + INTVAL (args_so_far)));
4257 else
4258 temp = memory_address (BLKmode,
4259 plus_constant (Pmode,
4260 gen_rtx_PLUS (Pmode,
4261 args_addr,
4262 args_so_far),
4263 skip));
4264
4265 if (!ACCUMULATE_OUTGOING_ARGS)
4266 {
4267 /* If the source is referenced relative to the stack pointer,
4268 copy it to another register to stabilize it. We do not need
4269 to do this if we know that we won't be changing sp. */
4270
4271 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4272 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4273 temp = copy_to_reg (temp);
4274 }
4275
4276 target = gen_rtx_MEM (BLKmode, temp);
4277
4278 /* We do *not* set_mem_attributes here, because incoming arguments
4279 may overlap with sibling call outgoing arguments and we cannot
4280 allow reordering of reads from function arguments with stores
4281 to outgoing arguments of sibling calls. We do, however, want
4282 to record the alignment of the stack slot. */
4283 /* ALIGN may well be better aligned than TYPE, e.g. due to
4284 PARM_BOUNDARY. Assume the caller isn't lying. */
4285 set_mem_align (target, align);
4286
4287 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4288 }
4289 }
4290 else if (partial > 0)
4291 {
4292 /* Scalar partly in registers. */
4293
4294 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4295 int i;
4296 int not_stack;
4297 /* # bytes of start of argument
4298 that we must make space for but need not store. */
4299 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4300 int args_offset = INTVAL (args_so_far);
4301 int skip;
4302
4303 /* Push padding now if padding above and stack grows down,
4304 or if padding below and stack grows up.
4305 But if space already allocated, this has already been done. */
4306 if (extra && args_addr == 0
4307 && where_pad != none && where_pad != stack_direction)
4308 anti_adjust_stack (GEN_INT (extra));
4309
4310 /* If we make space by pushing it, we might as well push
4311 the real data. Otherwise, we can leave OFFSET nonzero
4312 and leave the space uninitialized. */
4313 if (args_addr == 0)
4314 offset = 0;
4315
4316 /* Now NOT_STACK gets the number of words that we don't need to
4317 allocate on the stack. Convert OFFSET to words too. */
4318 not_stack = (partial - offset) / UNITS_PER_WORD;
4319 offset /= UNITS_PER_WORD;
4320
4321 /* If the partial register-part of the arg counts in its stack size,
4322 skip the part of stack space corresponding to the registers.
4323 Otherwise, start copying to the beginning of the stack space,
4324 by setting SKIP to 0. */
4325 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4326
4327 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4328 x = validize_mem (force_const_mem (mode, x));
4329
4330 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4331 SUBREGs of such registers are not allowed. */
4332 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4333 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4334 x = copy_to_reg (x);
4335
4336 /* Loop over all the words allocated on the stack for this arg. */
4337 /* We can do it by words, because any scalar bigger than a word
4338 has a size a multiple of a word. */
4339 #ifndef PUSH_ARGS_REVERSED
4340 for (i = not_stack; i < size; i++)
4341 #else
4342 for (i = size - 1; i >= not_stack; i--)
4343 #endif
4344 if (i >= not_stack + offset)
4345 emit_push_insn (operand_subword_force (x, i, mode),
4346 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4347 0, args_addr,
4348 GEN_INT (args_offset + ((i - not_stack + skip)
4349 * UNITS_PER_WORD)),
4350 reg_parm_stack_space, alignment_pad);
4351 }
4352 else
4353 {
4354 rtx addr;
4355 rtx dest;
4356
4357 /* Push padding now if padding above and stack grows down,
4358 or if padding below and stack grows up.
4359 But if space already allocated, this has already been done. */
4360 if (extra && args_addr == 0
4361 && where_pad != none && where_pad != stack_direction)
4362 anti_adjust_stack (GEN_INT (extra));
4363
4364 #ifdef PUSH_ROUNDING
4365 if (args_addr == 0 && PUSH_ARGS)
4366 emit_single_push_insn (mode, x, type);
4367 else
4368 #endif
4369 {
4370 if (CONST_INT_P (args_so_far))
4371 addr
4372 = memory_address (mode,
4373 plus_constant (Pmode, args_addr,
4374 INTVAL (args_so_far)));
4375 else
4376 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4377 args_so_far));
4378 dest = gen_rtx_MEM (mode, addr);
4379
4380 /* We do *not* set_mem_attributes here, because incoming arguments
4381 may overlap with sibling call outgoing arguments and we cannot
4382 allow reordering of reads from function arguments with stores
4383 to outgoing arguments of sibling calls. We do, however, want
4384 to record the alignment of the stack slot. */
4385 /* ALIGN may well be better aligned than TYPE, e.g. due to
4386 PARM_BOUNDARY. Assume the caller isn't lying. */
4387 set_mem_align (dest, align);
4388
4389 emit_move_insn (dest, x);
4390 }
4391 }
4392
4393 /* If part should go in registers, copy that part
4394 into the appropriate registers. Do this now, at the end,
4395 since mem-to-mem copies above may do function calls. */
4396 if (partial > 0 && reg != 0)
4397 {
4398 /* Handle calls that pass values in multiple non-contiguous locations.
4399 The Irix 6 ABI has examples of this. */
4400 if (GET_CODE (reg) == PARALLEL)
4401 emit_group_load (reg, x, type, -1);
4402 else
4403 {
4404 gcc_assert (partial % UNITS_PER_WORD == 0);
4405 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4406 }
4407 }
4408
4409 if (extra && args_addr == 0 && where_pad == stack_direction)
4410 anti_adjust_stack (GEN_INT (extra));
4411
4412 if (alignment_pad && args_addr == 0)
4413 anti_adjust_stack (alignment_pad);
4414 }
4415 \f
4416 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4417 operations. */
4418
4419 static rtx
4420 get_subtarget (rtx x)
4421 {
4422 return (optimize
4423 || x == 0
4424 /* Only registers can be subtargets. */
4425 || !REG_P (x)
4426 /* Don't use hard regs to avoid extending their life. */
4427 || REGNO (x) < FIRST_PSEUDO_REGISTER
4428 ? 0 : x);
4429 }
4430
4431 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4432 FIELD is a bitfield. Returns true if the optimization was successful,
4433 and there's nothing else to do. */
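/* Illustrative example (added annotation, not from the original source):
   given

     struct s { unsigned int lo : 28; unsigned int top : 4; } x;
     ... x.top += 1; ...

   the field ends at the top of its 32-bit word (on a little-endian
   target), so no masking is needed: the read-modify-write can be done as a
   single add of 1 << 28 to the containing word, because any carry simply
   falls off the top.  A 1-bit field updated with += or -= of a constant is
   handled with an xor instead.  */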
4434
4435 static bool
4436 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4437 unsigned HOST_WIDE_INT bitpos,
4438 unsigned HOST_WIDE_INT bitregion_start,
4439 unsigned HOST_WIDE_INT bitregion_end,
4440 enum machine_mode mode1, rtx str_rtx,
4441 tree to, tree src)
4442 {
4443 enum machine_mode str_mode = GET_MODE (str_rtx);
4444 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4445 tree op0, op1;
4446 rtx value, result;
4447 optab binop;
4448 gimple srcstmt;
4449 enum tree_code code;
4450
4451 if (mode1 != VOIDmode
4452 || bitsize >= BITS_PER_WORD
4453 || str_bitsize > BITS_PER_WORD
4454 || TREE_SIDE_EFFECTS (to)
4455 || TREE_THIS_VOLATILE (to))
4456 return false;
4457
4458 STRIP_NOPS (src);
4459 if (TREE_CODE (src) != SSA_NAME)
4460 return false;
4461 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4462 return false;
4463
4464 srcstmt = get_gimple_for_ssa_name (src);
4465 if (!srcstmt
4466 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4467 return false;
4468
4469 code = gimple_assign_rhs_code (srcstmt);
4470
4471 op0 = gimple_assign_rhs1 (srcstmt);
4472
4473 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4474 to find its initialization. Hopefully the initialization will
4475 be from a bitfield load. */
4476 if (TREE_CODE (op0) == SSA_NAME)
4477 {
4478 gimple op0stmt = get_gimple_for_ssa_name (op0);
4479
4480 /* We want to eventually have OP0 be the same as TO, which
4481 should be a bitfield. */
4482 if (!op0stmt
4483 || !is_gimple_assign (op0stmt)
4484 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4485 return false;
4486 op0 = gimple_assign_rhs1 (op0stmt);
4487 }
4488
4489 op1 = gimple_assign_rhs2 (srcstmt);
4490
4491 if (!operand_equal_p (to, op0, 0))
4492 return false;
4493
4494 if (MEM_P (str_rtx))
4495 {
4496 unsigned HOST_WIDE_INT offset1;
4497
4498 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4499 str_mode = word_mode;
4500 str_mode = get_best_mode (bitsize, bitpos,
4501 bitregion_start, bitregion_end,
4502 MEM_ALIGN (str_rtx), str_mode, 0);
4503 if (str_mode == VOIDmode)
4504 return false;
4505 str_bitsize = GET_MODE_BITSIZE (str_mode);
4506
4507 offset1 = bitpos;
4508 bitpos %= str_bitsize;
4509 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4510 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4511 }
4512 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4513 return false;
4514
4515 /* If the bit field covers the whole REG/MEM, store_field
4516 will likely generate better code. */
4517 if (bitsize >= str_bitsize)
4518 return false;
4519
4520 /* We can't handle fields split across multiple entities. */
4521 if (bitpos + bitsize > str_bitsize)
4522 return false;
4523
4524 if (BYTES_BIG_ENDIAN)
4525 bitpos = str_bitsize - bitpos - bitsize;
4526
4527 switch (code)
4528 {
4529 case PLUS_EXPR:
4530 case MINUS_EXPR:
4531 /* For now, just optimize the case of the topmost bitfield
4532 where we don't need to do any masking and also
4533 1 bit bitfields where xor can be used.
4534 We might win by one instruction for the other bitfields
4535 too if insv/extv instructions aren't used, so that
4536 can be added later. */
4537 if (bitpos + bitsize != str_bitsize
4538 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4539 break;
4540
4541 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4542 value = convert_modes (str_mode,
4543 TYPE_MODE (TREE_TYPE (op1)), value,
4544 TYPE_UNSIGNED (TREE_TYPE (op1)));
4545
4546 /* We may be accessing data outside the field, which means
4547 we can alias adjacent data. */
4548 if (MEM_P (str_rtx))
4549 {
4550 str_rtx = shallow_copy_rtx (str_rtx);
4551 set_mem_alias_set (str_rtx, 0);
4552 set_mem_expr (str_rtx, 0);
4553 }
4554
4555 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4556 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4557 {
4558 value = expand_and (str_mode, value, const1_rtx, NULL);
4559 binop = xor_optab;
4560 }
4561 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4562 result = expand_binop (str_mode, binop, str_rtx,
4563 value, str_rtx, 1, OPTAB_WIDEN);
4564 if (result != str_rtx)
4565 emit_move_insn (str_rtx, result);
4566 return true;
4567
4568 case BIT_IOR_EXPR:
4569 case BIT_XOR_EXPR:
4570 if (TREE_CODE (op1) != INTEGER_CST)
4571 break;
4572 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4573 value = convert_modes (str_mode,
4574 TYPE_MODE (TREE_TYPE (op1)), value,
4575 TYPE_UNSIGNED (TREE_TYPE (op1)));
4576
4577 /* We may be accessing data outside the field, which means
4578 we can alias adjacent data. */
4579 if (MEM_P (str_rtx))
4580 {
4581 str_rtx = shallow_copy_rtx (str_rtx);
4582 set_mem_alias_set (str_rtx, 0);
4583 set_mem_expr (str_rtx, 0);
4584 }
4585
4586 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4587 if (bitpos + bitsize != str_bitsize)
4588 {
4589 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4590 str_mode);
4591 value = expand_and (str_mode, value, mask, NULL_RTX);
4592 }
4593 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4594 result = expand_binop (str_mode, binop, str_rtx,
4595 value, str_rtx, 1, OPTAB_WIDEN);
4596 if (result != str_rtx)
4597 emit_move_insn (str_rtx, result);
4598 return true;
4599
4600 default:
4601 break;
4602 }
4603
4604 return false;
4605 }
4606
4607 /* In the C++ memory model, consecutive bit fields in a structure are
4608 considered one memory location.
4609
4610 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4611 returns the bit range of consecutive bits in which this COMPONENT_REF
4612 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4613 and *OFFSET may be adjusted in the process.
4614
4615 If the access does not need to be restricted, 0 is returned in both
4616 *BITSTART and *BITEND. */
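/* Illustrative example (added annotation, not from the original source):
   for

     struct S { int a : 7; int b : 9; char c; };

   the bit-fields a and b share one DECL_BIT_FIELD_REPRESENTATIVE and thus
   one memory location; c is a separate location.  A store to b may use a
   read-modify-write that also covers a, but the bit range returned here
   stops before c, so the store must not touch it (important for
   multi-threaded code under the C++11 memory model).  */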
4617
4618 static void
4619 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4620 unsigned HOST_WIDE_INT *bitend,
4621 tree exp,
4622 HOST_WIDE_INT *bitpos,
4623 tree *offset)
4624 {
4625 HOST_WIDE_INT bitoffset;
4626 tree field, repr;
4627
4628 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4629
4630 field = TREE_OPERAND (exp, 1);
4631 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4632 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4633 need to limit the range we can access. */
4634 if (!repr)
4635 {
4636 *bitstart = *bitend = 0;
4637 return;
4638 }
4639
4640 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4641 part of a larger bit field, then the representative does not serve any
4642 useful purpose. This can occur in Ada. */
4643 if (handled_component_p (TREE_OPERAND (exp, 0)))
4644 {
4645 enum machine_mode rmode;
4646 HOST_WIDE_INT rbitsize, rbitpos;
4647 tree roffset;
4648 int unsignedp;
4649 int volatilep = 0;
4650 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4651 &roffset, &rmode, &unsignedp, &volatilep, false);
4652 if ((rbitpos % BITS_PER_UNIT) != 0)
4653 {
4654 *bitstart = *bitend = 0;
4655 return;
4656 }
4657 }
4658
4659 /* Compute the adjustment to bitpos from the offset of the field
4660 relative to the representative. DECL_FIELD_OFFSET of field and
4661 repr are the same by construction if they are not constants,
4662 see finish_bitfield_layout. */
4663 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4664 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4665 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4666 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4667 else
4668 bitoffset = 0;
4669 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4670 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4671
4672 /* If the adjustment is larger than bitpos, we would have a negative bit
4673 position for the lower bound and this may wreak havoc later. Adjust
4674 offset and bitpos to make the lower bound non-negative in that case. */
4675 if (bitoffset > *bitpos)
4676 {
4677 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4678 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4679
4680 *bitpos += adjust;
4681 if (*offset == NULL_TREE)
4682 *offset = size_int (-adjust / BITS_PER_UNIT);
4683 else
4684 *offset
4685 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4686 *bitstart = 0;
4687 }
4688 else
4689 *bitstart = *bitpos - bitoffset;
4690
4691 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4692 }
4693
4694 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4695 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4696 DECL_RTL was not set yet, return NORTL. */
4697
4698 static inline bool
4699 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4700 {
4701 if (TREE_CODE (addr) != ADDR_EXPR)
4702 return false;
4703
4704 tree base = TREE_OPERAND (addr, 0);
4705
4706 if (!DECL_P (base)
4707 || TREE_ADDRESSABLE (base)
4708 || DECL_MODE (base) == BLKmode)
4709 return false;
4710
4711 if (!DECL_RTL_SET_P (base))
4712 return nortl;
4713
4714 return (!MEM_P (DECL_RTL (base)));
4715 }
4716
4717 /* Returns true if the MEM_REF REF refers to an object that does not
4718 reside in memory and has non-BLKmode. */
4719
4720 static inline bool
4721 mem_ref_refers_to_non_mem_p (tree ref)
4722 {
4723 tree base = TREE_OPERAND (ref, 0);
4724 return addr_expr_of_non_mem_decl_p_1 (base, false);
4725 }
4726
4727 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4728 is true, try generating a nontemporal store. */
4729
4730 void
4731 expand_assignment (tree to, tree from, bool nontemporal)
4732 {
4733 rtx to_rtx = 0;
4734 rtx result;
4735 enum machine_mode mode;
4736 unsigned int align;
4737 enum insn_code icode;
4738
4739 /* Don't crash if the lhs of the assignment was erroneous. */
4740 if (TREE_CODE (to) == ERROR_MARK)
4741 {
4742 expand_normal (from);
4743 return;
4744 }
4745
4746 /* Optimize away no-op moves without side-effects. */
4747 if (operand_equal_p (to, from, 0))
4748 return;
4749
4750 /* Handle misaligned stores. */
4751 mode = TYPE_MODE (TREE_TYPE (to));
4752 if ((TREE_CODE (to) == MEM_REF
4753 || TREE_CODE (to) == TARGET_MEM_REF)
4754 && mode != BLKmode
4755 && !mem_ref_refers_to_non_mem_p (to)
4756 && ((align = get_object_alignment (to))
4757 < GET_MODE_ALIGNMENT (mode))
4758 && (((icode = optab_handler (movmisalign_optab, mode))
4759 != CODE_FOR_nothing)
4760 || SLOW_UNALIGNED_ACCESS (mode, align)))
4761 {
4762 rtx reg, mem;
4763
4764 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4765 reg = force_not_mem (reg);
4766 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4767
4768 if (icode != CODE_FOR_nothing)
4769 {
4770 struct expand_operand ops[2];
4771
4772 create_fixed_operand (&ops[0], mem);
4773 create_input_operand (&ops[1], reg, mode);
4774 /* The movmisalign<mode> pattern cannot fail, else the assignment
4775 would silently be omitted. */
4776 expand_insn (icode, 2, ops);
4777 }
4778 else
4779 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4780 0, 0, 0, mode, reg);
4781 return;
4782 }
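/* Illustrative example of what reaches the misaligned-store path above
   (added annotation, not from the original source), assuming the aligned
   type attribute is used to lower the access's known alignment:

     typedef int unaligned_int __attribute__ ((aligned (1)));
     void f (unaligned_int *p, int v) { *p = v; }

   The store through *p is a MEM_REF with 1-byte alignment in SImode, so on
   a strict-alignment target it is expanded with the movmisalign<mode>
   pattern when available, or with store_bit_field otherwise.  */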
4783
4784 /* Assignment of a structure component needs special treatment
4785 if the structure component's rtx is not simply a MEM.
4786 Assignment of an array element at a constant index, and assignment of
4787 an array element in an unaligned packed structure field, has the same
4788 problem. Same for (partially) storing into a non-memory object. */
4789 if (handled_component_p (to)
4790 || (TREE_CODE (to) == MEM_REF
4791 && mem_ref_refers_to_non_mem_p (to))
4792 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4793 {
4794 enum machine_mode mode1;
4795 HOST_WIDE_INT bitsize, bitpos;
4796 unsigned HOST_WIDE_INT bitregion_start = 0;
4797 unsigned HOST_WIDE_INT bitregion_end = 0;
4798 tree offset;
4799 int unsignedp;
4800 int volatilep = 0;
4801 tree tem;
4802
4803 push_temp_slots ();
4804 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4805 &unsignedp, &volatilep, true);
4806
4807 /* Make sure bitpos is not negative, it can wreak havoc later. */
4808 if (bitpos < 0)
4809 {
4810 gcc_assert (offset == NULL_TREE);
4811 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4812 ? 3 : exact_log2 (BITS_PER_UNIT)));
4813 bitpos &= BITS_PER_UNIT - 1;
4814 }
4815
4816 if (TREE_CODE (to) == COMPONENT_REF
4817 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4818 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4819
4820 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4821
4822 /* If the bitfield is volatile, we want to access it in the
4823 field's mode, not the computed mode.
4824 If a MEM has VOIDmode (external with incomplete type),
4825 use BLKmode for it instead. */
4826 if (MEM_P (to_rtx))
4827 {
4828 if (volatilep && flag_strict_volatile_bitfields > 0)
4829 to_rtx = adjust_address (to_rtx, mode1, 0);
4830 else if (GET_MODE (to_rtx) == VOIDmode)
4831 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4832 }
4833
4834 if (offset != 0)
4835 {
4836 enum machine_mode address_mode;
4837 rtx offset_rtx;
4838
4839 if (!MEM_P (to_rtx))
4840 {
4841 /* We can get constant negative offsets into arrays with broken
4842 user code. Translate this to a trap instead of ICEing. */
4843 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4844 expand_builtin_trap ();
4845 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4846 }
4847
4848 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4849 address_mode = get_address_mode (to_rtx);
4850 if (GET_MODE (offset_rtx) != address_mode)
4851 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4852
4853 /* A constant address in TO_RTX can have VOIDmode; we must not try
4854 to call force_reg for that case. Avoid that case. */
4855 if (MEM_P (to_rtx)
4856 && GET_MODE (to_rtx) == BLKmode
4857 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4858 && bitsize > 0
4859 && (bitpos % bitsize) == 0
4860 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4861 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4862 {
4863 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4864 bitpos = 0;
4865 }
4866
4867 to_rtx = offset_address (to_rtx, offset_rtx,
4868 highest_pow2_factor_for_target (to,
4869 offset));
4870 }
4871
4872 /* No action is needed if the target is not a memory and the field
4873 lies completely outside that target. This can occur if the source
4874 code contains an out-of-bounds access to a small array. */
4875 if (!MEM_P (to_rtx)
4876 && GET_MODE (to_rtx) != BLKmode
4877 && (unsigned HOST_WIDE_INT) bitpos
4878 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4879 {
4880 expand_normal (from);
4881 result = NULL;
4882 }
4883 /* Handle expand_expr of a complex value returning a CONCAT. */
4884 else if (GET_CODE (to_rtx) == CONCAT)
4885 {
4886 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4887 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4888 && bitpos == 0
4889 && bitsize == mode_bitsize)
4890 result = store_expr (from, to_rtx, false, nontemporal);
4891 else if (bitsize == mode_bitsize / 2
4892 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4893 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4894 nontemporal);
4895 else if (bitpos + bitsize <= mode_bitsize / 2)
4896 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4897 bitregion_start, bitregion_end,
4898 mode1, from,
4899 get_alias_set (to), nontemporal);
4900 else if (bitpos >= mode_bitsize / 2)
4901 result = store_field (XEXP (to_rtx, 1), bitsize,
4902 bitpos - mode_bitsize / 2,
4903 bitregion_start, bitregion_end,
4904 mode1, from,
4905 get_alias_set (to), nontemporal);
4906 else if (bitpos == 0 && bitsize == mode_bitsize)
4907 {
4908 rtx from_rtx;
4909 result = expand_normal (from);
4910 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4911 TYPE_MODE (TREE_TYPE (from)), 0);
4912 emit_move_insn (XEXP (to_rtx, 0),
4913 read_complex_part (from_rtx, false));
4914 emit_move_insn (XEXP (to_rtx, 1),
4915 read_complex_part (from_rtx, true));
4916 }
4917 else
4918 {
4919 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4920 GET_MODE_SIZE (GET_MODE (to_rtx)));
4921 write_complex_part (temp, XEXP (to_rtx, 0), false);
4922 write_complex_part (temp, XEXP (to_rtx, 1), true);
4923 result = store_field (temp, bitsize, bitpos,
4924 bitregion_start, bitregion_end,
4925 mode1, from,
4926 get_alias_set (to), nontemporal);
4927 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4928 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4929 }
4930 }
4931 else
4932 {
4933 if (MEM_P (to_rtx))
4934 {
4935 /* If the field is at offset zero, we could have been given the
4936 DECL_RTX of the parent struct. Don't munge it. */
4937 to_rtx = shallow_copy_rtx (to_rtx);
4938 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4939 if (volatilep)
4940 MEM_VOLATILE_P (to_rtx) = 1;
4941 }
4942
4943 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4944 bitregion_start, bitregion_end,
4945 mode1,
4946 to_rtx, to, from))
4947 result = NULL;
4948 else
4949 result = store_field (to_rtx, bitsize, bitpos,
4950 bitregion_start, bitregion_end,
4951 mode1, from,
4952 get_alias_set (to), nontemporal);
4953 }
4954
4955 if (result)
4956 preserve_temp_slots (result);
4957 pop_temp_slots ();
4958 return;
4959 }
4960
4961 /* If the rhs is a function call and its value is not an aggregate,
4962 call the function before we start to compute the lhs.
4963 This is needed for correct code for cases such as
4964 val = setjmp (buf) on machines where reference to val
4965 requires loading up part of an address in a separate insn.
4966
4967 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4968 since it might be a promoted variable where the zero- or sign- extension
4969 needs to be done. Handling this in the normal way is safe because no
4970 computation is done before the call. The same is true for SSA names. */
4971 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4972 && COMPLETE_TYPE_P (TREE_TYPE (from))
4973 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4974 && ! (((TREE_CODE (to) == VAR_DECL
4975 || TREE_CODE (to) == PARM_DECL
4976 || TREE_CODE (to) == RESULT_DECL)
4977 && REG_P (DECL_RTL (to)))
4978 || TREE_CODE (to) == SSA_NAME))
4979 {
4980 rtx value;
4981
4982 push_temp_slots ();
4983 value = expand_normal (from);
4984 if (to_rtx == 0)
4985 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4986
4987 /* Handle calls that return values in multiple non-contiguous locations.
4988 The Irix 6 ABI has examples of this. */
4989 if (GET_CODE (to_rtx) == PARALLEL)
4990 {
4991 if (GET_CODE (value) == PARALLEL)
4992 emit_group_move (to_rtx, value);
4993 else
4994 emit_group_load (to_rtx, value, TREE_TYPE (from),
4995 int_size_in_bytes (TREE_TYPE (from)));
4996 }
4997 else if (GET_CODE (value) == PARALLEL)
4998 emit_group_store (to_rtx, value, TREE_TYPE (from),
4999 int_size_in_bytes (TREE_TYPE (from)));
5000 else if (GET_MODE (to_rtx) == BLKmode)
5001 {
5002 /* Handle calls that return BLKmode values in registers. */
5003 if (REG_P (value))
5004 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5005 else
5006 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5007 }
5008 else
5009 {
5010 if (POINTER_TYPE_P (TREE_TYPE (to)))
5011 value = convert_memory_address_addr_space
5012 (GET_MODE (to_rtx), value,
5013 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5014
5015 emit_move_insn (to_rtx, value);
5016 }
5017 preserve_temp_slots (to_rtx);
5018 pop_temp_slots ();
5019 return;
5020 }
5021
5022 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5023 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5024
5025 /* Don't move directly into a return register. */
5026 if (TREE_CODE (to) == RESULT_DECL
5027 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5028 {
5029 rtx temp;
5030
5031 push_temp_slots ();
5032
5033 /* If the source is itself a return value, it still is in a pseudo at
5034 this point so we can move it back to the return register directly. */
5035 if (REG_P (to_rtx)
5036 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5037 && TREE_CODE (from) != CALL_EXPR)
5038 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5039 else
5040 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5041
5042 /* Handle calls that return values in multiple non-contiguous locations.
5043 The Irix 6 ABI has examples of this. */
5044 if (GET_CODE (to_rtx) == PARALLEL)
5045 {
5046 if (GET_CODE (temp) == PARALLEL)
5047 emit_group_move (to_rtx, temp);
5048 else
5049 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5050 int_size_in_bytes (TREE_TYPE (from)));
5051 }
5052 else if (temp)
5053 emit_move_insn (to_rtx, temp);
5054
5055 preserve_temp_slots (to_rtx);
5056 pop_temp_slots ();
5057 return;
5058 }
5059
5060 /* In case we are returning the contents of an object which overlaps
5061 the place the value is being stored, use a safe function when copying
5062 a value through a pointer into a structure value return block. */
5063 if (TREE_CODE (to) == RESULT_DECL
5064 && TREE_CODE (from) == INDIRECT_REF
5065 && ADDR_SPACE_GENERIC_P
5066 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5067 && refs_may_alias_p (to, from)
5068 && cfun->returns_struct
5069 && !cfun->returns_pcc_struct)
5070 {
5071 rtx from_rtx, size;
5072
5073 push_temp_slots ();
5074 size = expr_size (from);
5075 from_rtx = expand_normal (from);
5076
5077 emit_library_call (memmove_libfunc, LCT_NORMAL,
5078 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5079 XEXP (from_rtx, 0), Pmode,
5080 convert_to_mode (TYPE_MODE (sizetype),
5081 size, TYPE_UNSIGNED (sizetype)),
5082 TYPE_MODE (sizetype));
5083
5084 preserve_temp_slots (to_rtx);
5085 pop_temp_slots ();
5086 return;
5087 }
5088
5089 /* Compute FROM and store the value in the rtx we got. */
5090
5091 push_temp_slots ();
5092 result = store_expr (from, to_rtx, 0, nontemporal);
5093 preserve_temp_slots (result);
5094 pop_temp_slots ();
5095 return;
5096 }
5097
5098 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5099 succeeded, false otherwise. */
5100
5101 bool
5102 emit_storent_insn (rtx to, rtx from)
5103 {
5104 struct expand_operand ops[2];
5105 enum machine_mode mode = GET_MODE (to);
5106 enum insn_code code = optab_handler (storent_optab, mode);
5107
5108 if (code == CODE_FOR_nothing)
5109 return false;
5110
5111 create_fixed_operand (&ops[0], to);
5112 create_input_operand (&ops[1], from, mode);
5113 return maybe_expand_insn (code, 2, ops);
5114 }
5115
5116 /* Generate code for computing expression EXP,
5117 and storing the value into TARGET.
5118
5119 If the mode is BLKmode then we may return TARGET itself.
5120 It turns out that in BLKmode it doesn't cause a problem,
5121 because C has no operators that could combine two different
5122 assignments into the same BLKmode object with different values
5123 with no sequence point. Will other languages need this to
5124 be more thorough?
5125
5126 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5127 stack, and block moves may need to be treated specially.
5128
5129 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5130
5131 rtx
5132 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5133 {
5134 rtx temp;
5135 rtx alt_rtl = NULL_RTX;
5136 location_t loc = curr_insn_location ();
5137
5138 if (VOID_TYPE_P (TREE_TYPE (exp)))
5139 {
5140 /* C++ can generate ?: expressions with a throw expression in one
5141 branch and an rvalue in the other. Here, we resolve attempts to
5142 store the throw expression's nonexistent result. */
5143 gcc_assert (!call_param_p);
5144 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5145 return NULL_RTX;
5146 }
5147 if (TREE_CODE (exp) == COMPOUND_EXPR)
5148 {
5149 /* Perform first part of compound expression, then assign from second
5150 part. */
5151 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5152 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5153 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5154 nontemporal);
5155 }
5156 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5157 {
5158 /* For conditional expression, get safe form of the target. Then
5159 test the condition, doing the appropriate assignment on either
5160 side. This avoids the creation of unnecessary temporaries.
5161 For non-BLKmode, it is more efficient not to do this. */
5162
5163 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5164
5165 do_pending_stack_adjust ();
5166 NO_DEFER_POP;
5167 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5168 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5169 nontemporal);
5170 emit_jump_insn (gen_jump (lab2));
5171 emit_barrier ();
5172 emit_label (lab1);
5173 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5174 nontemporal);
5175 emit_label (lab2);
5176 OK_DEFER_POP;
5177
5178 return NULL_RTX;
5179 }
5180 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5181 /* If this is a scalar in a register that is stored in a wider mode
5182 than the declared mode, compute the result into its declared mode
5183 and then convert to the wider mode. Our value is the computed
5184 expression. */
5185 {
5186 rtx inner_target = 0;
5187
5188 /* We can do the conversion inside EXP, which will often result
5189 in some optimizations. Do the conversion in two steps: first
5190 change the signedness, if needed, then the extend. But don't
5191 do this if the type of EXP is a subtype of something else
5192 since then the conversion might involve more than just
5193 converting modes. */
5194 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5195 && TREE_TYPE (TREE_TYPE (exp)) == 0
5196 && GET_MODE_PRECISION (GET_MODE (target))
5197 == TYPE_PRECISION (TREE_TYPE (exp)))
5198 {
5199 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5200 != SUBREG_PROMOTED_UNSIGNED_P (target))
5201 {
5202 /* Some types, e.g. Fortran's logical*4, won't have a signed
5203 version, so use the mode instead. */
5204 tree ntype
5205 = (signed_or_unsigned_type_for
5206 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5207 if (ntype == NULL)
5208 ntype = lang_hooks.types.type_for_mode
5209 (TYPE_MODE (TREE_TYPE (exp)),
5210 SUBREG_PROMOTED_UNSIGNED_P (target));
5211
5212 exp = fold_convert_loc (loc, ntype, exp);
5213 }
5214
5215 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5216 (GET_MODE (SUBREG_REG (target)),
5217 SUBREG_PROMOTED_UNSIGNED_P (target)),
5218 exp);
5219
5220 inner_target = SUBREG_REG (target);
5221 }
5222
5223 temp = expand_expr (exp, inner_target, VOIDmode,
5224 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5225
5226 /* If TEMP is a VOIDmode constant, use convert_modes to make
5227 sure that we properly convert it. */
5228 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5229 {
5230 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5231 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5232 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5233 GET_MODE (target), temp,
5234 SUBREG_PROMOTED_UNSIGNED_P (target));
5235 }
5236
5237 convert_move (SUBREG_REG (target), temp,
5238 SUBREG_PROMOTED_UNSIGNED_P (target));
5239
5240 return NULL_RTX;
5241 }
5242 else if ((TREE_CODE (exp) == STRING_CST
5243 || (TREE_CODE (exp) == MEM_REF
5244 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5245 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5246 == STRING_CST
5247 && integer_zerop (TREE_OPERAND (exp, 1))))
5248 && !nontemporal && !call_param_p
5249 && MEM_P (target))
5250 {
5251 /* Optimize initialization of an array with a STRING_CST. */
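/* Illustrative example (added annotation, not from the original source):
   for

     char buf[16] = "abc";

   the bytes of "abc" (rounded up to a store-by-pieces boundary) are
   written with store_by_pieces and any remaining bytes of the 16-byte
   array are zeroed with clear_storage below.  */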
5252 HOST_WIDE_INT exp_len, str_copy_len;
5253 rtx dest_mem;
5254 tree str = TREE_CODE (exp) == STRING_CST
5255 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5256
5257 exp_len = int_expr_size (exp);
5258 if (exp_len <= 0)
5259 goto normal_expr;
5260
5261 if (TREE_STRING_LENGTH (str) <= 0)
5262 goto normal_expr;
5263
5264 str_copy_len = strlen (TREE_STRING_POINTER (str));
5265 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5266 goto normal_expr;
5267
5268 str_copy_len = TREE_STRING_LENGTH (str);
5269 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5270 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5271 {
5272 str_copy_len += STORE_MAX_PIECES - 1;
5273 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5274 }
5275 str_copy_len = MIN (str_copy_len, exp_len);
5276 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5277 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5278 MEM_ALIGN (target), false))
5279 goto normal_expr;
5280
5281 dest_mem = target;
5282
5283 dest_mem = store_by_pieces (dest_mem,
5284 str_copy_len, builtin_strncpy_read_str,
5285 CONST_CAST (char *,
5286 TREE_STRING_POINTER (str)),
5287 MEM_ALIGN (target), false,
5288 exp_len > str_copy_len ? 1 : 0);
5289 if (exp_len > str_copy_len)
5290 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5291 GEN_INT (exp_len - str_copy_len),
5292 BLOCK_OP_NORMAL);
5293 return NULL_RTX;
5294 }
5295 else
5296 {
5297 rtx tmp_target;
5298
5299 normal_expr:
5300 /* If we want to use a nontemporal store, force the value to
5301 register first. */
5302 tmp_target = nontemporal ? NULL_RTX : target;
5303 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5304 (call_param_p
5305 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5306 &alt_rtl);
5307 }
5308
5309 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5310 the same as that of TARGET, adjust the constant. This is needed, for
5311 example, in case it is a CONST_DOUBLE and we want only a word-sized
5312 value. */
5313 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5314 && TREE_CODE (exp) != ERROR_MARK
5315 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5316 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5317 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5318
5319 /* If value was not generated in the target, store it there.
5320 Convert the value to TARGET's type first if necessary and emit the
5321 pending increments that have been queued when expanding EXP.
5322 Note that we cannot emit the whole queue blindly because this will
5323 effectively disable the POST_INC optimization later.
5324
5325 If TEMP and TARGET compare equal according to rtx_equal_p, but
5326 one or both of them are volatile memory refs, we have to distinguish
5327 two cases:
5328 - expand_expr has used TARGET. In this case, we must not generate
5329 another copy. This can be detected by TEMP and TARGET being equal
5330 according to ==.
5331 - expand_expr has not used TARGET - that means that the source just
5332 happens to have the same RTX form. Since TEMP will have been created
5333 by expand_expr, it will compare unequal according to ==.
5334 We must generate a copy in this case, to reach the correct number
5335 of volatile memory references. */
5336
5337 if ((! rtx_equal_p (temp, target)
5338 || (temp != target && (side_effects_p (temp)
5339 || side_effects_p (target))))
5340 && TREE_CODE (exp) != ERROR_MARK
5341 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5342 but TARGET is not a valid memory reference, TEMP will differ
5343 from TARGET although it is really the same location. */
5344 && !(alt_rtl
5345 && rtx_equal_p (alt_rtl, target)
5346 && !side_effects_p (alt_rtl)
5347 && !side_effects_p (target))
5348 /* If there's nothing to copy, don't bother. Don't call
5349 expr_size unless necessary, because some front ends' (e.g. C++)
5350 expr_size hook must not be given objects that are not
5351 supposed to be bit-copied or bit-initialized. */
5352 && expr_size (exp) != const0_rtx)
5353 {
5354 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5355 {
5356 if (GET_MODE (target) == BLKmode)
5357 {
5358 /* Handle calls that return BLKmode values in registers. */
5359 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5360 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5361 else
5362 store_bit_field (target,
5363 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5364 0, 0, 0, GET_MODE (temp), temp);
5365 }
5366 else
5367 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5368 }
5369
5370 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5371 {
5372 /* Handle copying a string constant into an array. The string
5373 constant may be shorter than the array. So copy just the string's
5374 actual length, and clear the rest. First get the size of the data
5375 type of the string, which is actually the size of the target. */
5376 rtx size = expr_size (exp);
5377
5378 if (CONST_INT_P (size)
5379 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5380 emit_block_move (target, temp, size,
5381 (call_param_p
5382 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5383 else
5384 {
5385 enum machine_mode pointer_mode
5386 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5387 enum machine_mode address_mode = get_address_mode (target);
5388
5389 /* Compute the size of the data to copy from the string. */
5390 tree copy_size
5391 = size_binop_loc (loc, MIN_EXPR,
5392 make_tree (sizetype, size),
5393 size_int (TREE_STRING_LENGTH (exp)));
5394 rtx copy_size_rtx
5395 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5396 (call_param_p
5397 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5398 rtx label = 0;
5399
5400 /* Copy that much. */
5401 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5402 TYPE_UNSIGNED (sizetype));
5403 emit_block_move (target, temp, copy_size_rtx,
5404 (call_param_p
5405 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5406
5407 /* Figure out how much is left in TARGET that we have to clear.
5408 Do all calculations in pointer_mode. */
5409 if (CONST_INT_P (copy_size_rtx))
5410 {
5411 size = plus_constant (address_mode, size,
5412 -INTVAL (copy_size_rtx));
5413 target = adjust_address (target, BLKmode,
5414 INTVAL (copy_size_rtx));
5415 }
5416 else
5417 {
5418 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5419 copy_size_rtx, NULL_RTX, 0,
5420 OPTAB_LIB_WIDEN);
5421
5422 if (GET_MODE (copy_size_rtx) != address_mode)
5423 copy_size_rtx = convert_to_mode (address_mode,
5424 copy_size_rtx,
5425 TYPE_UNSIGNED (sizetype));
5426
5427 target = offset_address (target, copy_size_rtx,
5428 highest_pow2_factor (copy_size));
5429 label = gen_label_rtx ();
5430 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5431 GET_MODE (size), 0, label);
5432 }
5433
5434 if (size != const0_rtx)
5435 clear_storage (target, size, BLOCK_OP_NORMAL);
5436
5437 if (label)
5438 emit_label (label);
5439 }
5440 }
5441 /* Handle calls that return values in multiple non-contiguous locations.
5442 The Irix 6 ABI has examples of this. */
5443 else if (GET_CODE (target) == PARALLEL)
5444 {
5445 if (GET_CODE (temp) == PARALLEL)
5446 emit_group_move (target, temp);
5447 else
5448 emit_group_load (target, temp, TREE_TYPE (exp),
5449 int_size_in_bytes (TREE_TYPE (exp)));
5450 }
5451 else if (GET_CODE (temp) == PARALLEL)
5452 emit_group_store (target, temp, TREE_TYPE (exp),
5453 int_size_in_bytes (TREE_TYPE (exp)));
5454 else if (GET_MODE (temp) == BLKmode)
5455 emit_block_move (target, temp, expr_size (exp),
5456 (call_param_p
5457 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5458 /* If we emit a nontemporal store, there is nothing else to do. */
5459 else if (nontemporal && emit_storent_insn (target, temp))
5460 ;
5461 else
5462 {
5463 temp = force_operand (temp, target);
5464 if (temp != target)
5465 emit_move_insn (target, temp);
5466 }
5467 }
5468
5469 return NULL_RTX;
5470 }
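/* For illustration, the STRING_CST path above handles an initialization
   such as

       char buf[16] = "abc";

   (assuming 8-bit bytes and a MEM target): the bytes of "abc" plus the
   terminating NUL are emitted with store_by_pieces, possibly rounded up
   to a STORE_MAX_PIECES boundary when the trailing zero allows it, and
   clear_storage then zeroes whatever remains of the 16-byte object.  */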
5471 \f
5472 /* Return true if field F of structure TYPE is a flexible array. */
5473
5474 static bool
5475 flexible_array_member_p (const_tree f, const_tree type)
5476 {
5477 const_tree tf;
5478
5479 tf = TREE_TYPE (f);
5480 return (DECL_CHAIN (f) == NULL
5481 && TREE_CODE (tf) == ARRAY_TYPE
5482 && TYPE_DOMAIN (tf)
5483 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5484 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5485 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5486 && int_size_in_bytes (type) >= 0);
5487 }
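/* For illustration, the predicate above matches the last field of a
   C99 structure such as

       struct packet
       {
         int length;
         char data[];    (a flexible array member)
       };

   where DATA is the final FIELD_DECL, the domain of its array type has a
   zero lower bound and no TYPE_MAX_VALUE, and the enclosing structure
   still has a known constant size.  */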
5488
5489 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5490 must have in order for it to completely initialize a value of type TYPE.
5491 Return -1 if the number isn't known.
5492
5493 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5494
5495 static HOST_WIDE_INT
5496 count_type_elements (const_tree type, bool for_ctor_p)
5497 {
5498 switch (TREE_CODE (type))
5499 {
5500 case ARRAY_TYPE:
5501 {
5502 tree nelts;
5503
5504 nelts = array_type_nelts (type);
5505 if (nelts && tree_fits_uhwi_p (nelts))
5506 {
5507 unsigned HOST_WIDE_INT n;
5508
5509 n = tree_to_uhwi (nelts) + 1;
5510 if (n == 0 || for_ctor_p)
5511 return n;
5512 else
5513 return n * count_type_elements (TREE_TYPE (type), false);
5514 }
5515 return for_ctor_p ? -1 : 1;
5516 }
5517
5518 case RECORD_TYPE:
5519 {
5520 unsigned HOST_WIDE_INT n;
5521 tree f;
5522
5523 n = 0;
5524 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5525 if (TREE_CODE (f) == FIELD_DECL)
5526 {
5527 if (!for_ctor_p)
5528 n += count_type_elements (TREE_TYPE (f), false);
5529 else if (!flexible_array_member_p (f, type))
5530 /* Don't count flexible arrays, which are not supposed
5531 to be initialized. */
5532 n += 1;
5533 }
5534
5535 return n;
5536 }
5537
5538 case UNION_TYPE:
5539 case QUAL_UNION_TYPE:
5540 {
5541 tree f;
5542 HOST_WIDE_INT n, m;
5543
5544 gcc_assert (!for_ctor_p);
5545 /* Estimate the number of scalars in each field and pick the
5546 maximum. Other estimates would do instead; the idea is simply
5547 to make sure that the estimate is not sensitive to the ordering
5548 of the fields. */
5549 n = 1;
5550 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5551 if (TREE_CODE (f) == FIELD_DECL)
5552 {
5553 m = count_type_elements (TREE_TYPE (f), false);
5554 /* If the field doesn't span the whole union, add an extra
5555 scalar for the rest. */
5556 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5557 TYPE_SIZE (type)) != 1)
5558 m++;
5559 if (n < m)
5560 n = m;
5561 }
5562 return n;
5563 }
5564
5565 case COMPLEX_TYPE:
5566 return 2;
5567
5568 case VECTOR_TYPE:
5569 return TYPE_VECTOR_SUBPARTS (type);
5570
5571 case INTEGER_TYPE:
5572 case REAL_TYPE:
5573 case FIXED_POINT_TYPE:
5574 case ENUMERAL_TYPE:
5575 case BOOLEAN_TYPE:
5576 case POINTER_TYPE:
5577 case OFFSET_TYPE:
5578 case REFERENCE_TYPE:
5579 case NULLPTR_TYPE:
5580 return 1;
5581
5582 case ERROR_MARK:
5583 return 0;
5584
5585 case VOID_TYPE:
5586 case METHOD_TYPE:
5587 case FUNCTION_TYPE:
5588 case LANG_TYPE:
5589 default:
5590 gcc_unreachable ();
5591 }
5592 }
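/* Worked example for the function above: given a C type such as

       struct s { int a; float b[3]; _Complex double c; };

   count_type_elements with FOR_CTOR_P false estimates the scalars as
   1 (A) + 3 * 1 (B) + 2 (C) = 6, while with FOR_CTOR_P true it counts
   the three top-level fields a complete constructor must provide and
   returns 3.  */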
5593
5594 /* Helper for categorize_ctor_elements. Identical interface. */
5595
5596 static bool
5597 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5598 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5599 {
5600 unsigned HOST_WIDE_INT idx;
5601 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5602 tree value, purpose, elt_type;
5603
5604 /* Whether CTOR is a valid constant initializer, in accordance with what
5605 initializer_constant_valid_p does. If inferred from the constructor
5606 elements, true until proven otherwise. */
5607 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5608 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5609
5610 nz_elts = 0;
5611 init_elts = 0;
5612 num_fields = 0;
5613 elt_type = NULL_TREE;
5614
5615 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5616 {
5617 HOST_WIDE_INT mult = 1;
5618
5619 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5620 {
5621 tree lo_index = TREE_OPERAND (purpose, 0);
5622 tree hi_index = TREE_OPERAND (purpose, 1);
5623
5624 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5625 mult = (tree_to_uhwi (hi_index)
5626 - tree_to_uhwi (lo_index) + 1);
5627 }
5628 num_fields += mult;
5629 elt_type = TREE_TYPE (value);
5630
5631 switch (TREE_CODE (value))
5632 {
5633 case CONSTRUCTOR:
5634 {
5635 HOST_WIDE_INT nz = 0, ic = 0;
5636
5637 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5638 p_complete);
5639
5640 nz_elts += mult * nz;
5641 init_elts += mult * ic;
5642
5643 if (const_from_elts_p && const_p)
5644 const_p = const_elt_p;
5645 }
5646 break;
5647
5648 case INTEGER_CST:
5649 case REAL_CST:
5650 case FIXED_CST:
5651 if (!initializer_zerop (value))
5652 nz_elts += mult;
5653 init_elts += mult;
5654 break;
5655
5656 case STRING_CST:
5657 nz_elts += mult * TREE_STRING_LENGTH (value);
5658 init_elts += mult * TREE_STRING_LENGTH (value);
5659 break;
5660
5661 case COMPLEX_CST:
5662 if (!initializer_zerop (TREE_REALPART (value)))
5663 nz_elts += mult;
5664 if (!initializer_zerop (TREE_IMAGPART (value)))
5665 nz_elts += mult;
5666 init_elts += mult;
5667 break;
5668
5669 case VECTOR_CST:
5670 {
5671 unsigned i;
5672 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5673 {
5674 tree v = VECTOR_CST_ELT (value, i);
5675 if (!initializer_zerop (v))
5676 nz_elts += mult;
5677 init_elts += mult;
5678 }
5679 }
5680 break;
5681
5682 default:
5683 {
5684 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5685 nz_elts += mult * tc;
5686 init_elts += mult * tc;
5687
5688 if (const_from_elts_p && const_p)
5689 const_p = initializer_constant_valid_p (value, elt_type)
5690 != NULL_TREE;
5691 }
5692 break;
5693 }
5694 }
5695
5696 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5697 num_fields, elt_type))
5698 *p_complete = false;
5699
5700 *p_nz_elts += nz_elts;
5701 *p_init_elts += init_elts;
5702
5703 return const_p;
5704 }
5705
5706 /* Examine CTOR to discover:
5707 * how many scalar fields are set to nonzero values,
5708 and place it in *P_NZ_ELTS;
5709 * how many scalar fields in total are initialized by CTOR,
5710 and place it in *P_INIT_ELTS;
5711 * whether the constructor is complete -- in the sense that every
5712 meaningful byte is explicitly given a value --
5713 and place it in *P_COMPLETE.
5714
5715 Return whether or not CTOR is a valid static constant initializer, the same
5716 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5717
5718 bool
5719 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5720 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5721 {
5722 *p_nz_elts = 0;
5723 *p_init_elts = 0;
5724 *p_complete = true;
5725
5726 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5727 }
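/* For illustration, assuming the front end emits all three elements
   explicitly, the initializer

       struct s { int a; int b; int c; } x = { 0, 7, 0 };

   gives a CONSTRUCTOR for which *P_INIT_ELTS is 3, *P_NZ_ELTS is 1
   (only B is nonzero), *P_COMPLETE remains true because every field is
   covered, and the return value is true since each element is a valid
   constant initializer.  */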
5728
5729 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5730 of which had type LAST_TYPE. Each element was itself a complete
5731 initializer, in the sense that every meaningful byte was explicitly
5732 given a value. Return true if the same is true for the constructor
5733 as a whole. */
5734
5735 bool
5736 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5737 const_tree last_type)
5738 {
5739 if (TREE_CODE (type) == UNION_TYPE
5740 || TREE_CODE (type) == QUAL_UNION_TYPE)
5741 {
5742 if (num_elts == 0)
5743 return false;
5744
5745 gcc_assert (num_elts == 1 && last_type);
5746
5747 /* ??? We could look at each element of the union, and find the
5748 largest element. That would avoid comparing the size of the
5749 initialized element against any tail padding in the union.
5750 Doesn't seem worth the effort... */
5751 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5752 }
5753
5754 return count_type_elements (type, true) == num_elts;
5755 }
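/* For illustration, take

       union u { char tag; double payload; };

   A constructor initializing only TAG covers a 1-byte field of an
   8-byte union (assuming a 64-bit double), so the TYPE_SIZE comparison
   above fails and the constructor is not complete; a constructor
   initializing PAYLOAD instead would be.  */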
5756
5757 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5758
5759 static int
5760 mostly_zeros_p (const_tree exp)
5761 {
5762 if (TREE_CODE (exp) == CONSTRUCTOR)
5763 {
5764 HOST_WIDE_INT nz_elts, init_elts;
5765 bool complete_p;
5766
5767 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5768 return !complete_p || nz_elts < init_elts / 4;
5769 }
5770
5771 return initializer_zerop (exp);
5772 }
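/* Worked example: for a complete constructor with INIT_ELTS == 8 scalar
   elements of which NZ_ELTS == 1 is nonzero, 1 < 8 / 4 holds, so
   mostly_zeros_p returns true and callers such as store_constructor
   below will prefer to clear the whole object first and then store only
   the single nonzero element.  */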
5773
5774 /* Return 1 if EXP contains all zeros. */
5775
5776 static int
5777 all_zeros_p (const_tree exp)
5778 {
5779 if (TREE_CODE (exp) == CONSTRUCTOR)
5780 {
5781 HOST_WIDE_INT nz_elts, init_elts;
5782 bool complete_p;
5783
5784 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5785 return nz_elts == 0;
5786 }
5787
5788 return initializer_zerop (exp);
5789 }
5790 \f
5791 /* Helper function for store_constructor.
5792 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5793 CLEARED is as for store_constructor.
5794 ALIAS_SET is the alias set to use for any stores.
5795
5796 This provides a recursive shortcut back to store_constructor when it isn't
5797 necessary to go through store_field. This is so that we can pass through
5798 the cleared field to let store_constructor know that we may not have to
5799 clear a substructure if the outer structure has already been cleared. */
5800
5801 static void
5802 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5803 HOST_WIDE_INT bitpos, enum machine_mode mode,
5804 tree exp, int cleared, alias_set_type alias_set)
5805 {
5806 if (TREE_CODE (exp) == CONSTRUCTOR
5807 /* We can only call store_constructor recursively if the size and
5808 bit position are on a byte boundary. */
5809 && bitpos % BITS_PER_UNIT == 0
5810 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5811 /* If we have a nonzero bitpos for a register target, then we just
5812 let store_field do the bitfield handling. This is unlikely to
5813 generate unnecessary clear instructions anyway. */
5814 && (bitpos == 0 || MEM_P (target)))
5815 {
5816 if (MEM_P (target))
5817 target
5818 = adjust_address (target,
5819 GET_MODE (target) == BLKmode
5820 || 0 != (bitpos
5821 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5822 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5823
5824
5825 /* Update the alias set, if required. */
5826 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5827 && MEM_ALIAS_SET (target) != 0)
5828 {
5829 target = copy_rtx (target);
5830 set_mem_alias_set (target, alias_set);
5831 }
5832
5833 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5834 }
5835 else
5836 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5837 }
5838
5839
5840 /* Returns the number of FIELD_DECLs in TYPE. */
5841
5842 static int
5843 fields_length (const_tree type)
5844 {
5845 tree t = TYPE_FIELDS (type);
5846 int count = 0;
5847
5848 for (; t; t = DECL_CHAIN (t))
5849 if (TREE_CODE (t) == FIELD_DECL)
5850 ++count;
5851
5852 return count;
5853 }
5854
5855
5856 /* Store the value of constructor EXP into the rtx TARGET.
5857 TARGET is either a REG or a MEM; we know it cannot conflict, since
5858 safe_from_p has been called.
5859 CLEARED is true if TARGET is known to have been zeroed.
5860 SIZE is the number of bytes of TARGET we are allowed to modify: this
5861 may not be the same as the size of EXP if we are assigning to a field
5862 which has been packed to exclude padding bits. */
5863
5864 static void
5865 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5866 {
5867 tree type = TREE_TYPE (exp);
5868 #ifdef WORD_REGISTER_OPERATIONS
5869 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5870 #endif
5871
5872 switch (TREE_CODE (type))
5873 {
5874 case RECORD_TYPE:
5875 case UNION_TYPE:
5876 case QUAL_UNION_TYPE:
5877 {
5878 unsigned HOST_WIDE_INT idx;
5879 tree field, value;
5880
5881 /* If size is zero or the target is already cleared, do nothing. */
5882 if (size == 0 || cleared)
5883 cleared = 1;
5884 /* We either clear the aggregate or indicate the value is dead. */
5885 else if ((TREE_CODE (type) == UNION_TYPE
5886 || TREE_CODE (type) == QUAL_UNION_TYPE)
5887 && ! CONSTRUCTOR_ELTS (exp))
5888 /* If the constructor is empty, clear the union. */
5889 {
5890 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5891 cleared = 1;
5892 }
5893
5894 /* If we are building a static constructor into a register,
5895 set the initial value as zero so we can fold the value into
5896 a constant. But if more than one register is involved,
5897 this probably loses. */
5898 else if (REG_P (target) && TREE_STATIC (exp)
5899 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5900 {
5901 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5902 cleared = 1;
5903 }
5904
5905 /* If the constructor has fewer fields than the structure or
5906 if we are initializing the structure to mostly zeros, clear
5907 the whole structure first. Don't do this if TARGET is a
5908 register whose mode size isn't equal to SIZE since
5909 clear_storage can't handle this case. */
5910 else if (size > 0
5911 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5912 != fields_length (type))
5913 || mostly_zeros_p (exp))
5914 && (!REG_P (target)
5915 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5916 == size)))
5917 {
5918 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5919 cleared = 1;
5920 }
5921
5922 if (REG_P (target) && !cleared)
5923 emit_clobber (target);
5924
5925 /* Store each element of the constructor into the
5926 corresponding field of TARGET. */
5927 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5928 {
5929 enum machine_mode mode;
5930 HOST_WIDE_INT bitsize;
5931 HOST_WIDE_INT bitpos = 0;
5932 tree offset;
5933 rtx to_rtx = target;
5934
5935 /* Just ignore missing fields. We cleared the whole
5936 structure, above, if any fields are missing. */
5937 if (field == 0)
5938 continue;
5939
5940 if (cleared && initializer_zerop (value))
5941 continue;
5942
5943 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5944 bitsize = tree_to_uhwi (DECL_SIZE (field));
5945 else
5946 bitsize = -1;
5947
5948 mode = DECL_MODE (field);
5949 if (DECL_BIT_FIELD (field))
5950 mode = VOIDmode;
5951
5952 offset = DECL_FIELD_OFFSET (field);
5953 if (tree_fits_shwi_p (offset)
5954 && tree_fits_shwi_p (bit_position (field)))
5955 {
5956 bitpos = int_bit_position (field);
5957 offset = 0;
5958 }
5959 else
5960 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5961
5962 if (offset)
5963 {
5964 enum machine_mode address_mode;
5965 rtx offset_rtx;
5966
5967 offset
5968 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5969 make_tree (TREE_TYPE (exp),
5970 target));
5971
5972 offset_rtx = expand_normal (offset);
5973 gcc_assert (MEM_P (to_rtx));
5974
5975 address_mode = get_address_mode (to_rtx);
5976 if (GET_MODE (offset_rtx) != address_mode)
5977 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5978
5979 to_rtx = offset_address (to_rtx, offset_rtx,
5980 highest_pow2_factor (offset));
5981 }
5982
5983 #ifdef WORD_REGISTER_OPERATIONS
5984 /* If this initializes a field that is smaller than a
5985 word, at the start of a word, try to widen it to a full
5986 word. This special case allows us to output C++ member
5987 function initializations in a form that the optimizers
5988 can understand. */
5989 if (REG_P (target)
5990 && bitsize < BITS_PER_WORD
5991 && bitpos % BITS_PER_WORD == 0
5992 && GET_MODE_CLASS (mode) == MODE_INT
5993 && TREE_CODE (value) == INTEGER_CST
5994 && exp_size >= 0
5995 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5996 {
5997 tree type = TREE_TYPE (value);
5998
5999 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6000 {
6001 type = lang_hooks.types.type_for_mode
6002 (word_mode, TYPE_UNSIGNED (type));
6003 value = fold_convert (type, value);
6004 }
6005
6006 if (BYTES_BIG_ENDIAN)
6007 value
6008 = fold_build2 (LSHIFT_EXPR, type, value,
6009 build_int_cst (type,
6010 BITS_PER_WORD - bitsize));
6011 bitsize = BITS_PER_WORD;
6012 mode = word_mode;
6013 }
6014 #endif
6015
6016 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6017 && DECL_NONADDRESSABLE_P (field))
6018 {
6019 to_rtx = copy_rtx (to_rtx);
6020 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6021 }
6022
6023 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6024 value, cleared,
6025 get_alias_set (TREE_TYPE (field)));
6026 }
6027 break;
6028 }
6029 case ARRAY_TYPE:
6030 {
6031 tree value, index;
6032 unsigned HOST_WIDE_INT i;
6033 int need_to_clear;
6034 tree domain;
6035 tree elttype = TREE_TYPE (type);
6036 int const_bounds_p;
6037 HOST_WIDE_INT minelt = 0;
6038 HOST_WIDE_INT maxelt = 0;
6039
6040 domain = TYPE_DOMAIN (type);
6041 const_bounds_p = (TYPE_MIN_VALUE (domain)
6042 && TYPE_MAX_VALUE (domain)
6043 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6044 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6045
6046 /* If we have constant bounds for the range of the type, get them. */
6047 if (const_bounds_p)
6048 {
6049 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6050 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6051 }
6052
6053 /* If the constructor has fewer elements than the array, clear
6054 the whole array first. Similarly if this is a static
6055 constructor of a non-BLKmode object. */
6056 if (cleared)
6057 need_to_clear = 0;
6058 else if (REG_P (target) && TREE_STATIC (exp))
6059 need_to_clear = 1;
6060 else
6061 {
6062 unsigned HOST_WIDE_INT idx;
6063 tree index, value;
6064 HOST_WIDE_INT count = 0, zero_count = 0;
6065 need_to_clear = ! const_bounds_p;
6066
6067 /* This loop is a more accurate version of the loop in
6068 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6069 is also needed to check for missing elements. */
6070 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6071 {
6072 HOST_WIDE_INT this_node_count;
6073
6074 if (need_to_clear)
6075 break;
6076
6077 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6078 {
6079 tree lo_index = TREE_OPERAND (index, 0);
6080 tree hi_index = TREE_OPERAND (index, 1);
6081
6082 if (! tree_fits_uhwi_p (lo_index)
6083 || ! tree_fits_uhwi_p (hi_index))
6084 {
6085 need_to_clear = 1;
6086 break;
6087 }
6088
6089 this_node_count = (tree_to_uhwi (hi_index)
6090 - tree_to_uhwi (lo_index) + 1);
6091 }
6092 else
6093 this_node_count = 1;
6094
6095 count += this_node_count;
6096 if (mostly_zeros_p (value))
6097 zero_count += this_node_count;
6098 }
6099
6100 /* Clear the entire array first if there are any missing
6101 elements, or if the incidence of zero elements is >=
6102 75%. */
6103 if (! need_to_clear
6104 && (count < maxelt - minelt + 1
6105 || 4 * zero_count >= 3 * count))
6106 need_to_clear = 1;
6107 }
6108
6109 if (need_to_clear && size > 0)
6110 {
6111 if (REG_P (target))
6112 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6113 else
6114 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6115 cleared = 1;
6116 }
6117
6118 if (!cleared && REG_P (target))
6119 /* Inform later passes that the old value is dead. */
6120 emit_clobber (target);
6121
6122 /* Store each element of the constructor into the
6123 corresponding element of TARGET, determined by counting the
6124 elements. */
6125 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6126 {
6127 enum machine_mode mode;
6128 HOST_WIDE_INT bitsize;
6129 HOST_WIDE_INT bitpos;
6130 rtx xtarget = target;
6131
6132 if (cleared && initializer_zerop (value))
6133 continue;
6134
6135 mode = TYPE_MODE (elttype);
6136 if (mode == BLKmode)
6137 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6138 ? tree_to_uhwi (TYPE_SIZE (elttype))
6139 : -1);
6140 else
6141 bitsize = GET_MODE_BITSIZE (mode);
6142
6143 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6144 {
6145 tree lo_index = TREE_OPERAND (index, 0);
6146 tree hi_index = TREE_OPERAND (index, 1);
6147 rtx index_r, pos_rtx;
6148 HOST_WIDE_INT lo, hi, count;
6149 tree position;
6150
6151 /* If the range is constant and "small", unroll the loop. */
6152 if (const_bounds_p
6153 && tree_fits_shwi_p (lo_index)
6154 && tree_fits_shwi_p (hi_index)
6155 && (lo = tree_to_shwi (lo_index),
6156 hi = tree_to_shwi (hi_index),
6157 count = hi - lo + 1,
6158 (!MEM_P (target)
6159 || count <= 2
6160 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6161 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6162 <= 40 * 8)))))
6163 {
6164 lo -= minelt; hi -= minelt;
6165 for (; lo <= hi; lo++)
6166 {
6167 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6168
6169 if (MEM_P (target)
6170 && !MEM_KEEP_ALIAS_SET_P (target)
6171 && TREE_CODE (type) == ARRAY_TYPE
6172 && TYPE_NONALIASED_COMPONENT (type))
6173 {
6174 target = copy_rtx (target);
6175 MEM_KEEP_ALIAS_SET_P (target) = 1;
6176 }
6177
6178 store_constructor_field
6179 (target, bitsize, bitpos, mode, value, cleared,
6180 get_alias_set (elttype));
6181 }
6182 }
6183 else
6184 {
6185 rtx loop_start = gen_label_rtx ();
6186 rtx loop_end = gen_label_rtx ();
6187 tree exit_cond;
6188
6189 expand_normal (hi_index);
6190
6191 index = build_decl (EXPR_LOCATION (exp),
6192 VAR_DECL, NULL_TREE, domain);
6193 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6194 SET_DECL_RTL (index, index_r);
6195 store_expr (lo_index, index_r, 0, false);
6196
6197 /* Build the head of the loop. */
6198 do_pending_stack_adjust ();
6199 emit_label (loop_start);
6200
6201 /* Assign value to element index. */
6202 position =
6203 fold_convert (ssizetype,
6204 fold_build2 (MINUS_EXPR,
6205 TREE_TYPE (index),
6206 index,
6207 TYPE_MIN_VALUE (domain)));
6208
6209 position =
6210 size_binop (MULT_EXPR, position,
6211 fold_convert (ssizetype,
6212 TYPE_SIZE_UNIT (elttype)));
6213
6214 pos_rtx = expand_normal (position);
6215 xtarget = offset_address (target, pos_rtx,
6216 highest_pow2_factor (position));
6217 xtarget = adjust_address (xtarget, mode, 0);
6218 if (TREE_CODE (value) == CONSTRUCTOR)
6219 store_constructor (value, xtarget, cleared,
6220 bitsize / BITS_PER_UNIT);
6221 else
6222 store_expr (value, xtarget, 0, false);
6223
6224 /* Generate a conditional jump to exit the loop. */
6225 exit_cond = build2 (LT_EXPR, integer_type_node,
6226 index, hi_index);
6227 jumpif (exit_cond, loop_end, -1);
6228
6229 /* Update the loop counter, and jump to the head of
6230 the loop. */
6231 expand_assignment (index,
6232 build2 (PLUS_EXPR, TREE_TYPE (index),
6233 index, integer_one_node),
6234 false);
6235
6236 emit_jump (loop_start);
6237
6238 /* Build the end of the loop. */
6239 emit_label (loop_end);
6240 }
6241 }
6242 else if ((index != 0 && ! tree_fits_shwi_p (index))
6243 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6244 {
6245 tree position;
6246
6247 if (index == 0)
6248 index = ssize_int (1);
6249
6250 if (minelt)
6251 index = fold_convert (ssizetype,
6252 fold_build2 (MINUS_EXPR,
6253 TREE_TYPE (index),
6254 index,
6255 TYPE_MIN_VALUE (domain)));
6256
6257 position =
6258 size_binop (MULT_EXPR, index,
6259 fold_convert (ssizetype,
6260 TYPE_SIZE_UNIT (elttype)));
6261 xtarget = offset_address (target,
6262 expand_normal (position),
6263 highest_pow2_factor (position));
6264 xtarget = adjust_address (xtarget, mode, 0);
6265 store_expr (value, xtarget, 0, false);
6266 }
6267 else
6268 {
6269 if (index != 0)
6270 bitpos = ((tree_to_shwi (index) - minelt)
6271 * tree_to_uhwi (TYPE_SIZE (elttype)));
6272 else
6273 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6274
6275 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6276 && TREE_CODE (type) == ARRAY_TYPE
6277 && TYPE_NONALIASED_COMPONENT (type))
6278 {
6279 target = copy_rtx (target);
6280 MEM_KEEP_ALIAS_SET_P (target) = 1;
6281 }
6282 store_constructor_field (target, bitsize, bitpos, mode, value,
6283 cleared, get_alias_set (elttype));
6284 }
6285 }
6286 break;
6287 }
6288
6289 case VECTOR_TYPE:
6290 {
6291 unsigned HOST_WIDE_INT idx;
6292 constructor_elt *ce;
6293 int i;
6294 int need_to_clear;
6295 int icode = CODE_FOR_nothing;
6296 tree elttype = TREE_TYPE (type);
6297 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6298 enum machine_mode eltmode = TYPE_MODE (elttype);
6299 HOST_WIDE_INT bitsize;
6300 HOST_WIDE_INT bitpos;
6301 rtvec vector = NULL;
6302 unsigned n_elts;
6303 alias_set_type alias;
6304
6305 gcc_assert (eltmode != BLKmode);
6306
6307 n_elts = TYPE_VECTOR_SUBPARTS (type);
6308 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6309 {
6310 enum machine_mode mode = GET_MODE (target);
6311
6312 icode = (int) optab_handler (vec_init_optab, mode);
6313 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6314 if (icode != CODE_FOR_nothing)
6315 {
6316 tree value;
6317
6318 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6319 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6320 {
6321 icode = CODE_FOR_nothing;
6322 break;
6323 }
6324 }
6325 if (icode != CODE_FOR_nothing)
6326 {
6327 unsigned int i;
6328
6329 vector = rtvec_alloc (n_elts);
6330 for (i = 0; i < n_elts; i++)
6331 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6332 }
6333 }
6334
6335 /* If the constructor has fewer elements than the vector,
6336 clear the whole vector first. Similarly if this is a static
6337 constructor of a non-BLKmode object. */
6338 if (cleared)
6339 need_to_clear = 0;
6340 else if (REG_P (target) && TREE_STATIC (exp))
6341 need_to_clear = 1;
6342 else
6343 {
6344 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6345 tree value;
6346
6347 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6348 {
6349 int n_elts_here = tree_to_uhwi
6350 (int_const_binop (TRUNC_DIV_EXPR,
6351 TYPE_SIZE (TREE_TYPE (value)),
6352 TYPE_SIZE (elttype)));
6353
6354 count += n_elts_here;
6355 if (mostly_zeros_p (value))
6356 zero_count += n_elts_here;
6357 }
6358
6359 /* Clear the entire vector first if there are any missing elements,
6360 or if the incidence of zero elements is >= 75%. */
6361 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6362 }
6363
6364 if (need_to_clear && size > 0 && !vector)
6365 {
6366 if (REG_P (target))
6367 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6368 else
6369 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6370 cleared = 1;
6371 }
6372
6373 /* Inform later passes that the old value is dead. */
6374 if (!cleared && !vector && REG_P (target))
6375 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6376
6377 if (MEM_P (target))
6378 alias = MEM_ALIAS_SET (target);
6379 else
6380 alias = get_alias_set (elttype);
6381
6382 /* Store each element of the constructor into the corresponding
6383 element of TARGET, determined by counting the elements. */
6384 for (idx = 0, i = 0;
6385 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6386 idx++, i += bitsize / elt_size)
6387 {
6388 HOST_WIDE_INT eltpos;
6389 tree value = ce->value;
6390
6391 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6392 if (cleared && initializer_zerop (value))
6393 continue;
6394
6395 if (ce->index)
6396 eltpos = tree_to_uhwi (ce->index);
6397 else
6398 eltpos = i;
6399
6400 if (vector)
6401 {
6402 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6403 elements. */
6404 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6405 RTVEC_ELT (vector, eltpos)
6406 = expand_normal (value);
6407 }
6408 else
6409 {
6410 enum machine_mode value_mode =
6411 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6412 ? TYPE_MODE (TREE_TYPE (value))
6413 : eltmode;
6414 bitpos = eltpos * elt_size;
6415 store_constructor_field (target, bitsize, bitpos, value_mode,
6416 value, cleared, alias);
6417 }
6418 }
6419
6420 if (vector)
6421 emit_insn (GEN_FCN (icode)
6422 (target,
6423 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6424 break;
6425 }
6426
6427 default:
6428 gcc_unreachable ();
6429 }
6430 }
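/* For illustration, a local initialization such as

       int v[32] = { [3] = 1 };

   (assuming it reaches expansion as a single CONSTRUCTOR) takes the
   ARRAY_TYPE case above with one explicit element: the constructor has
   fewer elements than the array and is mostly zeros, so NEED_TO_CLEAR is
   set, the whole object is cleared with clear_storage, and only the
   nonzero element at index 3 is stored afterwards.  */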
6431
6432 /* Store the value of EXP (an expression tree)
6433 into a subfield of TARGET which has mode MODE and occupies
6434 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6435 If MODE is VOIDmode, it means that we are storing into a bit-field.
6436
6437 BITREGION_START is bitpos of the first bitfield in this region.
6438 BITREGION_END is the bitpos of the ending bitfield in this region.
6439 These two fields are 0, if the C++ memory model does not apply,
6440 or we are not interested in keeping track of bitfield regions.
6441
6442 Always return const0_rtx unless we have something particular to
6443 return.
6444
6445 ALIAS_SET is the alias set for the destination. This value will
6446 (in general) be different from that for TARGET, since TARGET is a
6447 reference to the containing structure.
6448
6449 If NONTEMPORAL is true, try generating a nontemporal store. */
6450
6451 static rtx
6452 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6453 unsigned HOST_WIDE_INT bitregion_start,
6454 unsigned HOST_WIDE_INT bitregion_end,
6455 enum machine_mode mode, tree exp,
6456 alias_set_type alias_set, bool nontemporal)
6457 {
6458 if (TREE_CODE (exp) == ERROR_MARK)
6459 return const0_rtx;
6460
6461 /* If we have nothing to store, do nothing unless the expression has
6462 side-effects. */
6463 if (bitsize == 0)
6464 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6465
6466 if (GET_CODE (target) == CONCAT)
6467 {
6468 /* We're storing into a struct containing a single __complex. */
6469
6470 gcc_assert (!bitpos);
6471 return store_expr (exp, target, 0, nontemporal);
6472 }
6473
6474 /* If the structure is in a register or if the component
6475 is a bit field, we cannot use addressing to access it.
6476 Use bit-field techniques or SUBREG to store in it. */
6477
6478 if (mode == VOIDmode
6479 || (mode != BLKmode && ! direct_store[(int) mode]
6480 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6481 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6482 || REG_P (target)
6483 || GET_CODE (target) == SUBREG
6484 /* If the field isn't aligned enough to store as an ordinary memref,
6485 store it as a bit field. */
6486 || (mode != BLKmode
6487 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6488 || bitpos % GET_MODE_ALIGNMENT (mode))
6489 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6490 || (bitpos % BITS_PER_UNIT != 0)))
6491 || (bitsize >= 0 && mode != BLKmode
6492 && GET_MODE_BITSIZE (mode) > bitsize)
6493 /* If the RHS and field are a constant size and the size of the
6494 RHS isn't the same size as the bitfield, we must use bitfield
6495 operations. */
6496 || (bitsize >= 0
6497 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6498 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6499 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6500 decl we must use bitfield operations. */
6501 || (bitsize >= 0
6502 && TREE_CODE (exp) == MEM_REF
6503 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6504 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6505 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6506 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6507 {
6508 rtx temp;
6509 gimple nop_def;
6510
6511 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6512 implies a mask operation. If the precision is the same size as
6513 the field we're storing into, that mask is redundant. This is
6514 particularly common with bit field assignments generated by the
6515 C front end. */
6516 nop_def = get_def_for_expr (exp, NOP_EXPR);
6517 if (nop_def)
6518 {
6519 tree type = TREE_TYPE (exp);
6520 if (INTEGRAL_TYPE_P (type)
6521 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6522 && bitsize == TYPE_PRECISION (type))
6523 {
6524 tree op = gimple_assign_rhs1 (nop_def);
6525 type = TREE_TYPE (op);
6526 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6527 exp = op;
6528 }
6529 }
6530
6531 temp = expand_normal (exp);
6532
6533 /* If BITSIZE is narrower than the size of the type of EXP
6534 we will be narrowing TEMP. Normally, what's wanted are the
6535 low-order bits. However, if EXP's type is a record and this is
6536 a big-endian machine, we want the upper BITSIZE bits. */
6537 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6538 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6539 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6540 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6541 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6542 NULL_RTX, 1);
6543
6544 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6545 if (mode != VOIDmode && mode != BLKmode
6546 && mode != TYPE_MODE (TREE_TYPE (exp)))
6547 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6548
6549 /* If the modes of TEMP and TARGET are both BLKmode, both
6550 must be in memory and BITPOS must be aligned on a byte
6551 boundary. If so, we simply do a block copy. Likewise
6552 for a BLKmode-like TARGET. */
6553 if (GET_MODE (temp) == BLKmode
6554 && (GET_MODE (target) == BLKmode
6555 || (MEM_P (target)
6556 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6557 && (bitpos % BITS_PER_UNIT) == 0
6558 && (bitsize % BITS_PER_UNIT) == 0)))
6559 {
6560 gcc_assert (MEM_P (target) && MEM_P (temp)
6561 && (bitpos % BITS_PER_UNIT) == 0);
6562
6563 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6564 emit_block_move (target, temp,
6565 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6566 / BITS_PER_UNIT),
6567 BLOCK_OP_NORMAL);
6568
6569 return const0_rtx;
6570 }
6571
6572 /* Handle calls that return values in multiple non-contiguous locations.
6573 The Irix 6 ABI has examples of this. */
6574 if (GET_CODE (temp) == PARALLEL)
6575 {
6576 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6577 rtx temp_target;
6578 if (mode == BLKmode)
6579 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6580 temp_target = gen_reg_rtx (mode);
6581 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6582 temp = temp_target;
6583 }
6584 else if (mode == BLKmode)
6585 {
6586 /* Handle calls that return BLKmode values in registers. */
6587 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6588 {
6589 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6590 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6591 temp = temp_target;
6592 }
6593 else
6594 {
6595 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6596 rtx temp_target;
6597 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6598 temp_target = gen_reg_rtx (mode);
6599 temp_target
6600 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6601 temp_target, mode, mode);
6602 temp = temp_target;
6603 }
6604 }
6605
6606 /* Store the value in the bitfield. */
6607 store_bit_field (target, bitsize, bitpos,
6608 bitregion_start, bitregion_end,
6609 mode, temp);
6610
6611 return const0_rtx;
6612 }
6613 else
6614 {
6615 /* Now build a reference to just the desired component. */
6616 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6617
6618 if (to_rtx == target)
6619 to_rtx = copy_rtx (to_rtx);
6620
6621 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6622 set_mem_alias_set (to_rtx, alias_set);
6623
6624 return store_expr (exp, to_rtx, 0, nontemporal);
6625 }
6626 }
6627 \f
6628 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6629 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6630 codes and find the ultimate containing object, which we return.
6631
6632 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6633 bit position, and *PUNSIGNEDP to the signedness of the field.
6634 If the position of the field is variable, we store a tree
6635 giving the variable offset (in units) in *POFFSET.
6636 This offset is in addition to the bit position.
6637 If the position is not variable, we store 0 in *POFFSET.
6638
6639 If any of the extraction expressions is volatile,
6640 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6641
6642 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6643 Otherwise, it is a mode that can be used to access the field.
6644
6645 If the field describes a variable-sized object, *PMODE is set to
6646 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6647 this case, but the address of the object can be found.
6648
6649 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6650 look through nodes that serve as markers of a greater alignment than
6651 the one that can be deduced from the expression. These nodes make it
6652 possible for front-ends to prevent temporaries from being created by
6653 the middle-end on alignment considerations. For that purpose, the
6654 normal operating mode at high-level is to always pass FALSE so that
6655 the ultimate containing object is really returned; moreover, the
6656 associated predicate handled_component_p will always return TRUE
6657 on these nodes, thus indicating that they are essentially handled
6658 by get_inner_reference. TRUE should only be passed when the caller
6659 is scanning the expression in order to build another representation
6660 and specifically knows how to handle these nodes; as such, this is
6661 the normal operating mode in the RTL expanders. */
6662
6663 tree
6664 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6665 HOST_WIDE_INT *pbitpos, tree *poffset,
6666 enum machine_mode *pmode, int *punsignedp,
6667 int *pvolatilep, bool keep_aligning)
6668 {
6669 tree size_tree = 0;
6670 enum machine_mode mode = VOIDmode;
6671 bool blkmode_bitfield = false;
6672 tree offset = size_zero_node;
6673 double_int bit_offset = double_int_zero;
6674
6675 /* First get the mode, signedness, and size. We do this from just the
6676 outermost expression. */
6677 *pbitsize = -1;
6678 if (TREE_CODE (exp) == COMPONENT_REF)
6679 {
6680 tree field = TREE_OPERAND (exp, 1);
6681 size_tree = DECL_SIZE (field);
6682 if (flag_strict_volatile_bitfields > 0
6683 && TREE_THIS_VOLATILE (exp)
6684 && DECL_BIT_FIELD_TYPE (field)
6685 && DECL_MODE (field) != BLKmode)
6686 /* Volatile bitfields should be accessed in the mode of the
6687 field's type, not the mode computed based on the bit
6688 size. */
6689 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6690 else if (!DECL_BIT_FIELD (field))
6691 mode = DECL_MODE (field);
6692 else if (DECL_MODE (field) == BLKmode)
6693 blkmode_bitfield = true;
6694
6695 *punsignedp = DECL_UNSIGNED (field);
6696 }
6697 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6698 {
6699 size_tree = TREE_OPERAND (exp, 1);
6700 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6701 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6702
6703 /* For vector types, with the correct size of access, use the mode of
6704 inner type. */
6705 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6706 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6707 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6708 mode = TYPE_MODE (TREE_TYPE (exp));
6709 }
6710 else
6711 {
6712 mode = TYPE_MODE (TREE_TYPE (exp));
6713 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6714
6715 if (mode == BLKmode)
6716 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6717 else
6718 *pbitsize = GET_MODE_BITSIZE (mode);
6719 }
6720
6721 if (size_tree != 0)
6722 {
6723 if (! tree_fits_uhwi_p (size_tree))
6724 mode = BLKmode, *pbitsize = -1;
6725 else
6726 *pbitsize = tree_to_uhwi (size_tree);
6727 }
6728
6729 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6730 and find the ultimate containing object. */
6731 while (1)
6732 {
6733 switch (TREE_CODE (exp))
6734 {
6735 case BIT_FIELD_REF:
6736 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6737 break;
6738
6739 case COMPONENT_REF:
6740 {
6741 tree field = TREE_OPERAND (exp, 1);
6742 tree this_offset = component_ref_field_offset (exp);
6743
6744 /* If this field hasn't been filled in yet, don't go past it.
6745 This should only happen when folding expressions made during
6746 type construction. */
6747 if (this_offset == 0)
6748 break;
6749
6750 offset = size_binop (PLUS_EXPR, offset, this_offset);
6751 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6752
6753 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6754 }
6755 break;
6756
6757 case ARRAY_REF:
6758 case ARRAY_RANGE_REF:
6759 {
6760 tree index = TREE_OPERAND (exp, 1);
6761 tree low_bound = array_ref_low_bound (exp);
6762 tree unit_size = array_ref_element_size (exp);
6763
6764 /* We assume all arrays have sizes that are a multiple of a byte.
6765 First subtract the lower bound, if any, in the type of the
6766 index, then convert to sizetype and multiply by the size of
6767 the array element. */
6768 if (! integer_zerop (low_bound))
6769 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6770 index, low_bound);
6771
6772 offset = size_binop (PLUS_EXPR, offset,
6773 size_binop (MULT_EXPR,
6774 fold_convert (sizetype, index),
6775 unit_size));
6776 }
6777 break;
6778
6779 case REALPART_EXPR:
6780 break;
6781
6782 case IMAGPART_EXPR:
6783 bit_offset += double_int::from_uhwi (*pbitsize);
6784 break;
6785
6786 case VIEW_CONVERT_EXPR:
6787 if (keep_aligning && STRICT_ALIGNMENT
6788 && (TYPE_ALIGN (TREE_TYPE (exp))
6789 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6790 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6791 < BIGGEST_ALIGNMENT)
6792 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6793 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6794 goto done;
6795 break;
6796
6797 case MEM_REF:
6798 /* Hand back the decl for MEM[&decl, off]. */
6799 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6800 {
6801 tree off = TREE_OPERAND (exp, 1);
6802 if (!integer_zerop (off))
6803 {
6804 double_int boff, coff = mem_ref_offset (exp);
6805 boff = coff.lshift (BITS_PER_UNIT == 8
6806 ? 3 : exact_log2 (BITS_PER_UNIT));
6807 bit_offset += boff;
6808 }
6809 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6810 }
6811 goto done;
6812
6813 default:
6814 goto done;
6815 }
6816
6817 /* If any reference in the chain is volatile, the effect is volatile. */
6818 if (TREE_THIS_VOLATILE (exp))
6819 *pvolatilep = 1;
6820
6821 exp = TREE_OPERAND (exp, 0);
6822 }
6823 done:
6824
6825 /* If OFFSET is constant, see if we can return the whole thing as a
6826 constant bit position. Make sure to handle overflow during
6827 this conversion. */
6828 if (TREE_CODE (offset) == INTEGER_CST)
6829 {
6830 double_int tem = tree_to_double_int (offset);
6831 tem = tem.sext (TYPE_PRECISION (sizetype));
6832 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6833 tem += bit_offset;
6834 if (tem.fits_shwi ())
6835 {
6836 *pbitpos = tem.to_shwi ();
6837 *poffset = offset = NULL_TREE;
6838 }
6839 }
6840
6841 /* Otherwise, split it up. */
6842 if (offset)
6843 {
6844 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6845 if (bit_offset.is_negative ())
6846 {
6847 double_int mask
6848 = double_int::mask (BITS_PER_UNIT == 8
6849 ? 3 : exact_log2 (BITS_PER_UNIT));
6850 double_int tem = bit_offset.and_not (mask);
6851 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6852 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6853 bit_offset -= tem;
6854 tem = tem.arshift (BITS_PER_UNIT == 8
6855 ? 3 : exact_log2 (BITS_PER_UNIT),
6856 HOST_BITS_PER_DOUBLE_INT);
6857 offset = size_binop (PLUS_EXPR, offset,
6858 double_int_to_tree (sizetype, tem));
6859 }
6860
6861 *pbitpos = bit_offset.to_shwi ();
6862 *poffset = offset;
6863 }
6864
6865 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6866 if (mode == VOIDmode
6867 && blkmode_bitfield
6868 && (*pbitpos % BITS_PER_UNIT) == 0
6869 && (*pbitsize % BITS_PER_UNIT) == 0)
6870 *pmode = BLKmode;
6871 else
6872 *pmode = mode;
6873
6874 return exp;
6875 }
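/* Worked example, assuming 32-bit int, 8-bit bytes and a typical target:
   for the reference S.B in

       struct { int a; int b; } s;

   get_inner_reference returns the decl S with *PBITSIZE = 32,
   *PBITPOS = 32 (B starts 32 bits into the structure), *POFFSET =
   NULL_TREE because the position is constant, and *PMODE = SImode.
   For a reference with a variable index such as S2.ARR[I], the constant
   part of the position stays in *PBITPOS while the I * element-size
   part comes back as the *POFFSET tree.  */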
6876
6877 /* Return a tree of sizetype representing the size, in bytes, of the element
6878 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6879
6880 tree
6881 array_ref_element_size (tree exp)
6882 {
6883 tree aligned_size = TREE_OPERAND (exp, 3);
6884 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6885 location_t loc = EXPR_LOCATION (exp);
6886
6887 /* If a size was specified in the ARRAY_REF, it's the size measured
6888 in alignment units of the element type. So multiply by that value. */
6889 if (aligned_size)
6890 {
6891 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6892 sizetype from another type of the same width and signedness. */
6893 if (TREE_TYPE (aligned_size) != sizetype)
6894 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6895 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6896 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6897 }
6898
6899 /* Otherwise, take the size from that of the element type. Substitute
6900 any PLACEHOLDER_EXPR that we have. */
6901 else
6902 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6903 }
6904
6905 /* Return a tree representing the lower bound of the array mentioned in
6906 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6907
6908 tree
6909 array_ref_low_bound (tree exp)
6910 {
6911 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6912
6913 /* If a lower bound is specified in EXP, use it. */
6914 if (TREE_OPERAND (exp, 2))
6915 return TREE_OPERAND (exp, 2);
6916
6917 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6918 substituting for a PLACEHOLDER_EXPR as needed. */
6919 if (domain_type && TYPE_MIN_VALUE (domain_type))
6920 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6921
6922 /* Otherwise, return a zero of the appropriate type. */
6923 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6924 }
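/* Note, for illustration: C arrays always have a zero lower bound, so
   for C input this normally returns 0.  The TYPE_MIN_VALUE path matters
   for front ends such as Fortran or Ada, where an array declared with
   bounds like 1..N must have that lower bound subtracted from the index
   before it is scaled by the element size (see the ARRAY_REF case of
   get_inner_reference above).  */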
6925
6926 /* Returns true if REF is an array reference to an array at the end of
6927 a structure. If this is the case, the array may be allocated larger
6928 than its upper bound implies. */
6929
6930 bool
6931 array_at_struct_end_p (tree ref)
6932 {
6933 if (TREE_CODE (ref) != ARRAY_REF
6934 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6935 return false;
6936
6937 while (handled_component_p (ref))
6938 {
6939 /* If the reference chain contains a component reference to a
6940 non-union type and another field follows it, the reference
6941 is not at the end of a structure. */
6942 if (TREE_CODE (ref) == COMPONENT_REF
6943 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6944 {
6945 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6946 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6947 nextf = DECL_CHAIN (nextf);
6948 if (nextf)
6949 return false;
6950 }
6951
6952 ref = TREE_OPERAND (ref, 0);
6953 }
6954
6955 /* If the reference is based on a declared entity, the size of the array
6956 is constrained by its given domain. */
6957 if (DECL_P (ref))
6958 return false;
6959
6960 return true;
6961 }
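/* For illustration, given

       struct msg { int len; char buf[1]; } *p;

   the reference P->BUF[10] is an ARRAY_REF whose base is a MEM_REF
   rather than a declared variable, and BUF is the last field, so
   array_at_struct_end_p returns true: the object may really have been
   allocated with more than one trailing byte.  The same access through
   a declared 'struct msg m' is based on a DECL and yields false.  */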
6962
6963 /* Return a tree representing the upper bound of the array mentioned in
6964 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6965
6966 tree
6967 array_ref_up_bound (tree exp)
6968 {
6969 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6970
6971 /* If there is a domain type and it has an upper bound, use it, substituting
6972 for a PLACEHOLDER_EXPR as needed. */
6973 if (domain_type && TYPE_MAX_VALUE (domain_type))
6974 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6975
6976 /* Otherwise fail. */
6977 return NULL_TREE;
6978 }
6979
6980 /* Return a tree representing the offset, in bytes, of the field referenced
6981 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6982
6983 tree
6984 component_ref_field_offset (tree exp)
6985 {
6986 tree aligned_offset = TREE_OPERAND (exp, 2);
6987 tree field = TREE_OPERAND (exp, 1);
6988 location_t loc = EXPR_LOCATION (exp);
6989
6990 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6991 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6992 value. */
6993 if (aligned_offset)
6994 {
6995 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6996 sizetype from another type of the same width and signedness. */
6997 if (TREE_TYPE (aligned_offset) != sizetype)
6998 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6999 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7000 size_int (DECL_OFFSET_ALIGN (field)
7001 / BITS_PER_UNIT));
7002 }
7003
7004 /* Otherwise, take the offset from that of the field. Substitute
7005 any PLACEHOLDER_EXPR that we have. */
7006 else
7007 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7008 }
7009
7010 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7011
7012 static unsigned HOST_WIDE_INT
7013 target_align (const_tree target)
7014 {
7015 /* We might have a chain of nested references with intermediate misaligning
7016 bitfield components, so we need to recurse to find out. */
7017
7018 unsigned HOST_WIDE_INT this_align, outer_align;
7019
7020 switch (TREE_CODE (target))
7021 {
7022 case BIT_FIELD_REF:
7023 return 1;
7024
7025 case COMPONENT_REF:
7026 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7027 outer_align = target_align (TREE_OPERAND (target, 0));
7028 return MIN (this_align, outer_align);
7029
7030 case ARRAY_REF:
7031 case ARRAY_RANGE_REF:
7032 this_align = TYPE_ALIGN (TREE_TYPE (target));
7033 outer_align = target_align (TREE_OPERAND (target, 0));
7034 return MIN (this_align, outer_align);
7035
7036 CASE_CONVERT:
7037 case NON_LVALUE_EXPR:
7038 case VIEW_CONVERT_EXPR:
7039 this_align = TYPE_ALIGN (TREE_TYPE (target));
7040 outer_align = target_align (TREE_OPERAND (target, 0));
7041 return MAX (this_align, outer_align);
7042
7043 default:
7044 return TYPE_ALIGN (TREE_TYPE (target));
7045 }
7046 }
7047
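/* Illustrative sketch (hypothetical target `s.f.word'): the result is
   MIN (DECL_ALIGN (word), MIN (DECL_ALIGN (f), TYPE_ALIGN (type of s))),
   so a single packed or under-aligned component anywhere in the chain
   lowers the alignment assumed for the whole store, while a
   BIT_FIELD_REF is pessimistically treated as 1-bit aligned.  */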
7048 \f
7049 /* Given an rtx VALUE that may contain additions and multiplications, return
7050 an equivalent value that just refers to a register, memory, or constant.
7051 This is done by generating instructions to perform the arithmetic and
7052 returning a pseudo-register containing the value.
7053
7054 The returned value may be a REG, SUBREG, MEM or constant. */
7055
7056 rtx
7057 force_operand (rtx value, rtx target)
7058 {
7059 rtx op1, op2;
7060 /* Use subtarget as the target for operand 0 of a binary operation. */
7061 rtx subtarget = get_subtarget (target);
7062 enum rtx_code code = GET_CODE (value);
7063
7064 /* Check for subreg applied to an expression produced by loop optimizer. */
7065 if (code == SUBREG
7066 && !REG_P (SUBREG_REG (value))
7067 && !MEM_P (SUBREG_REG (value)))
7068 {
7069 value
7070 = simplify_gen_subreg (GET_MODE (value),
7071 force_reg (GET_MODE (SUBREG_REG (value)),
7072 force_operand (SUBREG_REG (value),
7073 NULL_RTX)),
7074 GET_MODE (SUBREG_REG (value)),
7075 SUBREG_BYTE (value));
7076 code = GET_CODE (value);
7077 }
7078
7079 /* Check for a PIC address load. */
7080 if ((code == PLUS || code == MINUS)
7081 && XEXP (value, 0) == pic_offset_table_rtx
7082 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7083 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7084 || GET_CODE (XEXP (value, 1)) == CONST))
7085 {
7086 if (!subtarget)
7087 subtarget = gen_reg_rtx (GET_MODE (value));
7088 emit_move_insn (subtarget, value);
7089 return subtarget;
7090 }
7091
7092 if (ARITHMETIC_P (value))
7093 {
7094 op2 = XEXP (value, 1);
7095 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7096 subtarget = 0;
7097 if (code == MINUS && CONST_INT_P (op2))
7098 {
7099 code = PLUS;
7100 op2 = negate_rtx (GET_MODE (value), op2);
7101 }
7102
7103 /* Check for an addition with OP2 a constant integer and our first
7104 operand a PLUS of a virtual register and something else. In that
7105 case, we want to emit the sum of the virtual register and the
7106 constant first and then add the other value. This allows virtual
7107 register instantiation to simply modify the constant rather than
7108 creating another one around this addition. */
7109 if (code == PLUS && CONST_INT_P (op2)
7110 && GET_CODE (XEXP (value, 0)) == PLUS
7111 && REG_P (XEXP (XEXP (value, 0), 0))
7112 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7113 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7114 {
7115 rtx temp = expand_simple_binop (GET_MODE (value), code,
7116 XEXP (XEXP (value, 0), 0), op2,
7117 subtarget, 0, OPTAB_LIB_WIDEN);
7118 return expand_simple_binop (GET_MODE (value), code, temp,
7119 force_operand (XEXP (XEXP (value,
7120 0), 1), 0),
7121 target, 0, OPTAB_LIB_WIDEN);
7122 }
7123
7124 op1 = force_operand (XEXP (value, 0), subtarget);
7125 op2 = force_operand (op2, NULL_RTX);
7126 switch (code)
7127 {
7128 case MULT:
7129 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7130 case DIV:
7131 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7132 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7133 target, 1, OPTAB_LIB_WIDEN);
7134 else
7135 return expand_divmod (0,
7136 FLOAT_MODE_P (GET_MODE (value))
7137 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7138 GET_MODE (value), op1, op2, target, 0);
7139 case MOD:
7140 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7141 target, 0);
7142 case UDIV:
7143 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7144 target, 1);
7145 case UMOD:
7146 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7147 target, 1);
7148 case ASHIFTRT:
7149 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7150 target, 0, OPTAB_LIB_WIDEN);
7151 default:
7152 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7153 target, 1, OPTAB_LIB_WIDEN);
7154 }
7155 }
7156 if (UNARY_P (value))
7157 {
7158 if (!target)
7159 target = gen_reg_rtx (GET_MODE (value));
7160 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7161 switch (code)
7162 {
7163 case ZERO_EXTEND:
7164 case SIGN_EXTEND:
7165 case TRUNCATE:
7166 case FLOAT_EXTEND:
7167 case FLOAT_TRUNCATE:
7168 convert_move (target, op1, code == ZERO_EXTEND);
7169 return target;
7170
7171 case FIX:
7172 case UNSIGNED_FIX:
7173 expand_fix (target, op1, code == UNSIGNED_FIX);
7174 return target;
7175
7176 case FLOAT:
7177 case UNSIGNED_FLOAT:
7178 expand_float (target, op1, code == UNSIGNED_FLOAT);
7179 return target;
7180
7181 default:
7182 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7183 }
7184 }
7185
7186 #ifdef INSN_SCHEDULING
7187 /* On machines that have insn scheduling, we want all memory references to be
7188 explicit, so we need to deal with such paradoxical SUBREGs. */
7189 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7190 value
7191 = simplify_gen_subreg (GET_MODE (value),
7192 force_reg (GET_MODE (SUBREG_REG (value)),
7193 force_operand (SUBREG_REG (value),
7194 NULL_RTX)),
7195 GET_MODE (SUBREG_REG (value)),
7196 SUBREG_BYTE (value));
7197 #endif
7198
7199 return value;
7200 }
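/* A minimal usage sketch (BASE is a hypothetical pseudo register;
   assumes the usual expand-time context):

     rtx sum = gen_rtx_PLUS (Pmode, base, GEN_INT (16));
     rtx addr = force_operand (sum, NULL_RTX);

   ADDR is then a REG, SUBREG, MEM or constant usable directly as an
   operand, with the addition emitted as a separate insn if needed.  */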
7201 \f
7202 /* Subroutine of expand_expr: return nonzero iff there is no way that
7203 EXP can reference X, which is being modified. TOP_P is nonzero if this
7204 call is going to be used to determine whether we need a temporary
7205 for EXP, as opposed to a recursive call to this function.
7206
7207 It is always safe for this routine to return zero since it merely
7208 searches for optimization opportunities. */
7209
7210 int
7211 safe_from_p (const_rtx x, tree exp, int top_p)
7212 {
7213 rtx exp_rtl = 0;
7214 int i, nops;
7215
7216 if (x == 0
7217 /* If EXP has varying size, we MUST use a target since we currently
7218 have no way of allocating temporaries of variable size
7219 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7220 So we assume here that something at a higher level has prevented a
7221 clash. This is somewhat bogus, but the best we can do. Only
7222 do this when X is BLKmode and when we are at the top level. */
7223 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7224 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7225 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7226 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7227 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7228 != INTEGER_CST)
7229 && GET_MODE (x) == BLKmode)
7230 /* If X is in the outgoing argument area, it is always safe. */
7231 || (MEM_P (x)
7232 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7233 || (GET_CODE (XEXP (x, 0)) == PLUS
7234 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7235 return 1;
7236
7237 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7238 find the underlying pseudo. */
7239 if (GET_CODE (x) == SUBREG)
7240 {
7241 x = SUBREG_REG (x);
7242 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7243 return 0;
7244 }
7245
7246 /* Now look at our tree code and possibly recurse. */
7247 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7248 {
7249 case tcc_declaration:
7250 exp_rtl = DECL_RTL_IF_SET (exp);
7251 break;
7252
7253 case tcc_constant:
7254 return 1;
7255
7256 case tcc_exceptional:
7257 if (TREE_CODE (exp) == TREE_LIST)
7258 {
7259 while (1)
7260 {
7261 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7262 return 0;
7263 exp = TREE_CHAIN (exp);
7264 if (!exp)
7265 return 1;
7266 if (TREE_CODE (exp) != TREE_LIST)
7267 return safe_from_p (x, exp, 0);
7268 }
7269 }
7270 else if (TREE_CODE (exp) == CONSTRUCTOR)
7271 {
7272 constructor_elt *ce;
7273 unsigned HOST_WIDE_INT idx;
7274
7275 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7276 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7277 || !safe_from_p (x, ce->value, 0))
7278 return 0;
7279 return 1;
7280 }
7281 else if (TREE_CODE (exp) == ERROR_MARK)
7282 return 1; /* An already-visited SAVE_EXPR? */
7283 else
7284 return 0;
7285
7286 case tcc_statement:
7287 /* The only case we look at here is the DECL_INITIAL inside a
7288 DECL_EXPR. */
7289 return (TREE_CODE (exp) != DECL_EXPR
7290 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7291 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7292 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7293
7294 case tcc_binary:
7295 case tcc_comparison:
7296 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7297 return 0;
7298 /* Fall through. */
7299
7300 case tcc_unary:
7301 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7302
7303 case tcc_expression:
7304 case tcc_reference:
7305 case tcc_vl_exp:
7306 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7307 the expression. If it is set, we conflict iff we are that rtx or
7308 both are in memory. Otherwise, we check all operands of the
7309 expression recursively. */
7310
7311 switch (TREE_CODE (exp))
7312 {
7313 case ADDR_EXPR:
7314 /* If the operand is static or we are static, we can't conflict.
7315 Likewise if we don't conflict with the operand at all. */
7316 if (staticp (TREE_OPERAND (exp, 0))
7317 || TREE_STATIC (exp)
7318 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7319 return 1;
7320
7321 /* Otherwise, the only way this can conflict is if we are taking
7322 the address of a DECL whose address is part of X, which is
7323 very rare. */
7324 exp = TREE_OPERAND (exp, 0);
7325 if (DECL_P (exp))
7326 {
7327 if (!DECL_RTL_SET_P (exp)
7328 || !MEM_P (DECL_RTL (exp)))
7329 return 0;
7330 else
7331 exp_rtl = XEXP (DECL_RTL (exp), 0);
7332 }
7333 break;
7334
7335 case MEM_REF:
7336 if (MEM_P (x)
7337 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7338 get_alias_set (exp)))
7339 return 0;
7340 break;
7341
7342 case CALL_EXPR:
7343 /* Assume that the call will clobber all hard registers and
7344 all of memory. */
7345 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7346 || MEM_P (x))
7347 return 0;
7348 break;
7349
7350 case WITH_CLEANUP_EXPR:
7351 case CLEANUP_POINT_EXPR:
7352 /* Lowered by gimplify.c. */
7353 gcc_unreachable ();
7354
7355 case SAVE_EXPR:
7356 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7357
7358 default:
7359 break;
7360 }
7361
7362 /* If we have an rtx, we do not need to scan our operands. */
7363 if (exp_rtl)
7364 break;
7365
7366 nops = TREE_OPERAND_LENGTH (exp);
7367 for (i = 0; i < nops; i++)
7368 if (TREE_OPERAND (exp, i) != 0
7369 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7370 return 0;
7371
7372 break;
7373
7374 case tcc_type:
7375 /* Should never get a type here. */
7376 gcc_unreachable ();
7377 }
7378
7379 /* If we have an rtl, find any enclosed object. Then see if we conflict
7380 with it. */
7381 if (exp_rtl)
7382 {
7383 if (GET_CODE (exp_rtl) == SUBREG)
7384 {
7385 exp_rtl = SUBREG_REG (exp_rtl);
7386 if (REG_P (exp_rtl)
7387 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7388 return 0;
7389 }
7390
7391 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7392 are memory and they conflict. */
7393 return ! (rtx_equal_p (x, exp_rtl)
7394 || (MEM_P (x) && MEM_P (exp_rtl)
7395 && true_dependence (exp_rtl, VOIDmode, x)));
7396 }
7397
7398 /* If we reach here, it is safe. */
7399 return 1;
7400 }
7401
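/* Usage sketch: expand_operands below does

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   before reusing TARGET for the first operand, so a zero return never
   produces wrong code; it merely forces a fresh temporary.  */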
7402 \f
7403 /* Return the highest power of two that EXP is known to be a multiple of.
7404 This is used in updating alignment of MEMs in array references. */
7405
7406 unsigned HOST_WIDE_INT
7407 highest_pow2_factor (const_tree exp)
7408 {
7409 unsigned HOST_WIDE_INT ret;
7410 int trailing_zeros = tree_ctz (exp);
7411 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7412 return BIGGEST_ALIGNMENT;
7413 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7414 if (ret > BIGGEST_ALIGNMENT)
7415 return BIGGEST_ALIGNMENT;
7416 return ret;
7417 }
7418
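/* Worked example: for a constant offset of 24 (binary 11000) tree_ctz
   returns 3, so the result is 1 << 3 == 8; anything larger is capped at
   BIGGEST_ALIGNMENT, since no MEM can usefully claim more alignment
   than that.  */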
7419 /* Similar, except that the alignment requirements of TARGET are
7420 taken into account. Assume it is at least as aligned as its
7421 type, unless it is a COMPONENT_REF in which case the layout of
7422 the structure gives the alignment. */
7423
7424 static unsigned HOST_WIDE_INT
7425 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7426 {
7427 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7428 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7429
7430 return MAX (factor, talign);
7431 }
7432 \f
7433 #ifdef HAVE_conditional_move
7434 /* Convert the tree comparison code TCODE to the rtl one where the
7435 signedness is UNSIGNEDP. */
7436
7437 static enum rtx_code
7438 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7439 {
7440 enum rtx_code code;
7441 switch (tcode)
7442 {
7443 case EQ_EXPR:
7444 code = EQ;
7445 break;
7446 case NE_EXPR:
7447 code = NE;
7448 break;
7449 case LT_EXPR:
7450 code = unsignedp ? LTU : LT;
7451 break;
7452 case LE_EXPR:
7453 code = unsignedp ? LEU : LE;
7454 break;
7455 case GT_EXPR:
7456 code = unsignedp ? GTU : GT;
7457 break;
7458 case GE_EXPR:
7459 code = unsignedp ? GEU : GE;
7460 break;
7461 case UNORDERED_EXPR:
7462 code = UNORDERED;
7463 break;
7464 case ORDERED_EXPR:
7465 code = ORDERED;
7466 break;
7467 case UNLT_EXPR:
7468 code = UNLT;
7469 break;
7470 case UNLE_EXPR:
7471 code = UNLE;
7472 break;
7473 case UNGT_EXPR:
7474 code = UNGT;
7475 break;
7476 case UNGE_EXPR:
7477 code = UNGE;
7478 break;
7479 case UNEQ_EXPR:
7480 code = UNEQ;
7481 break;
7482 case LTGT_EXPR:
7483 code = LTGT;
7484 break;
7485
7486 default:
7487 gcc_unreachable ();
7488 }
7489 return code;
7490 }
7491 #endif
7492
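/* Illustrative mapping (sketch): a signed `a < b' (LT_EXPR with
   UNSIGNEDP == 0) becomes LT, the same tree code with UNSIGNEDP != 0
   becomes LTU, and the IEEE unordered comparisons such as UNLT_EXPR
   map one-to-one onto their rtl counterparts.  */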
7493 /* Subroutine of expand_expr. Expand the two operands of a binary
7494 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7495 The value may be stored in TARGET if TARGET is nonzero. The
7496 MODIFIER argument is as documented by expand_expr. */
7497
7498 static void
7499 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7500 enum expand_modifier modifier)
7501 {
7502 if (! safe_from_p (target, exp1, 1))
7503 target = 0;
7504 if (operand_equal_p (exp0, exp1, 0))
7505 {
7506 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7507 *op1 = copy_rtx (*op0);
7508 }
7509 else
7510 {
7511 /* If we need to preserve evaluation order, copy exp0 into its own
7512 temporary variable so that it can't be clobbered by exp1. */
7513 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7514 exp0 = save_expr (exp0);
7515 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7516 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7517 }
7518 }
7519
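/* Usage sketch (hypothetical operands, following the pattern used
   throughout expand_expr_real_2 below):

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);
     temp = expand_binop (mode, this_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);

   Note that when the two trees are operand_equal_p, only one expansion
   is performed and OP1 is a copy of OP0.  */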
7520 \f
7521 /* Return a MEM that contains constant EXP. DEFER is as for
7522 output_constant_def and MODIFIER is as for expand_expr. */
7523
7524 static rtx
7525 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7526 {
7527 rtx mem;
7528
7529 mem = output_constant_def (exp, defer);
7530 if (modifier != EXPAND_INITIALIZER)
7531 mem = use_anchored_address (mem);
7532 return mem;
7533 }
7534
7535 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7536 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7537
7538 static rtx
7539 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7540 enum expand_modifier modifier, addr_space_t as)
7541 {
7542 rtx result, subtarget;
7543 tree inner, offset;
7544 HOST_WIDE_INT bitsize, bitpos;
7545 int volatilep, unsignedp;
7546 enum machine_mode mode1;
7547
7548 /* If we are taking the address of a constant and are at the top level,
7549 we have to use output_constant_def since we can't call force_const_mem
7550 at top level. */
7551 /* ??? This should be considered a front-end bug. We should not be
7552 generating ADDR_EXPR of something that isn't an LVALUE. The only
7553 exception here is STRING_CST. */
7554 if (CONSTANT_CLASS_P (exp))
7555 {
7556 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7557 if (modifier < EXPAND_SUM)
7558 result = force_operand (result, target);
7559 return result;
7560 }
7561
7562 /* Everything must be something allowed by is_gimple_addressable. */
7563 switch (TREE_CODE (exp))
7564 {
7565 case INDIRECT_REF:
7566 /* This case will happen via recursion for &a->b. */
7567 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7568
7569 case MEM_REF:
7570 {
7571 tree tem = TREE_OPERAND (exp, 0);
7572 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7573 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7574 return expand_expr (tem, target, tmode, modifier);
7575 }
7576
7577 case CONST_DECL:
7578 /* Expand the initializer like constants above. */
7579 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7580 0, modifier), 0);
7581 if (modifier < EXPAND_SUM)
7582 result = force_operand (result, target);
7583 return result;
7584
7585 case REALPART_EXPR:
7586 /* The real part of the complex number is always first, therefore
7587 the address is the same as the address of the parent object. */
7588 offset = 0;
7589 bitpos = 0;
7590 inner = TREE_OPERAND (exp, 0);
7591 break;
7592
7593 case IMAGPART_EXPR:
7594 /* The imaginary part of the complex number is always second.
7595 The expression is therefore always offset by the size of the
7596 scalar type. */
7597 offset = 0;
7598 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7599 inner = TREE_OPERAND (exp, 0);
7600 break;
7601
7602 case COMPOUND_LITERAL_EXPR:
7603 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7604 rtl_for_decl_init is called on DECL_INITIAL with
7605 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7606 if (modifier == EXPAND_INITIALIZER
7607 && COMPOUND_LITERAL_EXPR_DECL (exp))
7608 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7609 target, tmode, modifier, as);
7610 /* FALLTHRU */
7611 default:
7612 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7613 expand_expr, as that can have various side effects; LABEL_DECLs for
7614 example, may not have their DECL_RTL set yet. Expand the rtl of
7615 CONSTRUCTORs too, which should yield a memory reference for the
7616 constructor's contents. Assume language specific tree nodes can
7617 be expanded in some interesting way. */
7618 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7619 if (DECL_P (exp)
7620 || TREE_CODE (exp) == CONSTRUCTOR
7621 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7622 {
7623 result = expand_expr (exp, target, tmode,
7624 modifier == EXPAND_INITIALIZER
7625 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7626
7627 /* If the DECL isn't in memory, then the DECL wasn't properly
7628 marked TREE_ADDRESSABLE, which will be either a front-end
7629 or a tree optimizer bug. */
7630
7631 if (TREE_ADDRESSABLE (exp)
7632 && ! MEM_P (result)
7633 && ! targetm.calls.allocate_stack_slots_for_args ())
7634 {
7635 error ("local frame unavailable (naked function?)");
7636 return result;
7637 }
7638 else
7639 gcc_assert (MEM_P (result));
7640 result = XEXP (result, 0);
7641
7642 /* ??? Is this needed anymore? */
7643 if (DECL_P (exp))
7644 TREE_USED (exp) = 1;
7645
7646 if (modifier != EXPAND_INITIALIZER
7647 && modifier != EXPAND_CONST_ADDRESS
7648 && modifier != EXPAND_SUM)
7649 result = force_operand (result, target);
7650 return result;
7651 }
7652
7653 /* Pass FALSE as the last argument to get_inner_reference although
7654 we are expanding to RTL. The rationale is that we know how to
7655 handle "aligning nodes" here: we can just bypass them because
7656 they won't change the final object whose address will be returned
7657 (they actually exist only for that purpose). */
7658 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7659 &mode1, &unsignedp, &volatilep, false);
7660 break;
7661 }
7662
7663 /* We must have made progress. */
7664 gcc_assert (inner != exp);
7665
7666 subtarget = offset || bitpos ? NULL_RTX : target;
7667 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7668 inner alignment, force the inner to be sufficiently aligned. */
7669 if (CONSTANT_CLASS_P (inner)
7670 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7671 {
7672 inner = copy_node (inner);
7673 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7674 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7675 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7676 }
7677 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7678
7679 if (offset)
7680 {
7681 rtx tmp;
7682
7683 if (modifier != EXPAND_NORMAL)
7684 result = force_operand (result, NULL);
7685 tmp = expand_expr (offset, NULL_RTX, tmode,
7686 modifier == EXPAND_INITIALIZER
7687 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7688
7689 result = convert_memory_address_addr_space (tmode, result, as);
7690 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7691
7692 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7693 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7694 else
7695 {
7696 subtarget = bitpos ? NULL_RTX : target;
7697 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7698 1, OPTAB_LIB_WIDEN);
7699 }
7700 }
7701
7702 if (bitpos)
7703 {
7704 /* Someone beforehand should have rejected taking the address
7705 of such an object. */
7706 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7707
7708 result = convert_memory_address_addr_space (tmode, result, as);
7709 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7710 if (modifier < EXPAND_SUM)
7711 result = force_operand (result, target);
7712 }
7713
7714 return result;
7715 }
7716
7717 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7718 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7719
7720 static rtx
7721 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7722 enum expand_modifier modifier)
7723 {
7724 addr_space_t as = ADDR_SPACE_GENERIC;
7725 enum machine_mode address_mode = Pmode;
7726 enum machine_mode pointer_mode = ptr_mode;
7727 enum machine_mode rmode;
7728 rtx result;
7729
7730 /* Target mode of VOIDmode says "whatever's natural". */
7731 if (tmode == VOIDmode)
7732 tmode = TYPE_MODE (TREE_TYPE (exp));
7733
7734 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7735 {
7736 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7737 address_mode = targetm.addr_space.address_mode (as);
7738 pointer_mode = targetm.addr_space.pointer_mode (as);
7739 }
7740
7741 /* We can get called with some Weird Things if the user does silliness
7742 like "(short) &a". In that case, convert_memory_address won't do
7743 the right thing, so ignore the given target mode. */
7744 if (tmode != address_mode && tmode != pointer_mode)
7745 tmode = address_mode;
7746
7747 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7748 tmode, modifier, as);
7749
7750 /* Despite expand_expr's claim that TMODE may be ignored when not
7751 strictly convenient, things break if we don't honor it. Note
7752 that combined with the above, we only do this for pointer modes. */
7753 rmode = GET_MODE (result);
7754 if (rmode == VOIDmode)
7755 rmode = tmode;
7756 if (rmode != tmode)
7757 result = convert_memory_address_addr_space (tmode, result, as);
7758
7759 return result;
7760 }
7761
7762 /* Generate code for computing CONSTRUCTOR EXP.
7763 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7764 is TRUE, NULL is returned instead of creating a temporary
7765 variable in memory, and the caller needs to handle it differently. */
7766
7767 static rtx
7768 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7769 bool avoid_temp_mem)
7770 {
7771 tree type = TREE_TYPE (exp);
7772 enum machine_mode mode = TYPE_MODE (type);
7773
7774 /* Try to avoid creating a temporary at all. This is possible
7775 if all of the initializer is zero.
7776 FIXME: try to handle all [0..255] initializers we can handle
7777 with memset. */
7778 if (TREE_STATIC (exp)
7779 && !TREE_ADDRESSABLE (exp)
7780 && target != 0 && mode == BLKmode
7781 && all_zeros_p (exp))
7782 {
7783 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7784 return target;
7785 }
7786
7787 /* All elts simple constants => refer to a constant in memory. But
7788 if this is a non-BLKmode mode, let it store a field at a time
7789 since that should make a CONST_INT or CONST_DOUBLE when we
7790 fold. Likewise, if we have a target we can use, it is best to
7791 store directly into the target unless the type is large enough
7792 that memcpy will be used. If we are making an initializer and
7793 all operands are constant, put it in memory as well.
7794
7795 FIXME: Avoid trying to fill vector constructors piece-meal.
7796 Output them with output_constant_def below unless we're sure
7797 they're zeros. This should go away when vector initializers
7798 are treated like VECTOR_CST instead of arrays. */
7799 if ((TREE_STATIC (exp)
7800 && ((mode == BLKmode
7801 && ! (target != 0 && safe_from_p (target, exp, 1)))
7802 || TREE_ADDRESSABLE (exp)
7803 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7804 && (! MOVE_BY_PIECES_P
7805 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7806 TYPE_ALIGN (type)))
7807 && ! mostly_zeros_p (exp))))
7808 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7809 && TREE_CONSTANT (exp)))
7810 {
7811 rtx constructor;
7812
7813 if (avoid_temp_mem)
7814 return NULL_RTX;
7815
7816 constructor = expand_expr_constant (exp, 1, modifier);
7817
7818 if (modifier != EXPAND_CONST_ADDRESS
7819 && modifier != EXPAND_INITIALIZER
7820 && modifier != EXPAND_SUM)
7821 constructor = validize_mem (constructor);
7822
7823 return constructor;
7824 }
7825
7826 /* Handle calls that pass values in multiple non-contiguous
7827 locations. The Irix 6 ABI has examples of this. */
7828 if (target == 0 || ! safe_from_p (target, exp, 1)
7829 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7830 {
7831 if (avoid_temp_mem)
7832 return NULL_RTX;
7833
7834 target
7835 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7836 | (TREE_READONLY (exp)
7837 * TYPE_QUAL_CONST))),
7838 TREE_ADDRESSABLE (exp), 1);
7839 }
7840
7841 store_constructor (exp, target, 0, int_expr_size (exp));
7842 return target;
7843 }
7844
7845
7846 /* expand_expr: generate code for computing expression EXP.
7847 An rtx for the computed value is returned. The value is never null.
7848 In the case of a void EXP, const0_rtx is returned.
7849
7850 The value may be stored in TARGET if TARGET is nonzero.
7851 TARGET is just a suggestion; callers must assume that
7852 the rtx returned may not be the same as TARGET.
7853
7854 If TARGET is CONST0_RTX, it means that the value will be ignored.
7855
7856 If TMODE is not VOIDmode, it suggests generating the
7857 result in mode TMODE. But this is done only when convenient.
7858 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7859 TMODE is just a suggestion; callers must assume that
7860 the rtx returned may not have mode TMODE.
7861
7862 Note that TARGET may have neither TMODE nor MODE. In that case, it
7863 probably will not be used.
7864
7865 If MODIFIER is EXPAND_SUM then when EXP is an addition
7866 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7867 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7868 products as above, or REG or MEM, or constant.
7869 Ordinarily in such cases we would output mul or add instructions
7870 and then return a pseudo reg containing the sum.
7871
7872 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7873 it also marks a label as absolutely required (it can't be dead).
7874 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7875 This is used for outputting expressions used in initializers.
7876
7877 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7878 with a constant address even if that address is not normally legitimate.
7879 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7880
7881 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7882 a call parameter. Such targets require special care as we haven't yet
7883 marked TARGET so that it's safe from being trashed by libcalls. We
7884 don't want to use TARGET for anything but the final result;
7885 intermediate values must go elsewhere. Additionally, calls to
7886 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7887
7888 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7889 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7890 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7891 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7892 recursively. */
7893
7894 rtx
7895 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7896 enum expand_modifier modifier, rtx *alt_rtl)
7897 {
7898 rtx ret;
7899
7900 /* Handle ERROR_MARK before anybody tries to access its type. */
7901 if (TREE_CODE (exp) == ERROR_MARK
7902 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7903 {
7904 ret = CONST0_RTX (tmode);
7905 return ret ? ret : const0_rtx;
7906 }
7907
7908 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7909 return ret;
7910 }
7911
7912 /* Try to expand the conditional expression which is represented by
7913 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7914 return the rtl reg which represents the result. Otherwise return
7915 NULL_RTX. */
7916
7917 static rtx
7918 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7919 tree treeop1 ATTRIBUTE_UNUSED,
7920 tree treeop2 ATTRIBUTE_UNUSED)
7921 {
7922 #ifdef HAVE_conditional_move
7923 rtx insn;
7924 rtx op00, op01, op1, op2;
7925 enum rtx_code comparison_code;
7926 enum machine_mode comparison_mode;
7927 gimple srcstmt;
7928 rtx temp;
7929 tree type = TREE_TYPE (treeop1);
7930 int unsignedp = TYPE_UNSIGNED (type);
7931 enum machine_mode mode = TYPE_MODE (type);
7932 enum machine_mode orig_mode = mode;
7933
7934 /* If we cannot do a conditional move on the mode, try doing it
7935 with the promoted mode. */
7936 if (!can_conditionally_move_p (mode))
7937 {
7938 mode = promote_mode (type, mode, &unsignedp);
7939 if (!can_conditionally_move_p (mode))
7940 return NULL_RTX;
7941 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7942 }
7943 else
7944 temp = assign_temp (type, 0, 1);
7945
7946 start_sequence ();
7947 expand_operands (treeop1, treeop2,
7948 temp, &op1, &op2, EXPAND_NORMAL);
7949
7950 if (TREE_CODE (treeop0) == SSA_NAME
7951 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7952 {
7953 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7954 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7955 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7956 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7957 comparison_mode = TYPE_MODE (type);
7958 unsignedp = TYPE_UNSIGNED (type);
7959 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7960 }
7961 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7962 {
7963 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7964 enum tree_code cmpcode = TREE_CODE (treeop0);
7965 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7966 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7967 unsignedp = TYPE_UNSIGNED (type);
7968 comparison_mode = TYPE_MODE (type);
7969 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7970 }
7971 else
7972 {
7973 op00 = expand_normal (treeop0);
7974 op01 = const0_rtx;
7975 comparison_code = NE;
7976 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7977 }
7978
7979 if (GET_MODE (op1) != mode)
7980 op1 = gen_lowpart (mode, op1);
7981
7982 if (GET_MODE (op2) != mode)
7983 op2 = gen_lowpart (mode, op2);
7984
7985 /* Try to emit the conditional move. */
7986 insn = emit_conditional_move (temp, comparison_code,
7987 op00, op01, comparison_mode,
7988 op1, op2, mode,
7989 unsignedp);
7990
7991 /* If we could do the conditional move, emit the sequence,
7992 and return. */
7993 if (insn)
7994 {
7995 rtx seq = get_insns ();
7996 end_sequence ();
7997 emit_insn (seq);
7998 return convert_modes (orig_mode, mode, temp, 0);
7999 }
8000
8001 /* Otherwise discard the sequence and fall back to code with
8002 branches. */
8003 end_sequence ();
8004 #endif
8005 return NULL_RTX;
8006 }
8007
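/* Usage sketch (hypothetical GIMPLE input): for

     x_3 = a_1 < b_2 ? c_4 : d_5;

   the comparison is expanded into OP00/OP01 with the rtx code from
   convert_tree_comp_to_rtx, c_4 and d_5 become OP1 and OP2, and
   emit_conditional_move is asked for a single conditional move into
   TEMP; if the target cannot do that, NULL_RTX is returned and the
   caller falls back to a branchy expansion.  */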
8008 rtx
8009 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8010 enum expand_modifier modifier)
8011 {
8012 rtx op0, op1, op2, temp;
8013 tree type;
8014 int unsignedp;
8015 enum machine_mode mode;
8016 enum tree_code code = ops->code;
8017 optab this_optab;
8018 rtx subtarget, original_target;
8019 int ignore;
8020 bool reduce_bit_field;
8021 location_t loc = ops->location;
8022 tree treeop0, treeop1, treeop2;
8023 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8024 ? reduce_to_bit_field_precision ((expr), \
8025 target, \
8026 type) \
8027 : (expr))
8028
8029 type = ops->type;
8030 mode = TYPE_MODE (type);
8031 unsignedp = TYPE_UNSIGNED (type);
8032
8033 treeop0 = ops->op0;
8034 treeop1 = ops->op1;
8035 treeop2 = ops->op2;
8036
8037 /* We should be called only on simple (binary or unary) expressions,
8038 exactly those that are valid in gimple expressions that aren't
8039 GIMPLE_SINGLE_RHS (or invalid). */
8040 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8041 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8042 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8043
8044 ignore = (target == const0_rtx
8045 || ((CONVERT_EXPR_CODE_P (code)
8046 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8047 && TREE_CODE (type) == VOID_TYPE));
8048
8049 /* We should be called only if we need the result. */
8050 gcc_assert (!ignore);
8051
8052 /* An operation in what may be a bit-field type needs the
8053 result to be reduced to the precision of the bit-field type,
8054 which is narrower than that of the type's mode. */
8055 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8056 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8057
8058 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8059 target = 0;
8060
8061 /* Use subtarget as the target for operand 0 of a binary operation. */
8062 subtarget = get_subtarget (target);
8063 original_target = target;
8064
8065 switch (code)
8066 {
8067 case NON_LVALUE_EXPR:
8068 case PAREN_EXPR:
8069 CASE_CONVERT:
8070 if (treeop0 == error_mark_node)
8071 return const0_rtx;
8072
8073 if (TREE_CODE (type) == UNION_TYPE)
8074 {
8075 tree valtype = TREE_TYPE (treeop0);
8076
8077 /* If both input and output are BLKmode, this conversion isn't doing
8078 anything except possibly changing memory attributes. */
8079 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8080 {
8081 rtx result = expand_expr (treeop0, target, tmode,
8082 modifier);
8083
8084 result = copy_rtx (result);
8085 set_mem_attributes (result, type, 0);
8086 return result;
8087 }
8088
8089 if (target == 0)
8090 {
8091 if (TYPE_MODE (type) != BLKmode)
8092 target = gen_reg_rtx (TYPE_MODE (type));
8093 else
8094 target = assign_temp (type, 1, 1);
8095 }
8096
8097 if (MEM_P (target))
8098 /* Store data into beginning of memory target. */
8099 store_expr (treeop0,
8100 adjust_address (target, TYPE_MODE (valtype), 0),
8101 modifier == EXPAND_STACK_PARM,
8102 false);
8103
8104 else
8105 {
8106 gcc_assert (REG_P (target));
8107
8108 /* Store this field into a union of the proper type. */
8109 store_field (target,
8110 MIN ((int_size_in_bytes (TREE_TYPE
8111 (treeop0))
8112 * BITS_PER_UNIT),
8113 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8114 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8115 }
8116
8117 /* Return the entire union. */
8118 return target;
8119 }
8120
8121 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8122 {
8123 op0 = expand_expr (treeop0, target, VOIDmode,
8124 modifier);
8125
8126 /* If the signedness of the conversion differs and OP0 is
8127 a promoted SUBREG, clear that indication since we now
8128 have to do the proper extension. */
8129 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8130 && GET_CODE (op0) == SUBREG)
8131 SUBREG_PROMOTED_VAR_P (op0) = 0;
8132
8133 return REDUCE_BIT_FIELD (op0);
8134 }
8135
8136 op0 = expand_expr (treeop0, NULL_RTX, mode,
8137 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8138 if (GET_MODE (op0) == mode)
8139 ;
8140
8141 /* If OP0 is a constant, just convert it into the proper mode. */
8142 else if (CONSTANT_P (op0))
8143 {
8144 tree inner_type = TREE_TYPE (treeop0);
8145 enum machine_mode inner_mode = GET_MODE (op0);
8146
8147 if (inner_mode == VOIDmode)
8148 inner_mode = TYPE_MODE (inner_type);
8149
8150 if (modifier == EXPAND_INITIALIZER)
8151 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8152 subreg_lowpart_offset (mode,
8153 inner_mode));
8154 else
8155 op0 = convert_modes (mode, inner_mode, op0,
8156 TYPE_UNSIGNED (inner_type));
8157 }
8158
8159 else if (modifier == EXPAND_INITIALIZER)
8160 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8161
8162 else if (target == 0)
8163 op0 = convert_to_mode (mode, op0,
8164 TYPE_UNSIGNED (TREE_TYPE
8165 (treeop0)));
8166 else
8167 {
8168 convert_move (target, op0,
8169 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8170 op0 = target;
8171 }
8172
8173 return REDUCE_BIT_FIELD (op0);
8174
8175 case ADDR_SPACE_CONVERT_EXPR:
8176 {
8177 tree treeop0_type = TREE_TYPE (treeop0);
8178 addr_space_t as_to;
8179 addr_space_t as_from;
8180
8181 gcc_assert (POINTER_TYPE_P (type));
8182 gcc_assert (POINTER_TYPE_P (treeop0_type));
8183
8184 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8185 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8186
8187 /* Conversions between pointers to the same address space should
8188 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8189 gcc_assert (as_to != as_from);
8190
8191 /* Ask target code to handle conversion between pointers
8192 to overlapping address spaces. */
8193 if (targetm.addr_space.subset_p (as_to, as_from)
8194 || targetm.addr_space.subset_p (as_from, as_to))
8195 {
8196 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8197 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8198 gcc_assert (op0);
8199 return op0;
8200 }
8201
8202 /* For disjoint address spaces, converting anything but
8203 a null pointer invokes undefined behaviour. We simply
8204 always return a null pointer here. */
8205 return CONST0_RTX (mode);
8206 }
8207
8208 case POINTER_PLUS_EXPR:
8209 /* Even though the sizetype mode and the pointer's mode can be different,
8210 expand is able to handle this correctly and get the correct result out
8211 of the PLUS_EXPR code. */
8212 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8213 if sizetype precision is smaller than pointer precision. */
8214 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8215 treeop1 = fold_convert_loc (loc, type,
8216 fold_convert_loc (loc, ssizetype,
8217 treeop1));
8218 /* If sizetype precision is larger than pointer precision, truncate the
8219 offset to have matching modes. */
8220 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8221 treeop1 = fold_convert_loc (loc, type, treeop1);
8222
8223 case PLUS_EXPR:
8224 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8225 something else, make sure we add the register to the constant and
8226 then to the other thing. This case can occur during strength
8227 reduction and doing it this way will produce better code if the
8228 frame pointer or argument pointer is eliminated.
8229
8230 fold-const.c will ensure that the constant is always in the inner
8231 PLUS_EXPR, so the only case we need to do anything about is if
8232 sp, ap, or fp is our second argument, in which case we must swap
8233 the innermost first argument and our second argument. */
8234
8235 if (TREE_CODE (treeop0) == PLUS_EXPR
8236 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8237 && TREE_CODE (treeop1) == VAR_DECL
8238 && (DECL_RTL (treeop1) == frame_pointer_rtx
8239 || DECL_RTL (treeop1) == stack_pointer_rtx
8240 || DECL_RTL (treeop1) == arg_pointer_rtx))
8241 {
8242 gcc_unreachable ();
8243 }
8244
8245 /* If the result is to be ptr_mode and we are adding an integer to
8246 something, we might be forming a constant. So try to use
8247 plus_constant. If it produces a sum and we can't accept it,
8248 use force_operand. This allows P = &ARR[const] to generate
8249 efficient code on machines where a SYMBOL_REF is not a valid
8250 address.
8251
8252 If this is an EXPAND_SUM call, always return the sum. */
8253 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8254 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8255 {
8256 if (modifier == EXPAND_STACK_PARM)
8257 target = 0;
8258 if (TREE_CODE (treeop0) == INTEGER_CST
8259 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8260 && TREE_CONSTANT (treeop1))
8261 {
8262 rtx constant_part;
8263
8264 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8265 EXPAND_SUM);
8266 /* Use immed_double_const to ensure that the constant is
8267 truncated according to the mode of OP1, then sign extended
8268 to a HOST_WIDE_INT. Using the constant directly can result
8269 in non-canonical RTL in a 64x32 cross compile. */
8270 constant_part
8271 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8272 (HOST_WIDE_INT) 0,
8273 TYPE_MODE (TREE_TYPE (treeop1)));
8274 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8275 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8276 op1 = force_operand (op1, target);
8277 return REDUCE_BIT_FIELD (op1);
8278 }
8279
8280 else if (TREE_CODE (treeop1) == INTEGER_CST
8281 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8282 && TREE_CONSTANT (treeop0))
8283 {
8284 rtx constant_part;
8285
8286 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8287 (modifier == EXPAND_INITIALIZER
8288 ? EXPAND_INITIALIZER : EXPAND_SUM));
8289 if (! CONSTANT_P (op0))
8290 {
8291 op1 = expand_expr (treeop1, NULL_RTX,
8292 VOIDmode, modifier);
8293 /* Return a PLUS if modifier says it's OK. */
8294 if (modifier == EXPAND_SUM
8295 || modifier == EXPAND_INITIALIZER)
8296 return simplify_gen_binary (PLUS, mode, op0, op1);
8297 goto binop2;
8298 }
8299 /* Use immed_double_const to ensure that the constant is
8300 truncated according to the mode of OP1, then sign extended
8301 to a HOST_WIDE_INT. Using the constant directly can result
8302 in non-canonical RTL in a 64x32 cross compile. */
8303 constant_part
8304 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8305 (HOST_WIDE_INT) 0,
8306 TYPE_MODE (TREE_TYPE (treeop0)));
8307 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8308 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8309 op0 = force_operand (op0, target);
8310 return REDUCE_BIT_FIELD (op0);
8311 }
8312 }
8313
8314 /* Use TER to expand pointer addition of a negated value
8315 as pointer subtraction. */
8316 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8317 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8318 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8319 && TREE_CODE (treeop1) == SSA_NAME
8320 && TYPE_MODE (TREE_TYPE (treeop0))
8321 == TYPE_MODE (TREE_TYPE (treeop1)))
8322 {
8323 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8324 if (def)
8325 {
8326 treeop1 = gimple_assign_rhs1 (def);
8327 code = MINUS_EXPR;
8328 goto do_minus;
8329 }
8330 }
8331
8332 /* No sense saving up arithmetic to be done
8333 if it's all in the wrong mode to form part of an address.
8334 And force_operand won't know whether to sign-extend or
8335 zero-extend. */
8336 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8337 || mode != ptr_mode)
8338 {
8339 expand_operands (treeop0, treeop1,
8340 subtarget, &op0, &op1, EXPAND_NORMAL);
8341 if (op0 == const0_rtx)
8342 return op1;
8343 if (op1 == const0_rtx)
8344 return op0;
8345 goto binop2;
8346 }
8347
8348 expand_operands (treeop0, treeop1,
8349 subtarget, &op0, &op1, modifier);
8350 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8351
8352 case MINUS_EXPR:
8353 do_minus:
8354 /* For initializers, we are allowed to return a MINUS of two
8355 symbolic constants. Here we handle all cases when both operands
8356 are constant. */
8357 /* Handle difference of two symbolic constants,
8358 for the sake of an initializer. */
8359 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8360 && really_constant_p (treeop0)
8361 && really_constant_p (treeop1))
8362 {
8363 expand_operands (treeop0, treeop1,
8364 NULL_RTX, &op0, &op1, modifier);
8365
8366 /* If the last operand is a CONST_INT, use plus_constant of
8367 the negated constant. Else make the MINUS. */
8368 if (CONST_INT_P (op1))
8369 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8370 -INTVAL (op1)));
8371 else
8372 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8373 }
8374
8375 /* No sense saving up arithmetic to be done
8376 if it's all in the wrong mode to form part of an address.
8377 And force_operand won't know whether to sign-extend or
8378 zero-extend. */
8379 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8380 || mode != ptr_mode)
8381 goto binop;
8382
8383 expand_operands (treeop0, treeop1,
8384 subtarget, &op0, &op1, modifier);
8385
8386 /* Convert A - const to A + (-const). */
8387 if (CONST_INT_P (op1))
8388 {
8389 op1 = negate_rtx (mode, op1);
8390 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8391 }
8392
8393 goto binop2;
8394
8395 case WIDEN_MULT_PLUS_EXPR:
8396 case WIDEN_MULT_MINUS_EXPR:
8397 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8398 op2 = expand_normal (treeop2);
8399 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8400 target, unsignedp);
8401 return target;
8402
8403 case WIDEN_MULT_EXPR:
8404 /* If first operand is constant, swap them.
8405 Thus the following special case checks need only
8406 check the second operand. */
8407 if (TREE_CODE (treeop0) == INTEGER_CST)
8408 {
8409 tree t1 = treeop0;
8410 treeop0 = treeop1;
8411 treeop1 = t1;
8412 }
8413
8414 /* First, check if we have a multiplication of one signed and one
8415 unsigned operand. */
8416 if (TREE_CODE (treeop1) != INTEGER_CST
8417 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8418 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8419 {
8420 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8421 this_optab = usmul_widen_optab;
8422 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8423 != CODE_FOR_nothing)
8424 {
8425 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8426 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8427 EXPAND_NORMAL);
8428 else
8429 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8430 EXPAND_NORMAL);
8431 /* op0 and op1 might still be constant, despite the above
8432 != INTEGER_CST check. Handle it. */
8433 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8434 {
8435 op0 = convert_modes (innermode, mode, op0, true);
8436 op1 = convert_modes (innermode, mode, op1, false);
8437 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8438 target, unsignedp));
8439 }
8440 goto binop3;
8441 }
8442 }
8443 /* Check for a multiplication with matching signedness. */
8444 else if ((TREE_CODE (treeop1) == INTEGER_CST
8445 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8446 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8447 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8448 {
8449 tree op0type = TREE_TYPE (treeop0);
8450 enum machine_mode innermode = TYPE_MODE (op0type);
8451 bool zextend_p = TYPE_UNSIGNED (op0type);
8452 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8453 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8454
8455 if (TREE_CODE (treeop0) != INTEGER_CST)
8456 {
8457 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8458 != CODE_FOR_nothing)
8459 {
8460 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8461 EXPAND_NORMAL);
8462 /* op0 and op1 might still be constant, despite the above
8463 != INTEGER_CST check. Handle it. */
8464 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8465 {
8466 widen_mult_const:
8467 op0 = convert_modes (innermode, mode, op0, zextend_p);
8468 op1
8469 = convert_modes (innermode, mode, op1,
8470 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8471 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8472 target,
8473 unsignedp));
8474 }
8475 temp = expand_widening_mult (mode, op0, op1, target,
8476 unsignedp, this_optab);
8477 return REDUCE_BIT_FIELD (temp);
8478 }
8479 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8480 != CODE_FOR_nothing
8481 && innermode == word_mode)
8482 {
8483 rtx htem, hipart;
8484 op0 = expand_normal (treeop0);
8485 if (TREE_CODE (treeop1) == INTEGER_CST)
8486 op1 = convert_modes (innermode, mode,
8487 expand_normal (treeop1),
8488 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8489 else
8490 op1 = expand_normal (treeop1);
8491 /* op0 and op1 might still be constant, despite the above
8492 != INTEGER_CST check. Handle it. */
8493 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8494 goto widen_mult_const;
8495 temp = expand_binop (mode, other_optab, op0, op1, target,
8496 unsignedp, OPTAB_LIB_WIDEN);
8497 hipart = gen_highpart (innermode, temp);
8498 htem = expand_mult_highpart_adjust (innermode, hipart,
8499 op0, op1, hipart,
8500 zextend_p);
8501 if (htem != hipart)
8502 emit_move_insn (hipart, htem);
8503 return REDUCE_BIT_FIELD (temp);
8504 }
8505 }
8506 }
8507 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8508 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8509 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8510 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8511
8512 case FMA_EXPR:
8513 {
8514 optab opt = fma_optab;
8515 gimple def0, def2;
8516
8517 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8518 call. */
8519 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8520 {
8521 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8522 tree call_expr;
8523
8524 gcc_assert (fn != NULL_TREE);
8525 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8526 return expand_builtin (call_expr, target, subtarget, mode, false);
8527 }
8528
8529 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8530 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8531
8532 op0 = op2 = NULL;
8533
8534 if (def0 && def2
8535 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8536 {
8537 opt = fnms_optab;
8538 op0 = expand_normal (gimple_assign_rhs1 (def0));
8539 op2 = expand_normal (gimple_assign_rhs1 (def2));
8540 }
8541 else if (def0
8542 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8543 {
8544 opt = fnma_optab;
8545 op0 = expand_normal (gimple_assign_rhs1 (def0));
8546 }
8547 else if (def2
8548 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8549 {
8550 opt = fms_optab;
8551 op2 = expand_normal (gimple_assign_rhs1 (def2));
8552 }
8553
8554 if (op0 == NULL)
8555 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8556 if (op2 == NULL)
8557 op2 = expand_normal (treeop2);
8558 op1 = expand_normal (treeop1);
8559
8560 return expand_ternary_op (TYPE_MODE (type), opt,
8561 op0, op1, op2, target, 0);
8562 }
8563
8564 case MULT_EXPR:
8565 /* If this is a fixed-point operation, then we cannot use the code
8566 below because "expand_mult" doesn't support sat/no-sat fixed-point
8567 multiplications. */
8568 if (ALL_FIXED_POINT_MODE_P (mode))
8569 goto binop;
8570
8571 /* If first operand is constant, swap them.
8572 Thus the following special case checks need only
8573 check the second operand. */
8574 if (TREE_CODE (treeop0) == INTEGER_CST)
8575 {
8576 tree t1 = treeop0;
8577 treeop0 = treeop1;
8578 treeop1 = t1;
8579 }
8580
8581 /* Attempt to return something suitable for generating an
8582 indexed address, for machines that support that. */
8583
8584 if (modifier == EXPAND_SUM && mode == ptr_mode
8585 && tree_fits_shwi_p (treeop1))
8586 {
8587 tree exp1 = treeop1;
8588
8589 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8590 EXPAND_SUM);
8591
8592 if (!REG_P (op0))
8593 op0 = force_operand (op0, NULL_RTX);
8594 if (!REG_P (op0))
8595 op0 = copy_to_mode_reg (mode, op0);
8596
8597 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8598 gen_int_mode (tree_to_shwi (exp1),
8599 TYPE_MODE (TREE_TYPE (exp1)))));
8600 }
8601
8602 if (modifier == EXPAND_STACK_PARM)
8603 target = 0;
8604
8605 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8606 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8607
8608 case TRUNC_DIV_EXPR:
8609 case FLOOR_DIV_EXPR:
8610 case CEIL_DIV_EXPR:
8611 case ROUND_DIV_EXPR:
8612 case EXACT_DIV_EXPR:
8613 /* If this is a fixed-point operation, then we cannot use the code
8614 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8615 divisions. */
8616 if (ALL_FIXED_POINT_MODE_P (mode))
8617 goto binop;
8618
8619 if (modifier == EXPAND_STACK_PARM)
8620 target = 0;
8621 /* Possible optimization: compute the dividend with EXPAND_SUM
8622 then, if the divisor is constant, we can optimize the case
8623 where some terms of the dividend have coeffs divisible by it. */
8624 expand_operands (treeop0, treeop1,
8625 subtarget, &op0, &op1, EXPAND_NORMAL);
8626 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8627
8628 case RDIV_EXPR:
8629 goto binop;
8630
8631 case MULT_HIGHPART_EXPR:
8632 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8633 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8634 gcc_assert (temp);
8635 return temp;
8636
8637 case TRUNC_MOD_EXPR:
8638 case FLOOR_MOD_EXPR:
8639 case CEIL_MOD_EXPR:
8640 case ROUND_MOD_EXPR:
8641 if (modifier == EXPAND_STACK_PARM)
8642 target = 0;
8643 expand_operands (treeop0, treeop1,
8644 subtarget, &op0, &op1, EXPAND_NORMAL);
8645 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8646
8647 case FIXED_CONVERT_EXPR:
8648 op0 = expand_normal (treeop0);
8649 if (target == 0 || modifier == EXPAND_STACK_PARM)
8650 target = gen_reg_rtx (mode);
8651
8652 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8653 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8654 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8655 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8656 else
8657 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8658 return target;
8659
8660 case FIX_TRUNC_EXPR:
8661 op0 = expand_normal (treeop0);
8662 if (target == 0 || modifier == EXPAND_STACK_PARM)
8663 target = gen_reg_rtx (mode);
8664 expand_fix (target, op0, unsignedp);
8665 return target;
8666
8667 case FLOAT_EXPR:
8668 op0 = expand_normal (treeop0);
8669 if (target == 0 || modifier == EXPAND_STACK_PARM)
8670 target = gen_reg_rtx (mode);
8671 /* expand_float can't figure out what to do if FROM has VOIDmode.
8672 So give it the correct mode. With -O, cse will optimize this. */
8673 if (GET_MODE (op0) == VOIDmode)
8674 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8675 op0);
8676 expand_float (target, op0,
8677 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8678 return target;
8679
8680 case NEGATE_EXPR:
8681 op0 = expand_expr (treeop0, subtarget,
8682 VOIDmode, EXPAND_NORMAL);
8683 if (modifier == EXPAND_STACK_PARM)
8684 target = 0;
8685 temp = expand_unop (mode,
8686 optab_for_tree_code (NEGATE_EXPR, type,
8687 optab_default),
8688 op0, target, 0);
8689 gcc_assert (temp);
8690 return REDUCE_BIT_FIELD (temp);
8691
8692 case ABS_EXPR:
8693 op0 = expand_expr (treeop0, subtarget,
8694 VOIDmode, EXPAND_NORMAL);
8695 if (modifier == EXPAND_STACK_PARM)
8696 target = 0;
8697
8698 /* ABS_EXPR is not valid for complex arguments. */
8699 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8700 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8701
8702 /* Unsigned abs is simply the operand. Testing here means we don't
8703 risk generating incorrect code below. */
8704 if (TYPE_UNSIGNED (type))
8705 return op0;
8706
8707 return expand_abs (mode, op0, target, unsignedp,
8708 safe_from_p (target, treeop0, 1));
8709
8710 case MAX_EXPR:
8711 case MIN_EXPR:
8712 target = original_target;
8713 if (target == 0
8714 || modifier == EXPAND_STACK_PARM
8715 || (MEM_P (target) && MEM_VOLATILE_P (target))
8716 || GET_MODE (target) != mode
8717 || (REG_P (target)
8718 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8719 target = gen_reg_rtx (mode);
8720 expand_operands (treeop0, treeop1,
8721 target, &op0, &op1, EXPAND_NORMAL);
8722
8723 /* First try to do it with a special MIN or MAX instruction.
8724 If that does not win, use a conditional jump to select the proper
8725 value. */
8726 this_optab = optab_for_tree_code (code, type, optab_default);
8727 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8728 OPTAB_WIDEN);
8729 if (temp != 0)
8730 return temp;
8731
8732 /* At this point, a MEM target is no longer useful; we will get better
8733 code without it. */
8734
8735 if (! REG_P (target))
8736 target = gen_reg_rtx (mode);
8737
8738 /* If op1 was placed in target, swap op0 and op1. */
8739 if (target != op0 && target == op1)
8740 {
8741 temp = op0;
8742 op0 = op1;
8743 op1 = temp;
8744 }
8745
8746 /* We generate better code and avoid problems with op1 mentioning
8747 target by forcing op1 into a pseudo if it isn't a constant. */
8748 if (! CONSTANT_P (op1))
8749 op1 = force_reg (mode, op1);
8750
8751 {
8752 enum rtx_code comparison_code;
8753 rtx cmpop1 = op1;
8754
8755 if (code == MAX_EXPR)
8756 comparison_code = unsignedp ? GEU : GE;
8757 else
8758 comparison_code = unsignedp ? LEU : LE;
8759
8760 /* Canonicalize to comparisons against 0. */
8761 if (op1 == const1_rtx)
8762 {
8763 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8764 or (a != 0 ? a : 1) for unsigned.
8765 For MIN we are safe converting (a <= 1 ? a : 1)
8766 into (a <= 0 ? a : 1) */
8767 cmpop1 = const0_rtx;
8768 if (code == MAX_EXPR)
8769 comparison_code = unsignedp ? NE : GT;
8770 }
8771 if (op1 == constm1_rtx && !unsignedp)
8772 {
8773 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8774 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8775 cmpop1 = const0_rtx;
8776 if (code == MIN_EXPR)
8777 comparison_code = LT;
8778 }
8779 #ifdef HAVE_conditional_move
8780 /* Use a conditional move if possible. */
8781 if (can_conditionally_move_p (mode))
8782 {
8783 rtx insn;
8784
8785 /* ??? Same problem as in expmed.c: emit_conditional_move
8786 forces a stack adjustment via compare_from_rtx, and we
8787 lose the stack adjustment if the sequence we are about
8788 to create is discarded. */
8789 do_pending_stack_adjust ();
8790
8791 start_sequence ();
8792
8793 /* Try to emit the conditional move. */
8794 insn = emit_conditional_move (target, comparison_code,
8795 op0, cmpop1, mode,
8796 op0, op1, mode,
8797 unsignedp);
8798
8799 /* If we could do the conditional move, emit the sequence,
8800 and return. */
8801 if (insn)
8802 {
8803 rtx seq = get_insns ();
8804 end_sequence ();
8805 emit_insn (seq);
8806 return target;
8807 }
8808
8809 /* Otherwise discard the sequence and fall back to code with
8810 branches. */
8811 end_sequence ();
8812 }
8813 #endif
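      /* No usable conditional move: fall back to branches.  Copy op0 into
	 TARGET, then jump over the store of op1 whenever the comparison
	 already selects op0.  */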
8814 if (target != op0)
8815 emit_move_insn (target, op0);
8816
8817 temp = gen_label_rtx ();
8818 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8819 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8820 -1);
8821 }
8822 emit_move_insn (target, op1);
8823 emit_label (temp);
8824 return target;
8825
8826 case BIT_NOT_EXPR:
8827 op0 = expand_expr (treeop0, subtarget,
8828 VOIDmode, EXPAND_NORMAL);
8829 if (modifier == EXPAND_STACK_PARM)
8830 target = 0;
8831 /* In case we have to reduce the result to bitfield precision
8832 for unsigned bitfield expand this as XOR with a proper constant
8833 instead. */
8834 if (reduce_bit_field && TYPE_UNSIGNED (type))
8835 temp = expand_binop (mode, xor_optab, op0,
8836 immed_double_int_const
8837 (double_int::mask (TYPE_PRECISION (type)), mode),
8838 target, 1, OPTAB_LIB_WIDEN);
8839 else
8840 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8841 gcc_assert (temp);
8842 return temp;
8843
8844 /* ??? Can optimize bitwise operations with one arg constant.
8845 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8846 and (a bitwise1 b) bitwise2 b (etc)
8847 but that is probably not worth while. */
8848
8849 case BIT_AND_EXPR:
8850 case BIT_IOR_EXPR:
8851 case BIT_XOR_EXPR:
8852 goto binop;
8853
8854 case LROTATE_EXPR:
8855 case RROTATE_EXPR:
8856 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8857 || (GET_MODE_PRECISION (TYPE_MODE (type))
8858 == TYPE_PRECISION (type)));
8859 /* fall through */
8860
8861 case LSHIFT_EXPR:
8862 case RSHIFT_EXPR:
8863 /* If this is a fixed-point operation, then we cannot use the code
8864 below because "expand_shift" doesn't support sat/no-sat fixed-point
8865 shifts. */
8866 if (ALL_FIXED_POINT_MODE_P (mode))
8867 goto binop;
8868
8869 if (! safe_from_p (subtarget, treeop1, 1))
8870 subtarget = 0;
8871 if (modifier == EXPAND_STACK_PARM)
8872 target = 0;
8873 op0 = expand_expr (treeop0, subtarget,
8874 VOIDmode, EXPAND_NORMAL);
8875 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8876 unsignedp);
8877 if (code == LSHIFT_EXPR)
8878 temp = REDUCE_BIT_FIELD (temp);
8879 return temp;
8880
8881 /* Could determine the answer when only additive constants differ. Also,
8882 the addition of one can be handled by changing the condition. */
8883 case LT_EXPR:
8884 case LE_EXPR:
8885 case GT_EXPR:
8886 case GE_EXPR:
8887 case EQ_EXPR:
8888 case NE_EXPR:
8889 case UNORDERED_EXPR:
8890 case ORDERED_EXPR:
8891 case UNLT_EXPR:
8892 case UNLE_EXPR:
8893 case UNGT_EXPR:
8894 case UNGE_EXPR:
8895 case UNEQ_EXPR:
8896 case LTGT_EXPR:
8897 temp = do_store_flag (ops,
8898 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8899 tmode != VOIDmode ? tmode : mode);
8900 if (temp)
8901 return temp;
8902
8903 /* Use a compare and a jump for BLKmode comparisons, or for function
8904 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8905
8906 if ((target == 0
8907 || modifier == EXPAND_STACK_PARM
8908 || ! safe_from_p (target, treeop0, 1)
8909 || ! safe_from_p (target, treeop1, 1)
8910 /* Make sure we don't have a hard reg (such as function's return
8911 value) live across basic blocks, if not optimizing. */
8912 || (!optimize && REG_P (target)
8913 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8914 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8915
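      /* Materialize the flag with a branch: TARGET starts as 0, and the jump
	 below skips the store of 1 (or -1 for a signed one-bit type) when the
	 comparison is false.  */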
8916 emit_move_insn (target, const0_rtx);
8917
8918 op1 = gen_label_rtx ();
8919 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8920
8921 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8922 emit_move_insn (target, constm1_rtx);
8923 else
8924 emit_move_insn (target, const1_rtx);
8925
8926 emit_label (op1);
8927 return target;
8928
8929 case COMPLEX_EXPR:
8930 /* Get the rtx code of the operands. */
8931 op0 = expand_normal (treeop0);
8932 op1 = expand_normal (treeop1);
8933
8934 if (!target)
8935 target = gen_reg_rtx (TYPE_MODE (type));
8936 else
8937 /* If target overlaps with op1, then either we need to force
8938 op1 into a pseudo (if target also overlaps with op0),
8939 or write the complex parts in reverse order. */
8940 switch (GET_CODE (target))
8941 {
8942 case CONCAT:
8943 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8944 {
8945 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8946 {
8947 complex_expr_force_op1:
8948 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8949 emit_move_insn (temp, op1);
8950 op1 = temp;
8951 break;
8952 }
8953 complex_expr_swap_order:
8954 /* Move the imaginary (op1) and real (op0) parts to their
8955 location. */
8956 write_complex_part (target, op1, true);
8957 write_complex_part (target, op0, false);
8958
8959 return target;
8960 }
8961 break;
8962 case MEM:
8963 temp = adjust_address_nv (target,
8964 GET_MODE_INNER (GET_MODE (target)), 0);
8965 if (reg_overlap_mentioned_p (temp, op1))
8966 {
8967 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8968 temp = adjust_address_nv (target, imode,
8969 GET_MODE_SIZE (imode));
8970 if (reg_overlap_mentioned_p (temp, op0))
8971 goto complex_expr_force_op1;
8972 goto complex_expr_swap_order;
8973 }
8974 break;
8975 default:
8976 if (reg_overlap_mentioned_p (target, op1))
8977 {
8978 if (reg_overlap_mentioned_p (target, op0))
8979 goto complex_expr_force_op1;
8980 goto complex_expr_swap_order;
8981 }
8982 break;
8983 }
8984
8985 /* Move the real (op0) and imaginary (op1) parts to their location. */
8986 write_complex_part (target, op0, false);
8987 write_complex_part (target, op1, true);
8988
8989 return target;
8990
8991 case WIDEN_SUM_EXPR:
8992 {
8993 tree oprnd0 = treeop0;
8994 tree oprnd1 = treeop1;
8995
8996 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8997 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8998 target, unsignedp);
8999 return target;
9000 }
9001
9002 case REDUC_MAX_EXPR:
9003 case REDUC_MIN_EXPR:
9004 case REDUC_PLUS_EXPR:
9005 {
9006 op0 = expand_normal (treeop0);
9007 this_optab = optab_for_tree_code (code, type, optab_default);
9008 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9009 gcc_assert (temp);
9010 return temp;
9011 }
9012
9013 case VEC_LSHIFT_EXPR:
9014 case VEC_RSHIFT_EXPR:
9015 {
9016 target = expand_vec_shift_expr (ops, target);
9017 return target;
9018 }
9019
9020 case VEC_UNPACK_HI_EXPR:
9021 case VEC_UNPACK_LO_EXPR:
9022 {
9023 op0 = expand_normal (treeop0);
9024 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9025 target, unsignedp);
9026 gcc_assert (temp);
9027 return temp;
9028 }
9029
9030 case VEC_UNPACK_FLOAT_HI_EXPR:
9031 case VEC_UNPACK_FLOAT_LO_EXPR:
9032 {
9033 op0 = expand_normal (treeop0);
9034 /* The signedness is determined from the input operand. */
9035 temp = expand_widen_pattern_expr
9036 (ops, op0, NULL_RTX, NULL_RTX,
9037 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9038
9039 gcc_assert (temp);
9040 return temp;
9041 }
9042
9043 case VEC_WIDEN_MULT_HI_EXPR:
9044 case VEC_WIDEN_MULT_LO_EXPR:
9045 case VEC_WIDEN_MULT_EVEN_EXPR:
9046 case VEC_WIDEN_MULT_ODD_EXPR:
9047 case VEC_WIDEN_LSHIFT_HI_EXPR:
9048 case VEC_WIDEN_LSHIFT_LO_EXPR:
9049 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9050 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9051 target, unsignedp);
9052 gcc_assert (target);
9053 return target;
9054
9055 case VEC_PACK_TRUNC_EXPR:
9056 case VEC_PACK_SAT_EXPR:
9057 case VEC_PACK_FIX_TRUNC_EXPR:
9058 mode = TYPE_MODE (TREE_TYPE (treeop0));
9059 goto binop;
9060
9061 case VEC_PERM_EXPR:
9062 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9063 op2 = expand_normal (treeop2);
9064
9065 /* Careful here: if the target doesn't support integral vector modes,
9066 a constant selection vector could wind up smooshed into a normal
9067 integral constant. */
9068 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9069 {
9070 tree sel_type = TREE_TYPE (treeop2);
9071 enum machine_mode vmode
9072 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9073 TYPE_VECTOR_SUBPARTS (sel_type));
9074 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9075 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9076 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9077 }
9078 else
9079 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9080
9081 temp = expand_vec_perm (mode, op0, op1, op2, target);
9082 gcc_assert (temp);
9083 return temp;
9084
9085 case DOT_PROD_EXPR:
9086 {
9087 tree oprnd0 = treeop0;
9088 tree oprnd1 = treeop1;
9089 tree oprnd2 = treeop2;
9090 rtx op2;
9091
9092 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9093 op2 = expand_normal (oprnd2);
9094 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9095 target, unsignedp);
9096 return target;
9097 }
9098
9099 case REALIGN_LOAD_EXPR:
9100 {
9101 tree oprnd0 = treeop0;
9102 tree oprnd1 = treeop1;
9103 tree oprnd2 = treeop2;
9104 rtx op2;
9105
9106 this_optab = optab_for_tree_code (code, type, optab_default);
9107 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9108 op2 = expand_normal (oprnd2);
9109 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9110 target, unsignedp);
9111 gcc_assert (temp);
9112 return temp;
9113 }
9114
9115 case COND_EXPR:
9116 /* A COND_EXPR with its type being VOID_TYPE represents a
9117 conditional jump and is handled in
9118 expand_gimple_cond_expr. */
9119 gcc_assert (!VOID_TYPE_P (type));
9120
9121 /* Note that COND_EXPRs whose type is a structure or union
9122 are required to be constructed to contain assignments of
9123 a temporary variable, so that we can evaluate them here
9124 for side effect only. If type is void, we must do likewise. */
9125
9126 gcc_assert (!TREE_ADDRESSABLE (type)
9127 && !ignore
9128 && TREE_TYPE (treeop1) != void_type_node
9129 && TREE_TYPE (treeop2) != void_type_node);
9130
9131 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9132 if (temp)
9133 return temp;
9134
9135 /* If we are not to produce a result, we have no target. Otherwise,
9136 if a target was specified use it; it will not be used as an
9137 intermediate target unless it is safe. If no target, use a
9138 temporary. */
9139
9140 if (modifier != EXPAND_STACK_PARM
9141 && original_target
9142 && safe_from_p (original_target, treeop0, 1)
9143 && GET_MODE (original_target) == mode
9144 && !MEM_P (original_target))
9145 temp = original_target;
9146 else
9147 temp = assign_temp (type, 0, 1);
9148
9149 do_pending_stack_adjust ();
9150 NO_DEFER_POP;
9151 op0 = gen_label_rtx ();
9152 op1 = gen_label_rtx ();
9153 jumpifnot (treeop0, op0, -1);
9154 store_expr (treeop1, temp,
9155 modifier == EXPAND_STACK_PARM,
9156 false);
9157
9158 emit_jump_insn (gen_jump (op1));
9159 emit_barrier ();
9160 emit_label (op0);
9161 store_expr (treeop2, temp,
9162 modifier == EXPAND_STACK_PARM,
9163 false);
9164
9165 emit_label (op1);
9166 OK_DEFER_POP;
9167 return temp;
9168
9169 case VEC_COND_EXPR:
9170 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9171 return target;
9172
9173 default:
9174 gcc_unreachable ();
9175 }
9176
9177 /* Here to do an ordinary binary operator. */
9178 binop:
9179 expand_operands (treeop0, treeop1,
9180 subtarget, &op0, &op1, EXPAND_NORMAL);
9181 binop2:
9182 this_optab = optab_for_tree_code (code, type, optab_default);
9183 binop3:
9184 if (modifier == EXPAND_STACK_PARM)
9185 target = 0;
9186 temp = expand_binop (mode, this_optab, op0, op1, target,
9187 unsignedp, OPTAB_LIB_WIDEN);
9188 gcc_assert (temp);
9189 /* Bitwise operations do not need bitfield reduction as we expect their
9190 operands to be properly truncated. */
9191 if (code == BIT_XOR_EXPR
9192 || code == BIT_AND_EXPR
9193 || code == BIT_IOR_EXPR)
9194 return temp;
9195 return REDUCE_BIT_FIELD (temp);
9196 }
9197 #undef REDUCE_BIT_FIELD
9198
9199
9200 /* Return TRUE if expression STMT is suitable for replacement.
9201 Never consider memory loads as replaceable, because those don't ever lead
9202 into constant expressions. */
9203
9204 static bool
9205 stmt_is_replaceable_p (gimple stmt)
9206 {
9207 if (ssa_is_replaceable_p (stmt))
9208 {
9209 /* Don't move around loads. */
9210 if (!gimple_assign_single_p (stmt)
9211 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9212 return true;
9213 }
9214 return false;
9215 }
9216
9217 rtx
9218 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9219 enum expand_modifier modifier, rtx *alt_rtl)
9220 {
9221 rtx op0, op1, temp, decl_rtl;
9222 tree type;
9223 int unsignedp;
9224 enum machine_mode mode;
9225 enum tree_code code = TREE_CODE (exp);
9226 rtx subtarget, original_target;
9227 int ignore;
9228 tree context;
9229 bool reduce_bit_field;
9230 location_t loc = EXPR_LOCATION (exp);
9231 struct separate_ops ops;
9232 tree treeop0, treeop1, treeop2;
9233 tree ssa_name = NULL_TREE;
9234 gimple g;
9235
9236 type = TREE_TYPE (exp);
9237 mode = TYPE_MODE (type);
9238 unsignedp = TYPE_UNSIGNED (type);
9239
9240 treeop0 = treeop1 = treeop2 = NULL_TREE;
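   /* Collect up to three operands; the switch cases below intentionally fall
      through so an N-operand node fills treeop0 .. treeop(N-1).  */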
9241 if (!VL_EXP_CLASS_P (exp))
9242 switch (TREE_CODE_LENGTH (code))
9243 {
9244 default:
9245 case 3: treeop2 = TREE_OPERAND (exp, 2);
9246 case 2: treeop1 = TREE_OPERAND (exp, 1);
9247 case 1: treeop0 = TREE_OPERAND (exp, 0);
9248 case 0: break;
9249 }
9250 ops.code = code;
9251 ops.type = type;
9252 ops.op0 = treeop0;
9253 ops.op1 = treeop1;
9254 ops.op2 = treeop2;
9255 ops.location = loc;
9256
9257 ignore = (target == const0_rtx
9258 || ((CONVERT_EXPR_CODE_P (code)
9259 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9260 && TREE_CODE (type) == VOID_TYPE));
9261
9262 /* An operation in what may be a bit-field type needs the
9263 result to be reduced to the precision of the bit-field type,
9264 which is narrower than that of the type's mode. */
9265 reduce_bit_field = (!ignore
9266 && INTEGRAL_TYPE_P (type)
9267 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
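   /* For example, a 3-bit bit-field type is typically carried in QImode, whose
      8-bit precision exceeds the type's, so results must be truncated back to
      3 bits.  */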
9268
9269 /* If we are going to ignore this result, we need only do something
9270 if there is a side-effect somewhere in the expression. If there
9271 is, short-circuit the most common cases here. Note that we must
9272 not call expand_expr with anything but const0_rtx in case this
9273 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9274
9275 if (ignore)
9276 {
9277 if (! TREE_SIDE_EFFECTS (exp))
9278 return const0_rtx;
9279
9280 /* Ensure we reference a volatile object even if value is ignored, but
9281 don't do this if all we are doing is taking its address. */
9282 if (TREE_THIS_VOLATILE (exp)
9283 && TREE_CODE (exp) != FUNCTION_DECL
9284 && mode != VOIDmode && mode != BLKmode
9285 && modifier != EXPAND_CONST_ADDRESS)
9286 {
9287 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9288 if (MEM_P (temp))
9289 copy_to_reg (temp);
9290 return const0_rtx;
9291 }
9292
9293 if (TREE_CODE_CLASS (code) == tcc_unary
9294 || code == BIT_FIELD_REF
9295 || code == COMPONENT_REF
9296 || code == INDIRECT_REF)
9297 return expand_expr (treeop0, const0_rtx, VOIDmode,
9298 modifier);
9299
9300 else if (TREE_CODE_CLASS (code) == tcc_binary
9301 || TREE_CODE_CLASS (code) == tcc_comparison
9302 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9303 {
9304 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9305 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9306 return const0_rtx;
9307 }
9308
9309 target = 0;
9310 }
9311
9312 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9313 target = 0;
9314
9315 /* Use subtarget as the target for operand 0 of a binary operation. */
9316 subtarget = get_subtarget (target);
9317 original_target = target;
9318
9319 switch (code)
9320 {
9321 case LABEL_DECL:
9322 {
9323 tree function = decl_function_context (exp);
9324
9325 temp = label_rtx (exp);
9326 temp = gen_rtx_LABEL_REF (Pmode, temp);
9327
9328 if (function != current_function_decl
9329 && function != 0)
9330 LABEL_REF_NONLOCAL_P (temp) = 1;
9331
9332 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9333 return temp;
9334 }
9335
9336 case SSA_NAME:
9337 /* ??? ivopts calls the expander without any preparation from
9338 out-of-ssa. So fake instructions as if this were an access to the
9339 base variable. This unnecessarily allocates a pseudo; see whether we
9340 can reuse it if partition base vars have it set already. */
9341 if (!currently_expanding_to_rtl)
9342 {
9343 tree var = SSA_NAME_VAR (exp);
9344 if (var && DECL_RTL_SET_P (var))
9345 return DECL_RTL (var);
9346 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9347 LAST_VIRTUAL_REGISTER + 1);
9348 }
9349
9350 g = get_gimple_for_ssa_name (exp);
9351 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9352 if (g == NULL
9353 && modifier == EXPAND_INITIALIZER
9354 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9355 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9356 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9357 g = SSA_NAME_DEF_STMT (exp);
9358 if (g)
9359 {
9360 rtx r;
9361 location_t saved_loc = curr_insn_location ();
9362
9363 set_curr_insn_location (gimple_location (g));
9364 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9365 tmode, modifier, NULL);
9366 set_curr_insn_location (saved_loc);
9367 if (REG_P (r) && !REG_EXPR (r))
9368 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9369 return r;
9370 }
9371
9372 ssa_name = exp;
9373 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9374 exp = SSA_NAME_VAR (ssa_name);
9375 goto expand_decl_rtl;
9376
9377 case PARM_DECL:
9378 case VAR_DECL:
9379 /* If a static var's type was incomplete when the decl was written,
9380 but the type is complete now, lay out the decl now. */
9381 if (DECL_SIZE (exp) == 0
9382 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9383 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9384 layout_decl (exp, 0);
9385
9386 /* ... fall through ... */
9387
9388 case FUNCTION_DECL:
9389 case RESULT_DECL:
9390 decl_rtl = DECL_RTL (exp);
9391 expand_decl_rtl:
9392 gcc_assert (decl_rtl);
9393 decl_rtl = copy_rtx (decl_rtl);
9394 /* Record writes to register variables. */
9395 if (modifier == EXPAND_WRITE
9396 && REG_P (decl_rtl)
9397 && HARD_REGISTER_P (decl_rtl))
9398 add_to_hard_reg_set (&crtl->asm_clobbers,
9399 GET_MODE (decl_rtl), REGNO (decl_rtl));
9400
9401 /* Ensure the variable is marked as used even if it doesn't go through
9402 a parser. If it hasn't been used yet, write out an external
9403 definition. */
9404 TREE_USED (exp) = 1;
9405
9406 /* Show we haven't gotten RTL for this yet. */
9407 temp = 0;
9408
9409 /* Variables inherited from containing functions should have
9410 been lowered by this point. */
9411 context = decl_function_context (exp);
9412 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9413 || context == current_function_decl
9414 || TREE_STATIC (exp)
9415 || DECL_EXTERNAL (exp)
9416 /* ??? C++ creates functions that are not TREE_STATIC. */
9417 || TREE_CODE (exp) == FUNCTION_DECL);
9418
9419 /* This is the case of an array whose size is to be determined
9420 from its initializer, while the initializer is still being parsed.
9421 ??? We aren't parsing while expanding anymore. */
9422
9423 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9424 temp = validize_mem (decl_rtl);
9425
9426 /* If DECL_RTL is memory, we are in the normal case and the
9427 address is not valid, get the address into a register. */
9428
9429 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9430 {
9431 if (alt_rtl)
9432 *alt_rtl = decl_rtl;
9433 decl_rtl = use_anchored_address (decl_rtl);
9434 if (modifier != EXPAND_CONST_ADDRESS
9435 && modifier != EXPAND_SUM
9436 && !memory_address_addr_space_p (DECL_MODE (exp),
9437 XEXP (decl_rtl, 0),
9438 MEM_ADDR_SPACE (decl_rtl)))
9439 temp = replace_equiv_address (decl_rtl,
9440 copy_rtx (XEXP (decl_rtl, 0)));
9441 }
9442
9443 /* If we got something, return it. But first, set the alignment
9444 if the address is a register. */
9445 if (temp != 0)
9446 {
9447 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9448 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9449
9450 return temp;
9451 }
9452
9453 /* If the mode of DECL_RTL does not match that of the decl,
9454 there are two cases: we are dealing with a BLKmode value
9455 that is returned in a register, or we are dealing with
9456 a promoted value. In the latter case, return a SUBREG
9457 of the wanted mode, but mark it so that we know that it
9458 was already extended. */
9459 if (REG_P (decl_rtl)
9460 && DECL_MODE (exp) != BLKmode
9461 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9462 {
9463 enum machine_mode pmode;
9464
9465 /* Get the signedness to be used for this variable. Ensure we get
9466 the same mode we got when the variable was declared. */
9467 if (code == SSA_NAME
9468 && (g = SSA_NAME_DEF_STMT (ssa_name))
9469 && gimple_code (g) == GIMPLE_CALL)
9470 {
9471 gcc_assert (!gimple_call_internal_p (g));
9472 pmode = promote_function_mode (type, mode, &unsignedp,
9473 gimple_call_fntype (g),
9474 2);
9475 }
9476 else
9477 pmode = promote_decl_mode (exp, &unsignedp);
9478 gcc_assert (GET_MODE (decl_rtl) == pmode);
9479
9480 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9481 SUBREG_PROMOTED_VAR_P (temp) = 1;
9482 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9483 return temp;
9484 }
9485
9486 return decl_rtl;
9487
9488 case INTEGER_CST:
9489 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9490 TREE_INT_CST_HIGH (exp), mode);
9491
9492 return temp;
9493
9494 case VECTOR_CST:
9495 {
9496 tree tmp = NULL_TREE;
9497 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9498 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9499 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9500 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9501 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9502 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9503 return const_vector_from_tree (exp);
9504 if (GET_MODE_CLASS (mode) == MODE_INT)
9505 {
9506 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9507 if (type_for_mode)
9508 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9509 }
9510 if (!tmp)
9511 {
9512 vec<constructor_elt, va_gc> *v;
9513 unsigned i;
9514 vec_alloc (v, VECTOR_CST_NELTS (exp));
9515 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9516 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9517 tmp = build_constructor (type, v);
9518 }
9519 return expand_expr (tmp, ignore ? const0_rtx : target,
9520 tmode, modifier);
9521 }
9522
9523 case CONST_DECL:
9524 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9525
9526 case REAL_CST:
9527 /* If optimized, generate immediate CONST_DOUBLE
9528 which will be turned into memory by reload if necessary.
9529
9530 We used to force a register so that loop.c could see it. But
9531 this does not allow gen_* patterns to perform optimizations with
9532 the constants. It also produces two insns in cases like "x = 1.0;".
9533 On most machines, floating-point constants are not permitted in
9534 many insns, so we'd end up copying it to a register in any case.
9535
9536 Now, we do the copying in expand_binop, if appropriate. */
9537 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9538 TYPE_MODE (TREE_TYPE (exp)));
9539
9540 case FIXED_CST:
9541 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9542 TYPE_MODE (TREE_TYPE (exp)));
9543
9544 case COMPLEX_CST:
9545 /* Handle evaluating a complex constant in a CONCAT target. */
9546 if (original_target && GET_CODE (original_target) == CONCAT)
9547 {
9548 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9549 rtx rtarg, itarg;
9550
9551 rtarg = XEXP (original_target, 0);
9552 itarg = XEXP (original_target, 1);
9553
9554 /* Move the real and imaginary parts separately. */
9555 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9556 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9557
9558 if (op0 != rtarg)
9559 emit_move_insn (rtarg, op0);
9560 if (op1 != itarg)
9561 emit_move_insn (itarg, op1);
9562
9563 return original_target;
9564 }
9565
9566 /* ... fall through ... */
9567
9568 case STRING_CST:
9569 temp = expand_expr_constant (exp, 1, modifier);
9570
9571 /* temp contains a constant address.
9572 On RISC machines where a constant address isn't valid,
9573 make some insns to get that address into a register. */
9574 if (modifier != EXPAND_CONST_ADDRESS
9575 && modifier != EXPAND_INITIALIZER
9576 && modifier != EXPAND_SUM
9577 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9578 MEM_ADDR_SPACE (temp)))
9579 return replace_equiv_address (temp,
9580 copy_rtx (XEXP (temp, 0)));
9581 return temp;
9582
9583 case SAVE_EXPR:
9584 {
9585 tree val = treeop0;
9586 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9587
9588 if (!SAVE_EXPR_RESOLVED_P (exp))
9589 {
9590 /* We can indeed still hit this case, typically via builtin
9591 expanders calling save_expr immediately before expanding
9592 something. Assume this means that we only have to deal
9593 with non-BLKmode values. */
9594 gcc_assert (GET_MODE (ret) != BLKmode);
9595
9596 val = build_decl (curr_insn_location (),
9597 VAR_DECL, NULL, TREE_TYPE (exp));
9598 DECL_ARTIFICIAL (val) = 1;
9599 DECL_IGNORED_P (val) = 1;
9600 treeop0 = val;
9601 TREE_OPERAND (exp, 0) = treeop0;
9602 SAVE_EXPR_RESOLVED_P (exp) = 1;
9603
9604 if (!CONSTANT_P (ret))
9605 ret = copy_to_reg (ret);
9606 SET_DECL_RTL (val, ret);
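	    /* Subsequent expansions of this SAVE_EXPR now go through the
	       artificial VAR_DECL and simply pick up the RTL recorded here.  */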
9607 }
9608
9609 return ret;
9610 }
9611
9612
9613 case CONSTRUCTOR:
9614 /* If we don't need the result, just ensure we evaluate any
9615 subexpressions. */
9616 if (ignore)
9617 {
9618 unsigned HOST_WIDE_INT idx;
9619 tree value;
9620
9621 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9622 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9623
9624 return const0_rtx;
9625 }
9626
9627 return expand_constructor (exp, target, modifier, false);
9628
9629 case TARGET_MEM_REF:
9630 {
9631 addr_space_t as
9632 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9633 enum insn_code icode;
9634 unsigned int align;
9635
9636 op0 = addr_for_mem_ref (exp, as, true);
9637 op0 = memory_address_addr_space (mode, op0, as);
9638 temp = gen_rtx_MEM (mode, op0);
9639 set_mem_attributes (temp, exp, 0);
9640 set_mem_addr_space (temp, as);
9641 align = get_object_alignment (exp);
9642 if (modifier != EXPAND_WRITE
9643 && modifier != EXPAND_MEMORY
9644 && mode != BLKmode
9645 && align < GET_MODE_ALIGNMENT (mode)
9646 /* If the target does not have special handling for unaligned
9647 loads of this mode, then it can use regular moves for them. */
9648 && ((icode = optab_handler (movmisalign_optab, mode))
9649 != CODE_FOR_nothing))
9650 {
9651 struct expand_operand ops[2];
9652
9653 /* We've already validated the memory, and we're creating a
9654 new pseudo destination. The predicates really can't fail,
9655 nor can the generator. */
9656 create_output_operand (&ops[0], NULL_RTX, mode);
9657 create_fixed_operand (&ops[1], temp);
9658 expand_insn (icode, 2, ops);
9659 temp = ops[0].value;
9660 }
9661 return temp;
9662 }
9663
9664 case MEM_REF:
9665 {
9666 addr_space_t as
9667 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9668 enum machine_mode address_mode;
9669 tree base = TREE_OPERAND (exp, 0);
9670 gimple def_stmt;
9671 enum insn_code icode;
9672 unsigned align;
9673 /* Handle expansion of non-aliased memory with non-BLKmode. That
9674 might end up in a register. */
9675 if (mem_ref_refers_to_non_mem_p (exp))
9676 {
9677 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9678 base = TREE_OPERAND (base, 0);
9679 if (offset == 0
9680 && tree_fits_uhwi_p (TYPE_SIZE (type))
9681 && (GET_MODE_BITSIZE (DECL_MODE (base))
9682 == tree_to_uhwi (TYPE_SIZE (type))))
9683 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9684 target, tmode, modifier);
9685 if (TYPE_MODE (type) == BLKmode)
9686 {
9687 temp = assign_stack_temp (DECL_MODE (base),
9688 GET_MODE_SIZE (DECL_MODE (base)));
9689 store_expr (base, temp, 0, false);
9690 temp = adjust_address (temp, BLKmode, offset);
9691 set_mem_size (temp, int_size_in_bytes (type));
9692 return temp;
9693 }
9694 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9695 bitsize_int (offset * BITS_PER_UNIT));
9696 return expand_expr (exp, target, tmode, modifier);
9697 }
9698 address_mode = targetm.addr_space.address_mode (as);
9699 base = TREE_OPERAND (exp, 0);
9700 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9701 {
9702 tree mask = gimple_assign_rhs2 (def_stmt);
9703 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9704 gimple_assign_rhs1 (def_stmt), mask);
9705 TREE_OPERAND (exp, 0) = base;
9706 }
9707 align = get_object_alignment (exp);
9708 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9709 op0 = memory_address_addr_space (mode, op0, as);
9710 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9711 {
9712 rtx off
9713 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9714 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9715 op0 = memory_address_addr_space (mode, op0, as);
9716 }
9717 temp = gen_rtx_MEM (mode, op0);
9718 set_mem_attributes (temp, exp, 0);
9719 set_mem_addr_space (temp, as);
9720 if (TREE_THIS_VOLATILE (exp))
9721 MEM_VOLATILE_P (temp) = 1;
9722 if (modifier != EXPAND_WRITE
9723 && modifier != EXPAND_MEMORY
9724 && mode != BLKmode
9725 && align < GET_MODE_ALIGNMENT (mode))
9726 {
9727 if ((icode = optab_handler (movmisalign_optab, mode))
9728 != CODE_FOR_nothing)
9729 {
9730 struct expand_operand ops[2];
9731
9732 /* We've already validated the memory, and we're creating a
9733 new pseudo destination. The predicates really can't fail,
9734 nor can the generator. */
9735 create_output_operand (&ops[0], NULL_RTX, mode);
9736 create_fixed_operand (&ops[1], temp);
9737 expand_insn (icode, 2, ops);
9738 temp = ops[0].value;
9739 }
9740 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9741 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9742 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9743 (modifier == EXPAND_STACK_PARM
9744 ? NULL_RTX : target),
9745 mode, mode);
9746 }
9747 return temp;
9748 }
9749
9750 case ARRAY_REF:
9751
9752 {
9753 tree array = treeop0;
9754 tree index = treeop1;
9755 tree init;
9756
9757 /* Fold an expression like: "foo"[2].
9758 This is not done in fold so it won't happen inside &.
9759 Don't fold if this is for wide characters since it's too
9760 difficult to do correctly and this is a very rare case. */
9761
9762 if (modifier != EXPAND_CONST_ADDRESS
9763 && modifier != EXPAND_INITIALIZER
9764 && modifier != EXPAND_MEMORY)
9765 {
9766 tree t = fold_read_from_constant_string (exp);
9767
9768 if (t)
9769 return expand_expr (t, target, tmode, modifier);
9770 }
9771
9772 /* If this is a constant index into a constant array,
9773 just get the value from the array. Handle both the cases when
9774 we have an explicit constructor and when our operand is a variable
9775 that was declared const. */
9776
9777 if (modifier != EXPAND_CONST_ADDRESS
9778 && modifier != EXPAND_INITIALIZER
9779 && modifier != EXPAND_MEMORY
9780 && TREE_CODE (array) == CONSTRUCTOR
9781 && ! TREE_SIDE_EFFECTS (array)
9782 && TREE_CODE (index) == INTEGER_CST)
9783 {
9784 unsigned HOST_WIDE_INT ix;
9785 tree field, value;
9786
9787 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9788 field, value)
9789 if (tree_int_cst_equal (field, index))
9790 {
9791 if (!TREE_SIDE_EFFECTS (value))
9792 return expand_expr (fold (value), target, tmode, modifier);
9793 break;
9794 }
9795 }
9796
9797 else if (optimize >= 1
9798 && modifier != EXPAND_CONST_ADDRESS
9799 && modifier != EXPAND_INITIALIZER
9800 && modifier != EXPAND_MEMORY
9801 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9802 && TREE_CODE (index) == INTEGER_CST
9803 && (TREE_CODE (array) == VAR_DECL
9804 || TREE_CODE (array) == CONST_DECL)
9805 && (init = ctor_for_folding (array)) != error_mark_node)
9806 {
9807 if (TREE_CODE (init) == CONSTRUCTOR)
9808 {
9809 unsigned HOST_WIDE_INT ix;
9810 tree field, value;
9811
9812 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9813 field, value)
9814 if (tree_int_cst_equal (field, index))
9815 {
9816 if (TREE_SIDE_EFFECTS (value))
9817 break;
9818
9819 if (TREE_CODE (value) == CONSTRUCTOR)
9820 {
9821 /* If VALUE is a CONSTRUCTOR, this
9822 optimization is only useful if
9823 this doesn't store the CONSTRUCTOR
9824 into memory. If it does, it is more
9825 efficient to just load the data from
9826 the array directly. */
9827 rtx ret = expand_constructor (value, target,
9828 modifier, true);
9829 if (ret == NULL_RTX)
9830 break;
9831 }
9832
9833 return
9834 expand_expr (fold (value), target, tmode, modifier);
9835 }
9836 }
9837 else if (TREE_CODE (init) == STRING_CST)
9838 {
9839 tree low_bound = array_ref_low_bound (exp);
9840 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9841
9842 /* Optimize the special case of a zero lower bound.
9843
9844 We convert the lower bound to sizetype to avoid problems
9845 with constant folding. E.g. suppose the lower bound is
9846 1 and its mode is QI. Without the conversion
9847 (ARRAY + (INDEX - (unsigned char)1))
9848 becomes
9849 (ARRAY + (-(unsigned char)1) + INDEX)
9850 which becomes
9851 (ARRAY + 255 + INDEX). Oops! */
9852 if (!integer_zerop (low_bound))
9853 index1 = size_diffop_loc (loc, index1,
9854 fold_convert_loc (loc, sizetype,
9855 low_bound));
9856
9857 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9858 {
9859 tree type = TREE_TYPE (TREE_TYPE (init));
9860 enum machine_mode mode = TYPE_MODE (type);
9861
9862 if (GET_MODE_CLASS (mode) == MODE_INT
9863 && GET_MODE_SIZE (mode) == 1)
9864 return gen_int_mode (TREE_STRING_POINTER (init)
9865 [TREE_INT_CST_LOW (index1)],
9866 mode);
9867 }
9868 }
9869 }
9870 }
9871 goto normal_inner_ref;
9872
9873 case COMPONENT_REF:
9874 /* If the operand is a CONSTRUCTOR, we can just extract the
9875 appropriate field if it is present. */
9876 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9877 {
9878 unsigned HOST_WIDE_INT idx;
9879 tree field, value;
9880
9881 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9882 idx, field, value)
9883 if (field == treeop1
9884 /* We can normally use the value of the field in the
9885 CONSTRUCTOR. However, if this is a bitfield in
9886 an integral mode that we can fit in a HOST_WIDE_INT,
9887 we must mask only the number of bits in the bitfield,
9888 since this is done implicitly by the constructor. If
9889 the bitfield does not meet either of those conditions,
9890 we can't do this optimization. */
9891 && (! DECL_BIT_FIELD (field)
9892 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9893 && (GET_MODE_PRECISION (DECL_MODE (field))
9894 <= HOST_BITS_PER_WIDE_INT))))
9895 {
9896 if (DECL_BIT_FIELD (field)
9897 && modifier == EXPAND_STACK_PARM)
9898 target = 0;
9899 op0 = expand_expr (value, target, tmode, modifier);
9900 if (DECL_BIT_FIELD (field))
9901 {
9902 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9903 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9904
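		/* An unsigned field only needs its upper bits masked off;
		   a signed field is shifted to the top of IMODE and
		   arithmetic-shifted back down to sign-extend it.  */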
9905 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9906 {
9907 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9908 imode);
9909 op0 = expand_and (imode, op0, op1, target);
9910 }
9911 else
9912 {
9913 int count = GET_MODE_PRECISION (imode) - bitsize;
9914
9915 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9916 target, 0);
9917 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9918 target, 0);
9919 }
9920 }
9921
9922 return op0;
9923 }
9924 }
9925 goto normal_inner_ref;
9926
9927 case BIT_FIELD_REF:
9928 case ARRAY_RANGE_REF:
9929 normal_inner_ref:
9930 {
9931 enum machine_mode mode1, mode2;
9932 HOST_WIDE_INT bitsize, bitpos;
9933 tree offset;
9934 int volatilep = 0, must_force_mem;
9935 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9936 &mode1, &unsignedp, &volatilep, true);
9937 rtx orig_op0, memloc;
9938 bool mem_attrs_from_type = false;
9939
9940 /* If we got back the original object, something is wrong. Perhaps
9941 we are evaluating an expression too early. In any event, don't
9942 infinitely recurse. */
9943 gcc_assert (tem != exp);
9944
9945 /* If TEM's type is a union of variable size, pass TARGET to the inner
9946 computation, since it will need a temporary and TARGET is known
9947 to have to do. This occurs in unchecked conversion in Ada. */
9948 orig_op0 = op0
9949 = expand_expr (tem,
9950 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9951 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9952 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9953 != INTEGER_CST)
9954 && modifier != EXPAND_STACK_PARM
9955 ? target : NULL_RTX),
9956 VOIDmode,
9957 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
9958
9959 /* If the bitfield is volatile, we want to access it in the
9960 field's mode, not the computed mode.
9961 If a MEM has VOIDmode (external with incomplete type),
9962 use BLKmode for it instead. */
9963 if (MEM_P (op0))
9964 {
9965 if (volatilep && flag_strict_volatile_bitfields > 0)
9966 op0 = adjust_address (op0, mode1, 0);
9967 else if (GET_MODE (op0) == VOIDmode)
9968 op0 = adjust_address (op0, BLKmode, 0);
9969 }
9970
9971 mode2
9972 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9973
9974 /* If we have either an offset, a BLKmode result, or a reference
9975 outside the underlying object, we must force it to memory.
9976 Such a case can occur in Ada if we have unchecked conversion
9977 of an expression from a scalar type to an aggregate type or
9978 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9979 passed a partially uninitialized object or a view-conversion
9980 to a larger size. */
9981 must_force_mem = (offset
9982 || mode1 == BLKmode
9983 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9984
9985 /* Handle CONCAT first. */
9986 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9987 {
9988 if (bitpos == 0
9989 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9990 return op0;
9991 if (bitpos == 0
9992 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9993 && bitsize)
9994 {
9995 op0 = XEXP (op0, 0);
9996 mode2 = GET_MODE (op0);
9997 }
9998 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9999 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10000 && bitpos
10001 && bitsize)
10002 {
10003 op0 = XEXP (op0, 1);
10004 bitpos = 0;
10005 mode2 = GET_MODE (op0);
10006 }
10007 else
10008 /* Otherwise force into memory. */
10009 must_force_mem = 1;
10010 }
10011
10012 /* If this is a constant, put it in a register if it is a legitimate
10013 constant and we don't need a memory reference. */
10014 if (CONSTANT_P (op0)
10015 && mode2 != BLKmode
10016 && targetm.legitimate_constant_p (mode2, op0)
10017 && !must_force_mem)
10018 op0 = force_reg (mode2, op0);
10019
10020 /* Otherwise, if this is a constant, try to force it to the constant
10021 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10022 is a legitimate constant. */
10023 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10024 op0 = validize_mem (memloc);
10025
10026 /* Otherwise, if this is a constant or the object is not in memory
10027 and needs to be, put it there. */
10028 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10029 {
10030 tree nt = build_qualified_type (TREE_TYPE (tem),
10031 (TYPE_QUALS (TREE_TYPE (tem))
10032 | TYPE_QUAL_CONST));
10033 memloc = assign_temp (nt, 1, 1);
10034 emit_move_insn (memloc, op0);
10035 op0 = memloc;
10036 mem_attrs_from_type = true;
10037 }
10038
10039 if (offset)
10040 {
10041 enum machine_mode address_mode;
10042 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10043 EXPAND_SUM);
10044
10045 gcc_assert (MEM_P (op0));
10046
10047 address_mode = get_address_mode (op0);
10048 if (GET_MODE (offset_rtx) != address_mode)
10049 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10050
10051 if (GET_MODE (op0) == BLKmode
10052 /* A constant address in OP0 can have VOIDmode; we must
10053 not try to call force_reg in that case. */
10054 && GET_MODE (XEXP (op0, 0)) != VOIDmode
10055 && bitsize != 0
10056 && (bitpos % bitsize) == 0
10057 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10058 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10059 {
10060 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10061 bitpos = 0;
10062 }
10063
10064 op0 = offset_address (op0, offset_rtx,
10065 highest_pow2_factor (offset));
10066 }
10067
10068 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10069 record its alignment as BIGGEST_ALIGNMENT. */
10070 if (MEM_P (op0) && bitpos == 0 && offset != 0
10071 && is_aligning_offset (offset, tem))
10072 set_mem_align (op0, BIGGEST_ALIGNMENT);
10073
10074 /* Don't forget about volatility even if this is a bitfield. */
10075 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10076 {
10077 if (op0 == orig_op0)
10078 op0 = copy_rtx (op0);
10079
10080 MEM_VOLATILE_P (op0) = 1;
10081 }
10082
10083 /* In cases where an aligned union has an unaligned object
10084 as a field, we might be extracting a BLKmode value from
10085 an integer-mode (e.g., SImode) object. Handle this case
10086 by doing the extract into an object as wide as the field
10087 (which we know to be the width of a basic mode), then
10088 storing into memory, and changing the mode to BLKmode. */
10089 if (mode1 == VOIDmode
10090 || REG_P (op0) || GET_CODE (op0) == SUBREG
10091 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10092 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10093 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10094 && modifier != EXPAND_CONST_ADDRESS
10095 && modifier != EXPAND_INITIALIZER
10096 && modifier != EXPAND_MEMORY)
10097 /* If the field is volatile, we always want an aligned
10098 access. Do this in the following two situations:
10099 1. the access is not already naturally
10100 aligned, otherwise "normal" (non-bitfield) volatile fields
10101 become non-addressable.
10102 2. the bitsize is narrower than the access size. Need
10103 to extract bitfields from the access. */
10104 || (volatilep && flag_strict_volatile_bitfields > 0
10105 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
10106 || (mode1 != BLKmode
10107 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
10108 /* If the field isn't aligned enough to fetch as a memref,
10109 fetch it as a bit field. */
10110 || (mode1 != BLKmode
10111 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10112 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10113 || (MEM_P (op0)
10114 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10115 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10116 && modifier != EXPAND_MEMORY
10117 && ((modifier == EXPAND_CONST_ADDRESS
10118 || modifier == EXPAND_INITIALIZER)
10119 ? STRICT_ALIGNMENT
10120 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10121 || (bitpos % BITS_PER_UNIT != 0)))
10122 /* If the type and the field are a constant size and the
10123 size of the type isn't the same size as the bitfield,
10124 we must use bitfield operations. */
10125 || (bitsize >= 0
10126 && TYPE_SIZE (TREE_TYPE (exp))
10127 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10128 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10129 bitsize)))
10130 {
10131 enum machine_mode ext_mode = mode;
10132
10133 if (ext_mode == BLKmode
10134 && ! (target != 0 && MEM_P (op0)
10135 && MEM_P (target)
10136 && bitpos % BITS_PER_UNIT == 0))
10137 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10138
10139 if (ext_mode == BLKmode)
10140 {
10141 if (target == 0)
10142 target = assign_temp (type, 1, 1);
10143
10144 if (bitsize == 0)
10145 return target;
10146
10147 /* In this case, BITPOS must start at a byte boundary and
10148 TARGET, if specified, must be a MEM. */
10149 gcc_assert (MEM_P (op0)
10150 && (!target || MEM_P (target))
10151 && !(bitpos % BITS_PER_UNIT));
10152
10153 emit_block_move (target,
10154 adjust_address (op0, VOIDmode,
10155 bitpos / BITS_PER_UNIT),
10156 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10157 / BITS_PER_UNIT),
10158 (modifier == EXPAND_STACK_PARM
10159 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10160
10161 return target;
10162 }
10163
10164 op0 = validize_mem (op0);
10165
10166 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10167 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10168
10169 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10170 (modifier == EXPAND_STACK_PARM
10171 ? NULL_RTX : target),
10172 ext_mode, ext_mode);
10173
10174 /* If the result is a record type and BITSIZE is narrower than
10175 the mode of OP0, an integral mode, and this is a big endian
10176 machine, we must put the field into the high-order bits. */
10177 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10178 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10179 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10180 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10181 GET_MODE_BITSIZE (GET_MODE (op0))
10182 - bitsize, op0, 1);
10183
10184 /* If the result type is BLKmode, store the data into a temporary
10185 of the appropriate type, but with the mode corresponding to the
10186 mode for the data we have (op0's mode). It's tempting to make
10187 this a constant type, since we know it's only being stored once,
10188 but that can cause problems if we are taking the address of this
10189 COMPONENT_REF because the MEM of any reference via that address
10190 will have flags corresponding to the type, which will not
10191 necessarily be constant. */
10192 if (mode == BLKmode)
10193 {
10194 rtx new_rtx;
10195
10196 new_rtx = assign_stack_temp_for_type (ext_mode,
10197 GET_MODE_BITSIZE (ext_mode),
10198 type);
10199 emit_move_insn (new_rtx, op0);
10200 op0 = copy_rtx (new_rtx);
10201 PUT_MODE (op0, BLKmode);
10202 }
10203
10204 return op0;
10205 }
10206
10207 /* If the result is BLKmode, use that to access the object
10208 now as well. */
10209 if (mode == BLKmode)
10210 mode1 = BLKmode;
10211
10212 /* Get a reference to just this component. */
10213 if (modifier == EXPAND_CONST_ADDRESS
10214 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10215 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10216 else
10217 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10218
10219 if (op0 == orig_op0)
10220 op0 = copy_rtx (op0);
10221
10222 /* If op0 is a temporary because of forcing to memory, pass only the
10223 type to set_mem_attributes so that the original expression is never
10224 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10225 if (mem_attrs_from_type)
10226 set_mem_attributes (op0, type, 0);
10227 else
10228 set_mem_attributes (op0, exp, 0);
10229
10230 if (REG_P (XEXP (op0, 0)))
10231 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10232
10233 MEM_VOLATILE_P (op0) |= volatilep;
10234 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10235 || modifier == EXPAND_CONST_ADDRESS
10236 || modifier == EXPAND_INITIALIZER)
10237 return op0;
10238
10239 if (target == 0)
10240 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10241
10242 convert_move (target, op0, unsignedp);
10243 return target;
10244 }
10245
10246 case OBJ_TYPE_REF:
10247 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10248
10249 case CALL_EXPR:
10250 /* All valid uses of __builtin_va_arg_pack () are removed during
10251 inlining. */
10252 if (CALL_EXPR_VA_ARG_PACK (exp))
10253 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10254 {
10255 tree fndecl = get_callee_fndecl (exp), attr;
10256
10257 if (fndecl
10258 && (attr = lookup_attribute ("error",
10259 DECL_ATTRIBUTES (fndecl))) != NULL)
10260 error ("%Kcall to %qs declared with attribute error: %s",
10261 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10262 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10263 if (fndecl
10264 && (attr = lookup_attribute ("warning",
10265 DECL_ATTRIBUTES (fndecl))) != NULL)
10266 warning_at (tree_nonartificial_location (exp),
10267 0, "%Kcall to %qs declared with attribute warning: %s",
10268 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10269 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10270
10271 /* Check for a built-in function. */
10272 if (fndecl && DECL_BUILT_IN (fndecl))
10273 {
10274 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10275 return expand_builtin (exp, target, subtarget, tmode, ignore);
10276 }
10277 }
10278 return expand_call (exp, target, ignore);
10279
10280 case VIEW_CONVERT_EXPR:
10281 op0 = NULL_RTX;
10282
10283 /* If we are converting to BLKmode, try to avoid an intermediate
10284 temporary by fetching an inner memory reference. */
10285 if (mode == BLKmode
10286 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10287 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10288 && handled_component_p (treeop0))
10289 {
10290 enum machine_mode mode1;
10291 HOST_WIDE_INT bitsize, bitpos;
10292 tree offset;
10293 int unsignedp;
10294 int volatilep = 0;
10295 tree tem
10296 = get_inner_reference (treeop0, &bitsize, &bitpos,
10297 &offset, &mode1, &unsignedp, &volatilep,
10298 true);
10299 rtx orig_op0;
10300
10301 /* ??? We should work harder and deal with non-zero offsets. */
10302 if (!offset
10303 && (bitpos % BITS_PER_UNIT) == 0
10304 && bitsize >= 0
10305 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10306 {
10307 /* See the normal_inner_ref case for the rationale. */
10308 orig_op0
10309 = expand_expr (tem,
10310 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10311 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10312 != INTEGER_CST)
10313 && modifier != EXPAND_STACK_PARM
10314 ? target : NULL_RTX),
10315 VOIDmode,
10316 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
10317
10318 if (MEM_P (orig_op0))
10319 {
10320 op0 = orig_op0;
10321
10322 /* Get a reference to just this component. */
10323 if (modifier == EXPAND_CONST_ADDRESS
10324 || modifier == EXPAND_SUM
10325 || modifier == EXPAND_INITIALIZER)
10326 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10327 else
10328 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10329
10330 if (op0 == orig_op0)
10331 op0 = copy_rtx (op0);
10332
10333 set_mem_attributes (op0, treeop0, 0);
10334 if (REG_P (XEXP (op0, 0)))
10335 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10336
10337 MEM_VOLATILE_P (op0) |= volatilep;
10338 }
10339 }
10340 }
10341
10342 if (!op0)
10343 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
10344
10345 /* If the input and output modes are both the same, we are done. */
10346 if (mode == GET_MODE (op0))
10347 ;
10348 /* If neither mode is BLKmode, and both modes are the same size
10349 then we can use gen_lowpart. */
10350 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10351 && (GET_MODE_PRECISION (mode)
10352 == GET_MODE_PRECISION (GET_MODE (op0)))
10353 && !COMPLEX_MODE_P (GET_MODE (op0)))
10354 {
10355 if (GET_CODE (op0) == SUBREG)
10356 op0 = force_reg (GET_MODE (op0), op0);
10357 temp = gen_lowpart_common (mode, op0);
10358 if (temp)
10359 op0 = temp;
10360 else
10361 {
10362 if (!REG_P (op0) && !MEM_P (op0))
10363 op0 = force_reg (GET_MODE (op0), op0);
10364 op0 = gen_lowpart (mode, op0);
10365 }
10366 }
10367 /* If both types are integral, convert from one mode to the other. */
10368 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10369 op0 = convert_modes (mode, GET_MODE (op0), op0,
10370 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10371 /* As a last resort, spill op0 to memory, and reload it in a
10372 different mode. */
10373 else if (!MEM_P (op0))
10374 {
10375 /* If the operand is not a MEM, force it into memory. Since we
10376 are going to be changing the mode of the MEM, don't call
10377 force_const_mem for constants because we don't allow pool
10378 constants to change mode. */
10379 tree inner_type = TREE_TYPE (treeop0);
10380
10381 gcc_assert (!TREE_ADDRESSABLE (exp));
10382
10383 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10384 target
10385 = assign_stack_temp_for_type
10386 (TYPE_MODE (inner_type),
10387 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10388
10389 emit_move_insn (target, op0);
10390 op0 = target;
10391 }
10392
10393 /* At this point, OP0 is in the correct mode. If the output type is
10394 such that the operand is known to be aligned, indicate that it is.
10395 Otherwise, we need only be concerned about alignment for non-BLKmode
10396 results. */
10397 if (MEM_P (op0))
10398 {
10399 enum insn_code icode;
10400
10401 if (TYPE_ALIGN_OK (type))
10402 {
10403 /* ??? Copying the MEM without substantially changing it might
10404 run afoul of the code handling volatile memory references in
10405 store_expr, which assumes that TARGET is returned unmodified
10406 if it has been used. */
10407 op0 = copy_rtx (op0);
10408 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10409 }
10410 else if (mode != BLKmode
10411 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10412 /* If the target does have special handling for unaligned
10413 loads of this mode, then use them. */
10414 && ((icode = optab_handler (movmisalign_optab, mode))
10415 != CODE_FOR_nothing))
10416 {
10417 rtx reg, insn;
10418
10419 op0 = adjust_address (op0, mode, 0);
10420 /* We've already validated the memory, and we're creating a
10421 new pseudo destination. The predicates really can't
10422 fail. */
10423 reg = gen_reg_rtx (mode);
10424
10425 /* Nor can the insn generator. */
10426 insn = GEN_FCN (icode) (reg, op0);
10427 emit_insn (insn);
10428 return reg;
10429 }
10430 else if (STRICT_ALIGNMENT
10431 && mode != BLKmode
10432 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10433 {
10434 tree inner_type = TREE_TYPE (treeop0);
10435 HOST_WIDE_INT temp_size
10436 = MAX (int_size_in_bytes (inner_type),
10437 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10438 rtx new_rtx
10439 = assign_stack_temp_for_type (mode, temp_size, type);
10440 rtx new_with_op0_mode
10441 = adjust_address (new_rtx, GET_MODE (op0), 0);
10442
10443 gcc_assert (!TREE_ADDRESSABLE (exp));
10444
10445 if (GET_MODE (op0) == BLKmode)
10446 emit_block_move (new_with_op0_mode, op0,
10447 GEN_INT (GET_MODE_SIZE (mode)),
10448 (modifier == EXPAND_STACK_PARM
10449 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10450 else
10451 emit_move_insn (new_with_op0_mode, op0);
10452
10453 op0 = new_rtx;
10454 }
10455
10456 op0 = adjust_address (op0, mode, 0);
10457 }
10458
10459 return op0;
10460
10461 case MODIFY_EXPR:
10462 {
10463 tree lhs = treeop0;
10464 tree rhs = treeop1;
10465 gcc_assert (ignore);
10466
10467 /* Check for |= or &= of a bitfield of size one into another bitfield
10468 of size one. In this case (unless we need the result of the
10469 assignment) we can do this more efficiently with a
10470 test followed by an assignment, if necessary.
10471
10472 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10473 things change so we do, this code should be enhanced to
10474 support it. */
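	/* Illustrative example (hypothetical source, not from this file)
	   of an assignment this special case targets:

	     struct s { unsigned a : 1; unsigned b : 1; };

	     void
	     set_flag (struct s *p)
	     {
	       p->a |= p->b;
	     }

	   Instead of a read-modify-write of P->A we can test P->B and,
	   only if needed, store the constant into P->A.  */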
10475 if (TREE_CODE (lhs) == COMPONENT_REF
10476 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10477 || TREE_CODE (rhs) == BIT_AND_EXPR)
10478 && TREE_OPERAND (rhs, 0) == lhs
10479 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10480 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10481 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10482 {
10483 rtx label = gen_label_rtx ();
10484 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10485 do_jump (TREE_OPERAND (rhs, 1),
10486 value ? label : 0,
10487 value ? 0 : label, -1);
10488 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10489 false);
10490 do_pending_stack_adjust ();
10491 emit_label (label);
10492 return const0_rtx;
10493 }
10494
10495 expand_assignment (lhs, rhs, false);
10496 return const0_rtx;
10497 }
10498
10499 case ADDR_EXPR:
10500 return expand_expr_addr_expr (exp, target, tmode, modifier);
10501
10502 case REALPART_EXPR:
10503 op0 = expand_normal (treeop0);
10504 return read_complex_part (op0, false);
10505
10506 case IMAGPART_EXPR:
10507 op0 = expand_normal (treeop0);
10508 return read_complex_part (op0, true);
10509
10510 case RETURN_EXPR:
10511 case LABEL_EXPR:
10512 case GOTO_EXPR:
10513 case SWITCH_EXPR:
10514 case ASM_EXPR:
10515 /* Expanded in cfgexpand.c. */
10516 gcc_unreachable ();
10517
10518 case TRY_CATCH_EXPR:
10519 case CATCH_EXPR:
10520 case EH_FILTER_EXPR:
10521 case TRY_FINALLY_EXPR:
10522 /* Lowered by tree-eh.c. */
10523 gcc_unreachable ();
10524
10525 case WITH_CLEANUP_EXPR:
10526 case CLEANUP_POINT_EXPR:
10527 case TARGET_EXPR:
10528 case CASE_LABEL_EXPR:
10529 case VA_ARG_EXPR:
10530 case BIND_EXPR:
10531 case INIT_EXPR:
10532 case CONJ_EXPR:
10533 case COMPOUND_EXPR:
10534 case PREINCREMENT_EXPR:
10535 case PREDECREMENT_EXPR:
10536 case POSTINCREMENT_EXPR:
10537 case POSTDECREMENT_EXPR:
10538 case LOOP_EXPR:
10539 case EXIT_EXPR:
10540 case COMPOUND_LITERAL_EXPR:
10541 /* Lowered by gimplify.c. */
10542 gcc_unreachable ();
10543
10544 case FDESC_EXPR:
10545 /* Function descriptors are not valid except as
10546 initialization constants, and should not be expanded. */
10547 gcc_unreachable ();
10548
10549 case WITH_SIZE_EXPR:
10550 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10551 have pulled out the size to use in whatever context it needed. */
10552 return expand_expr_real (treeop0, original_target, tmode,
10553 modifier, alt_rtl);
10554
10555 default:
10556 return expand_expr_real_2 (&ops, target, tmode, modifier);
10557 }
10558 }
10559 \f
10560 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10561 signedness of TYPE), possibly returning the result in TARGET. */
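/* Illustrative sketch (not part of this file): given a 32-bit unsigned
   int VAL, reducing to a 3-bit bit-field type amounts to

     unsigned int ured = val & 7;			(unsigned TYPE)
     int sred = (int) (val << 29) >> 29;		(signed TYPE)

   i.e. a mask for unsigned types and a left/arithmetic-right shift pair
   that sign-extends from bit 2 for signed types; constants are instead
   rebuilt directly with build_int_cst_type.  */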
10562 static rtx
10563 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10564 {
10565 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10566 if (target && GET_MODE (target) != GET_MODE (exp))
10567 target = 0;
10568 /* For constant values, reduce using build_int_cst_type. */
10569 if (CONST_INT_P (exp))
10570 {
10571 HOST_WIDE_INT value = INTVAL (exp);
10572 tree t = build_int_cst_type (type, value);
10573 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10574 }
10575 else if (TYPE_UNSIGNED (type))
10576 {
10577 rtx mask = immed_double_int_const (double_int::mask (prec),
10578 GET_MODE (exp));
10579 return expand_and (GET_MODE (exp), exp, mask, target);
10580 }
10581 else
10582 {
10583 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10584 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10585 exp, count, target, 0);
10586 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10587 exp, count, target, 0);
10588 }
10589 }
10590 \f
10591 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10592 when applied to the address of EXP produces an address known to be
10593 aligned more than BIGGEST_ALIGNMENT. */
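/* Illustrative sketch of the pattern matched here (hypothetical source,
   not from this file): such an offset typically comes from manual
   alignment code of the form

     offset = (-(uintptr_t) &exp) & (ALIGN - 1);

   i.e. a BIT_AND_EXPR whose second operand is the constant ALIGN - 1
   and whose first operand is the negated address of EXP, so that
   &EXP + OFFSET is ALIGN-aligned.  */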
10594
10595 static int
10596 is_aligning_offset (const_tree offset, const_tree exp)
10597 {
10598 /* Strip off any conversions. */
10599 while (CONVERT_EXPR_P (offset))
10600 offset = TREE_OPERAND (offset, 0);
10601
10602 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10603 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10604 if (TREE_CODE (offset) != BIT_AND_EXPR
10605 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10606 || compare_tree_int (TREE_OPERAND (offset, 1),
10607 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10608 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10609 return 0;
10610
10611 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10612 It must be NEGATE_EXPR. Then strip any more conversions. */
10613 offset = TREE_OPERAND (offset, 0);
10614 while (CONVERT_EXPR_P (offset))
10615 offset = TREE_OPERAND (offset, 0);
10616
10617 if (TREE_CODE (offset) != NEGATE_EXPR)
10618 return 0;
10619
10620 offset = TREE_OPERAND (offset, 0);
10621 while (CONVERT_EXPR_P (offset))
10622 offset = TREE_OPERAND (offset, 0);
10623
10624 /* This must now be the address of EXP. */
10625 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10626 }
10627 \f
10628 /* Return the tree node if ARG corresponds to a string constant, or zero
10629 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10630 in bytes within the string that ARG is accessing. The type of the
10631 offset will be `sizetype'. */
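/* For illustration (hypothetical source, not from this file):

     const char *p = "hello" + 2;
     static const char msg[] = "world";
     const char *q = &msg[1];

   For the initializer of P, string_constant returns the STRING_CST
   "hello" with *PTR_OFFSET set to 2; for Q it returns the STRING_CST
   initializer of MSG with *PTR_OFFSET set to 1.  */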
10632
10633 tree
10634 string_constant (tree arg, tree *ptr_offset)
10635 {
10636 tree array, offset, lower_bound;
10637 STRIP_NOPS (arg);
10638
10639 if (TREE_CODE (arg) == ADDR_EXPR)
10640 {
10641 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10642 {
10643 *ptr_offset = size_zero_node;
10644 return TREE_OPERAND (arg, 0);
10645 }
10646 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10647 {
10648 array = TREE_OPERAND (arg, 0);
10649 offset = size_zero_node;
10650 }
10651 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10652 {
10653 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10654 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10655 if (TREE_CODE (array) != STRING_CST
10656 && TREE_CODE (array) != VAR_DECL)
10657 return 0;
10658
10659 /* Check if the array has a nonzero lower bound. */
10660 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10661 if (!integer_zerop (lower_bound))
10662 {
10663 /* If the offset and base aren't both constants, return 0. */
10664 if (TREE_CODE (lower_bound) != INTEGER_CST)
10665 return 0;
10666 if (TREE_CODE (offset) != INTEGER_CST)
10667 return 0;
10668 /* Adjust offset by the lower bound. */
10669 offset = size_diffop (fold_convert (sizetype, offset),
10670 fold_convert (sizetype, lower_bound));
10671 }
10672 }
10673 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10674 {
10675 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10676 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10677 if (TREE_CODE (array) != ADDR_EXPR)
10678 return 0;
10679 array = TREE_OPERAND (array, 0);
10680 if (TREE_CODE (array) != STRING_CST
10681 && TREE_CODE (array) != VAR_DECL)
10682 return 0;
10683 }
10684 else
10685 return 0;
10686 }
10687 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10688 {
10689 tree arg0 = TREE_OPERAND (arg, 0);
10690 tree arg1 = TREE_OPERAND (arg, 1);
10691
10692 STRIP_NOPS (arg0);
10693 STRIP_NOPS (arg1);
10694
10695 if (TREE_CODE (arg0) == ADDR_EXPR
10696 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10697 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10698 {
10699 array = TREE_OPERAND (arg0, 0);
10700 offset = arg1;
10701 }
10702 else if (TREE_CODE (arg1) == ADDR_EXPR
10703 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10704 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10705 {
10706 array = TREE_OPERAND (arg1, 0);
10707 offset = arg0;
10708 }
10709 else
10710 return 0;
10711 }
10712 else
10713 return 0;
10714
10715 if (TREE_CODE (array) == STRING_CST)
10716 {
10717 *ptr_offset = fold_convert (sizetype, offset);
10718 return array;
10719 }
10720 else if (TREE_CODE (array) == VAR_DECL
10721 || TREE_CODE (array) == CONST_DECL)
10722 {
10723 int length;
10724 tree init = ctor_for_folding (array);
10725
10726 /* Variables initialized to string literals can be handled too. */
10727 if (init == error_mark_node
10728 || !init
10729 || TREE_CODE (init) != STRING_CST)
10730 return 0;
10731
10732 /* Avoid const char foo[4] = "abcde"; */
10733 if (DECL_SIZE_UNIT (array) == NULL_TREE
10734 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10735 || (length = TREE_STRING_LENGTH (init)) <= 0
10736 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10737 return 0;
10738
10739 /* If the variable is bigger than the string literal, OFFSET must be
10740 constant and within the bounds of the string literal. */
10741 offset = fold_convert (sizetype, offset);
10742 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10743 && (! tree_fits_uhwi_p (offset)
10744 || compare_tree_int (offset, length) >= 0))
10745 return 0;
10746
10747 *ptr_offset = offset;
10748 return init;
10749 }
10750
10751 return 0;
10752 }
10753 \f
10754 /* Generate code to calculate the comparison described by OPS (an
10755 exploded expression) using a store-flag instruction, and return an
10756 rtx for the result.
10757
10758 If TARGET is nonzero, store the result there if convenient.
10759
10760 Return zero if there is no suitable set-flag instruction
10761 available on this machine.
10762
10763 Once expand_expr has been called on the arguments of the comparison,
10764 we are committed to doing the store flag, since it is not safe to
10765 re-evaluate the expression. We emit the store-flag insn by calling
10766 emit_store_flag, but only expand the arguments if we have a reason
10767 to believe that emit_store_flag will be successful. If we think that
10768 it will, but it isn't, we have to simulate the store-flag with a
10769 set/jump/set sequence. */
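/* Illustrative sketch (target-dependent, not from this file): for a
   comparison such as

     int is_negative (int x) { return x < 0; }

   a successful store-flag expansion yields a single scc-style
   instruction (a "set if less than" variant on many targets) writing
   0 or 1 into the result register, with no conditional branch.  */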
10770
10771 static rtx
10772 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10773 {
10774 enum rtx_code code;
10775 tree arg0, arg1, type;
10776 tree tem;
10777 enum machine_mode operand_mode;
10778 int unsignedp;
10779 rtx op0, op1;
10780 rtx subtarget = target;
10781 location_t loc = ops->location;
10782
10783 arg0 = ops->op0;
10784 arg1 = ops->op1;
10785
10786 /* Don't crash if the comparison was erroneous. */
10787 if (arg0 == error_mark_node || arg1 == error_mark_node)
10788 return const0_rtx;
10789
10790 type = TREE_TYPE (arg0);
10791 operand_mode = TYPE_MODE (type);
10792 unsignedp = TYPE_UNSIGNED (type);
10793
10794 /* We won't bother with BLKmode store-flag operations because it would mean
10795 passing a lot of information to emit_store_flag. */
10796 if (operand_mode == BLKmode)
10797 return 0;
10798
10799 /* We won't bother with store-flag operations involving function pointers
10800 when function pointers must be canonicalized before comparisons. */
10801 #ifdef HAVE_canonicalize_funcptr_for_compare
10802 if (HAVE_canonicalize_funcptr_for_compare
10803 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10804 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10805 == FUNCTION_TYPE))
10806 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10807 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10808 == FUNCTION_TYPE))))
10809 return 0;
10810 #endif
10811
10812 STRIP_NOPS (arg0);
10813 STRIP_NOPS (arg1);
10814
10815 /* For vector typed comparisons emit code to generate the desired
10816 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10817 expander for this. */
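  /* For example (illustrative): with v4si a GCC vector of four ints,

       v4si mask = a > b;

     must produce all-ones or all-zeros in each lane, which is what the
     equivalent VEC_COND_EXPR  a > b ? {-1,-1,-1,-1} : {0,0,0,0}  gives.  */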
10818 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10819 {
10820 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10821 tree if_true = constant_boolean_node (true, ops->type);
10822 tree if_false = constant_boolean_node (false, ops->type);
10823 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10824 }
10825
10826 /* Get the rtx comparison code to use. We know that OPS describes a comparison
10827 operation of some type. Some comparisons against 1 and -1 can be
10828 converted to comparisons with zero. Do so here so that the tests
10829 below will be aware that we have a comparison with zero. These
10830 tests will not catch constants in the first operand, but constants
10831 are rarely passed as the first operand. */
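  /* For instance (illustrative): a signed "x > -1" becomes a GE test
     against zero, a signed "x <= -1" becomes LT against zero, and an
     unsigned "x >= 1" becomes GTU against zero, which is cheaper on
     most targets and lets the single-bit test below apply.  */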
10832
10833 switch (ops->code)
10834 {
10835 case EQ_EXPR:
10836 code = EQ;
10837 break;
10838 case NE_EXPR:
10839 code = NE;
10840 break;
10841 case LT_EXPR:
10842 if (integer_onep (arg1))
10843 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10844 else
10845 code = unsignedp ? LTU : LT;
10846 break;
10847 case LE_EXPR:
10848 if (! unsignedp && integer_all_onesp (arg1))
10849 arg1 = integer_zero_node, code = LT;
10850 else
10851 code = unsignedp ? LEU : LE;
10852 break;
10853 case GT_EXPR:
10854 if (! unsignedp && integer_all_onesp (arg1))
10855 arg1 = integer_zero_node, code = GE;
10856 else
10857 code = unsignedp ? GTU : GT;
10858 break;
10859 case GE_EXPR:
10860 if (integer_onep (arg1))
10861 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10862 else
10863 code = unsignedp ? GEU : GE;
10864 break;
10865
10866 case UNORDERED_EXPR:
10867 code = UNORDERED;
10868 break;
10869 case ORDERED_EXPR:
10870 code = ORDERED;
10871 break;
10872 case UNLT_EXPR:
10873 code = UNLT;
10874 break;
10875 case UNLE_EXPR:
10876 code = UNLE;
10877 break;
10878 case UNGT_EXPR:
10879 code = UNGT;
10880 break;
10881 case UNGE_EXPR:
10882 code = UNGE;
10883 break;
10884 case UNEQ_EXPR:
10885 code = UNEQ;
10886 break;
10887 case LTGT_EXPR:
10888 code = LTGT;
10889 break;
10890
10891 default:
10892 gcc_unreachable ();
10893 }
10894
10895 /* Put a constant second. */
10896 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10897 || TREE_CODE (arg0) == FIXED_CST)
10898 {
10899 tem = arg0; arg0 = arg1; arg1 = tem;
10900 code = swap_condition (code);
10901 }
10902
10903 /* If this is an equality or inequality test of a single bit, we can
10904 do this by shifting the bit being tested to the low-order bit and
10905 masking the result with the constant 1. If the condition was EQ,
10906 we xor it with 1. This does not require an scc insn and is faster
10907 than an scc insn even if we have it.
10908
10909 The code to make this transformation was moved into fold_single_bit_test,
10910 so we just call into the folder and expand its result. */
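  /* Illustrative sketch (hypothetical source, not from this file):

       int has_bit3 (unsigned x) { return (x & 8) != 0; }

     fold_single_bit_test rewrites the test as ((x >> 3) & 1); the EQ
     form additionally XORs the result with 1.  No scc instruction is
     needed in either case.  */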
10911
10912 if ((code == NE || code == EQ)
10913 && integer_zerop (arg1)
10914 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10915 {
10916 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10917 if (srcstmt
10918 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10919 {
10920 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10921 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10922 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10923 gimple_assign_rhs1 (srcstmt),
10924 gimple_assign_rhs2 (srcstmt));
10925 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10926 if (temp)
10927 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10928 }
10929 }
10930
10931 if (! get_subtarget (target)
10932 || GET_MODE (subtarget) != operand_mode)
10933 subtarget = 0;
10934
10935 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10936
10937 if (target == 0)
10938 target = gen_reg_rtx (mode);
10939
10940 /* Try a cstore if possible. */
10941 return emit_store_flag_force (target, code, op0, op1,
10942 operand_mode, unsignedp,
10943 (TYPE_PRECISION (ops->type) == 1
10944 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10945 }
10946 \f
10947
10948 /* Stubs in case we haven't got a casesi insn. */
10949 #ifndef HAVE_casesi
10950 # define HAVE_casesi 0
10951 # define gen_casesi(a, b, c, d, e) (0)
10952 # define CODE_FOR_casesi CODE_FOR_nothing
10953 #endif
10954
10955 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10956 0 otherwise (i.e. if there is no casesi instruction).
10957
10958 DEFAULT_PROBABILITY is the probability of jumping to the default
10959 label. */
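/* Illustrative usage sketch (simplified, hypothetical caller): the
   switch expansion code tries a casesi insn first and falls back to
   a plain jump table, roughly

     if (!try_casesi (index_type, index_expr, minval, range,
		      table_label, default_label, fallback_label, prob))
       ok = try_tablejump (index_type, index_expr, minval, range,
			   table_label, default_label, prob);

   where PROB is the probability of reaching the default label.  */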
10960 int
10961 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10962 rtx table_label, rtx default_label, rtx fallback_label,
10963 int default_probability)
10964 {
10965 struct expand_operand ops[5];
10966 enum machine_mode index_mode = SImode;
10967 rtx op1, op2, index;
10968
10969 if (! HAVE_casesi)
10970 return 0;
10971
10972 /* Convert the index to SImode. */
10973 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10974 {
10975 enum machine_mode omode = TYPE_MODE (index_type);
10976 rtx rangertx = expand_normal (range);
10977
10978 /* We must handle the endpoints in the original mode. */
10979 index_expr = build2 (MINUS_EXPR, index_type,
10980 index_expr, minval);
10981 minval = integer_zero_node;
10982 index = expand_normal (index_expr);
10983 if (default_label)
10984 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10985 omode, 1, default_label,
10986 default_probability);
10987 /* Now we can safely truncate. */
10988 index = convert_to_mode (index_mode, index, 0);
10989 }
10990 else
10991 {
10992 if (TYPE_MODE (index_type) != index_mode)
10993 {
10994 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10995 index_expr = fold_convert (index_type, index_expr);
10996 }
10997
10998 index = expand_normal (index_expr);
10999 }
11000
11001 do_pending_stack_adjust ();
11002
11003 op1 = expand_normal (minval);
11004 op2 = expand_normal (range);
11005
11006 create_input_operand (&ops[0], index, index_mode);
11007 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11008 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11009 create_fixed_operand (&ops[3], table_label);
11010 create_fixed_operand (&ops[4], (default_label
11011 ? default_label
11012 : fallback_label));
11013 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11014 return 1;
11015 }
11016
11017 /* Attempt to generate a tablejump instruction; same concept. */
11018 #ifndef HAVE_tablejump
11019 #define HAVE_tablejump 0
11020 #define gen_tablejump(x, y) (0)
11021 #endif
11022
11023 /* Subroutine of the next function.
11024
11025 INDEX is the value being switched on, with the lowest value
11026 in the table already subtracted.
11027 MODE is its expected mode (needed if INDEX is constant).
11028 RANGE is the length of the jump table.
11029 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11030
11031 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11032 index value is out of range.
11033 DEFAULT_PROBABILITY is the probability of jumping to
11034 the default label. */
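/* Illustrative sketch (hypothetical, non-PIC absolute vector) of the
   dispatch emitted below:

     if (index > range)
       goto default_label;
     goto *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));

   where the bounds check is a single unsigned comparison because the
   minimum case value has already been subtracted from INDEX.  */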
11035
11036 static void
11037 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11038 rtx default_label, int default_probability)
11039 {
11040 rtx temp, vector;
11041
11042 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11043 cfun->cfg->max_jumptable_ents = INTVAL (range);
11044
11045 /* Do an unsigned comparison (in the proper mode) between the index
11046 expression and the value which represents the length of the range.
11047 Since we just finished subtracting the lower bound of the range
11048 from the index expression, this comparison allows us to simultaneously
11049 check that the original index expression value is both greater than
11050 or equal to the minimum value of the range and less than or equal to
11051 the maximum value of the range. */
11052
11053 if (default_label)
11054 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11055 default_label, default_probability);
11056
11057
11058 /* If index is in range, it must fit in Pmode.
11059 Convert to Pmode so we can index with it. */
11060 if (mode != Pmode)
11061 index = convert_to_mode (Pmode, index, 1);
11062
11063 /* Don't let a MEM slip through, because then INDEX that comes
11064 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11065 and break_out_memory_refs will go to work on it and mess it up. */
11066 #ifdef PIC_CASE_VECTOR_ADDRESS
11067 if (flag_pic && !REG_P (index))
11068 index = copy_to_mode_reg (Pmode, index);
11069 #endif
11070
11071 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11072 GET_MODE_SIZE, because this indicates how large insns are. The other
11073 uses should all be Pmode, because they are addresses. This code
11074 could fail if addresses and insns are not the same size. */
11075 index = gen_rtx_PLUS
11076 (Pmode,
11077 gen_rtx_MULT (Pmode, index,
11078 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
11079 gen_rtx_LABEL_REF (Pmode, table_label));
11080 #ifdef PIC_CASE_VECTOR_ADDRESS
11081 if (flag_pic)
11082 index = PIC_CASE_VECTOR_ADDRESS (index);
11083 else
11084 #endif
11085 index = memory_address (CASE_VECTOR_MODE, index);
11086 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11087 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11088 convert_move (temp, vector, 0);
11089
11090 emit_jump_insn (gen_tablejump (temp, table_label));
11091
11092 /* If we are generating PIC code or if the table is PC-relative, the
11093 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11094 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11095 emit_barrier ();
11096 }
11097
11098 int
11099 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11100 rtx table_label, rtx default_label, int default_probability)
11101 {
11102 rtx index;
11103
11104 if (! HAVE_tablejump)
11105 return 0;
11106
11107 index_expr = fold_build2 (MINUS_EXPR, index_type,
11108 fold_convert (index_type, index_expr),
11109 fold_convert (index_type, minval));
11110 index = expand_normal (index_expr);
11111 do_pending_stack_adjust ();
11112
11113 do_tablejump (index, TYPE_MODE (index_type),
11114 convert_modes (TYPE_MODE (index_type),
11115 TYPE_MODE (TREE_TYPE (range)),
11116 expand_normal (range),
11117 TYPE_UNSIGNED (TREE_TYPE (range))),
11118 table_label, default_label, default_probability);
11119 return 1;
11120 }
11121
11122 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
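/* For example (illustrative): with v4si a vector of four 32-bit ints,
   the VECTOR_CST for { 1, 2, 3, 4 } becomes

     (const_vector:V4SI [(const_int 1) (const_int 2)
			 (const_int 3) (const_int 4)])

   with REAL_CST and FIXED_CST elements handled analogously.  */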
11123 static rtx
11124 const_vector_from_tree (tree exp)
11125 {
11126 rtvec v;
11127 unsigned i;
11128 int units;
11129 tree elt;
11130 enum machine_mode inner, mode;
11131
11132 mode = TYPE_MODE (TREE_TYPE (exp));
11133
11134 if (initializer_zerop (exp))
11135 return CONST0_RTX (mode);
11136
11137 units = GET_MODE_NUNITS (mode);
11138 inner = GET_MODE_INNER (mode);
11139
11140 v = rtvec_alloc (units);
11141
11142 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11143 {
11144 elt = VECTOR_CST_ELT (exp, i);
11145
11146 if (TREE_CODE (elt) == REAL_CST)
11147 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11148 inner);
11149 else if (TREE_CODE (elt) == FIXED_CST)
11150 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11151 inner);
11152 else
11153 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11154 inner);
11155 }
11156
11157 return gen_rtx_CONST_VECTOR (mode, v);
11158 }
11159
11160 /* Build a decl for a personality function given a language prefix. */
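/* For example (illustrative): with DWARF-2 unwind info,
   build_personality_function ("gxx") declares the equivalent of

     extern int __gxx_personality_v0 (int, long long unsigned int,
				      void *, void *);

   while the SJLJ and SEH cases use the _sj0 and _seh0 suffixes.  */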
11161
11162 tree
11163 build_personality_function (const char *lang)
11164 {
11165 const char *unwind_and_version;
11166 tree decl, type;
11167 char *name;
11168
11169 switch (targetm_common.except_unwind_info (&global_options))
11170 {
11171 case UI_NONE:
11172 return NULL;
11173 case UI_SJLJ:
11174 unwind_and_version = "_sj0";
11175 break;
11176 case UI_DWARF2:
11177 case UI_TARGET:
11178 unwind_and_version = "_v0";
11179 break;
11180 case UI_SEH:
11181 unwind_and_version = "_seh0";
11182 break;
11183 default:
11184 gcc_unreachable ();
11185 }
11186
11187 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11188
11189 type = build_function_type_list (integer_type_node, integer_type_node,
11190 long_long_unsigned_type_node,
11191 ptr_type_node, ptr_type_node, NULL_TREE);
11192 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11193 get_identifier (name), type);
11194 DECL_ARTIFICIAL (decl) = 1;
11195 DECL_EXTERNAL (decl) = 1;
11196 TREE_PUBLIC (decl) = 1;
11197
11198 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11199 are the flags assigned by targetm.encode_section_info. */
11200 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11201
11202 return decl;
11203 }
11204
11205 /* Extracts the personality function of DECL and returns the corresponding
11206 libfunc. */
11207
11208 rtx
11209 get_personality_function (tree decl)
11210 {
11211 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11212 enum eh_personality_kind pk;
11213
11214 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11215 if (pk == eh_personality_none)
11216 return NULL;
11217
11218 if (!personality
11219 && pk == eh_personality_any)
11220 personality = lang_hooks.eh_personality ();
11221
11222 if (pk == eh_personality_lang)
11223 gcc_assert (personality != NULL_TREE);
11224
11225 return XEXP (DECL_RTL (personality), 0);
11226 }
11227
11228 #include "gt-expr.h"