gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-config.h"
33 #include "insn-attr.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "typeclass.h"
41 #include "toplev.h"
42 #include "langhooks.h"
43 #include "intl.h"
44 #include "tm_p.h"
45 #include "tree-iterator.h"
46 #include "gimple.h"
47 #include "gimple-ssa.h"
48 #include "cgraph.h"
49 #include "tree-ssanames.h"
50 #include "target.h"
51 #include "common/common-target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "tree-ssa-live.h"
56 #include "tree-outof-ssa.h"
57 #include "target-globals.h"
58 #include "params.h"
59 #include "tree-ssa-address.h"
60
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
63
 64 They should be processed from last to first if the stack and args
 65 grow in opposite directions, but only if we have push insns. */
66
67 #ifdef PUSH_ROUNDING
68
69 #ifndef PUSH_ARGS_REVERSED
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first. */
72 #endif
73 #endif
74
75 #endif
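/* A worked example of the test above (illustrative only, assuming a
   hypothetical target that defines STACK_GROWS_DOWNWARD but not
   ARGS_GROW_DOWNWARD):

     defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
       => 1 != 0
       => PUSH_ARGS_REVERSED is defined, so arguments are pushed last to first.

   If both or neither are defined, the two directions agree and the macro
   stays undefined.  */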
76
77 #ifndef STACK_PUSH_CODE
78 #ifdef STACK_GROWS_DOWNWARD
79 #define STACK_PUSH_CODE PRE_DEC
80 #else
81 #define STACK_PUSH_CODE PRE_INC
82 #endif
83 #endif
84
85
86 /* If this is nonzero, we do not bother generating VOLATILE
87 around volatile memory references, and we are willing to
88 output indirect addresses. If cse is to follow, we reject
89 indirect addresses so a useful potential cse is generated;
90 if it is used only once, instruction combination will produce
91 the same indirect address eventually. */
92 int cse_not_expected;
93
94 /* This structure is used by move_by_pieces to describe the move to
95 be performed. */
96 struct move_by_pieces_d
97 {
98 rtx to;
99 rtx to_addr;
100 int autinc_to;
101 int explicit_inc_to;
102 rtx from;
103 rtx from_addr;
104 int autinc_from;
105 int explicit_inc_from;
106 unsigned HOST_WIDE_INT len;
107 HOST_WIDE_INT offset;
108 int reverse;
109 };
110
111 /* This structure is used by store_by_pieces to describe the clear to
112 be performed. */
113
114 struct store_by_pieces_d
115 {
116 rtx to;
117 rtx to_addr;
118 int autinc_to;
119 int explicit_inc_to;
120 unsigned HOST_WIDE_INT len;
121 HOST_WIDE_INT offset;
122 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
123 void *constfundata;
124 int reverse;
125 };
126
127 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
128 struct move_by_pieces_d *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
136 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
137 struct store_by_pieces_d *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
146 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
147 enum machine_mode, tree, alias_set_type, bool);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162
163 /* This macro is used to determine whether move_by_pieces should be called
164 to perform a structure copy. */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
169 #endif
170
171 /* This macro is used to determine whether clear_by_pieces should be
172 called to clear storage. */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
177 #endif
178
179 /* This macro is used to determine whether store_by_pieces should be
180 called to "memset" storage with byte values other than zero. */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
185 #endif
186
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memcpy" storage when the source is a constant string. */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
193 #endif
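/* An illustrative evaluation of MOVE_BY_PIECES_P, with assumed target
   numbers rather than any real configuration: if MOVE_MAX_PIECES is 8,
   MOVE_RATIO (optimize_insn_for_speed_p ()) is, say, 4, and the block is
   fully aligned, then a 16-byte copy needs

     move_by_pieces_ninsns (16, align, 8 + 1) == 2   (two DImode moves)

   so MOVE_BY_PIECES_P (16, align) is true (2 < 4) and the copy is open
   coded, whereas a 64-byte copy would need 8 moves and instead falls back
   to a movmem pattern or a memcpy libcall.  */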
194 \f
195 /* This is run to set up which modes can be used
196 directly in memory and to initialize the block move optab. It is run
197 at the beginning of compilation and when the target is reinitialized. */
198
199 void
200 init_expr_target (void)
201 {
202 rtx insn, pat;
203 enum machine_mode mode;
204 int num_clobbers;
205 rtx mem, mem1;
206 rtx reg;
207
208 /* Try indexing by frame ptr and try by stack ptr.
209 It is known that on the Convex the stack ptr isn't a valid index.
210 With luck, one or the other is valid on any machine. */
211 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
212 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
213
214 /* A scratch register we can modify in-place below to avoid
215 useless RTL allocations. */
216 reg = gen_rtx_REG (VOIDmode, -1);
217
218 insn = rtx_alloc (INSN);
219 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
220 PATTERN (insn) = pat;
221
222 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
223 mode = (enum machine_mode) ((int) mode + 1))
224 {
225 int regno;
226
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
230 PUT_MODE (reg, mode);
231
232 /* See if there is some register that can be used in this mode and
233 directly loaded or stored from memory. */
234
235 if (mode != VOIDmode && mode != BLKmode)
236 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
237 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
238 regno++)
239 {
240 if (! HARD_REGNO_MODE_OK (regno, mode))
241 continue;
242
243 SET_REGNO (reg, regno);
244
245 SET_SRC (pat) = mem;
246 SET_DEST (pat) = reg;
247 if (recog (pat, insn, &num_clobbers) >= 0)
248 direct_load[(int) mode] = 1;
249
250 SET_SRC (pat) = mem1;
251 SET_DEST (pat) = reg;
252 if (recog (pat, insn, &num_clobbers) >= 0)
253 direct_load[(int) mode] = 1;
254
255 SET_SRC (pat) = reg;
256 SET_DEST (pat) = mem;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_store[(int) mode] = 1;
259
260 SET_SRC (pat) = reg;
261 SET_DEST (pat) = mem1;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_store[(int) mode] = 1;
264 }
265 }
266
267 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
268
269 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
270 mode = GET_MODE_WIDER_MODE (mode))
271 {
272 enum machine_mode srcmode;
273 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
274 srcmode = GET_MODE_WIDER_MODE (srcmode))
275 {
276 enum insn_code ic;
277
278 ic = can_extend_p (mode, srcmode, 0);
279 if (ic == CODE_FOR_nothing)
280 continue;
281
282 PUT_MODE (mem, srcmode);
283
284 if (insn_operand_matches (ic, 1, mem))
285 float_extend_from_mem[mode][srcmode] = true;
286 }
287 }
288 }
289
290 /* This is run at the start of compiling a function. */
291
292 void
293 init_expr (void)
294 {
295 memset (&crtl->expr, 0, sizeof (crtl->expr));
296 }
297 \f
298 /* Copy data from FROM to TO, where the machine modes are not the same.
299 Both modes may be integer, or both may be floating, or both may be
300 fixed-point.
301 UNSIGNEDP should be nonzero if FROM is an unsigned type.
302 This causes zero-extension instead of sign-extension. */
303
304 void
305 convert_move (rtx to, rtx from, int unsignedp)
306 {
307 enum machine_mode to_mode = GET_MODE (to);
308 enum machine_mode from_mode = GET_MODE (from);
309 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
310 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
311 enum insn_code code;
312 rtx libcall;
313
314 /* rtx code for making an equivalent value. */
315 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
316 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
317
318
319 gcc_assert (to_real == from_real);
320 gcc_assert (to_mode != BLKmode);
321 gcc_assert (from_mode != BLKmode);
322
323 /* If the source and destination are already the same, then there's
324 nothing to do. */
325 if (to == from)
326 return;
327
328 /* If FROM is a SUBREG that indicates that we have already done at least
329 the required extension, strip it. We don't handle such SUBREGs as
330 TO here. */
331
332 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
333 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
334 >= GET_MODE_PRECISION (to_mode))
335 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
336 from = gen_lowpart (to_mode, from), from_mode = to_mode;
337
338 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
339
340 if (to_mode == from_mode
341 || (from_mode == VOIDmode && CONSTANT_P (from)))
342 {
343 emit_move_insn (to, from);
344 return;
345 }
346
347 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
348 {
349 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
350
351 if (VECTOR_MODE_P (to_mode))
352 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
353 else
354 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
355
356 emit_move_insn (to, from);
357 return;
358 }
359
360 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
361 {
362 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
363 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
364 return;
365 }
366
367 if (to_real)
368 {
369 rtx value, insns;
370 convert_optab tab;
371
372 gcc_assert ((GET_MODE_PRECISION (from_mode)
373 != GET_MODE_PRECISION (to_mode))
374 || (DECIMAL_FLOAT_MODE_P (from_mode)
375 != DECIMAL_FLOAT_MODE_P (to_mode)));
376
377 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
378 /* Conversion between decimal float and binary float, same size. */
379 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
380 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
381 tab = sext_optab;
382 else
383 tab = trunc_optab;
384
385 /* Try converting directly if the insn is supported. */
386
387 code = convert_optab_handler (tab, to_mode, from_mode);
388 if (code != CODE_FOR_nothing)
389 {
390 emit_unop_insn (code, to, from,
391 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
392 return;
393 }
394
395 /* Otherwise use a libcall. */
396 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
397
398 /* Is this conversion implemented yet? */
399 gcc_assert (libcall);
400
401 start_sequence ();
402 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
403 1, from, from_mode);
404 insns = get_insns ();
405 end_sequence ();
406 emit_libcall_block (insns, to, value,
407 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
408 from)
409 : gen_rtx_FLOAT_EXTEND (to_mode, from));
410 return;
411 }
412
413 /* Handle pointer conversion. */ /* SPEE 900220. */
414 /* Targets are expected to provide conversion insns between PxImode and
415 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
416 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
417 {
418 enum machine_mode full_mode
419 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
420
421 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
422 != CODE_FOR_nothing);
423
424 if (full_mode != from_mode)
425 from = convert_to_mode (full_mode, from, unsignedp);
426 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
427 to, from, UNKNOWN);
428 return;
429 }
430 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
431 {
432 rtx new_from;
433 enum machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
435 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
436 enum insn_code icode;
437
438 icode = convert_optab_handler (ctab, full_mode, from_mode);
439 gcc_assert (icode != CODE_FOR_nothing);
440
441 if (to_mode == full_mode)
442 {
443 emit_unop_insn (icode, to, from, UNKNOWN);
444 return;
445 }
446
447 new_from = gen_reg_rtx (full_mode);
448 emit_unop_insn (icode, new_from, from, UNKNOWN);
449
450 /* else proceed to integer conversions below. */
451 from_mode = full_mode;
452 from = new_from;
453 }
454
455 /* Make sure both are fixed-point modes or both are not. */
456 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
457 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
458 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
459 {
460 /* If we widen from_mode to to_mode and they are in the same class,
461 we won't saturate the result.
 462 Otherwise, always saturate the result to play it safe. */
463 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
464 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
465 expand_fixed_convert (to, from, 0, 0);
466 else
467 expand_fixed_convert (to, from, 0, 1);
468 return;
469 }
470
471 /* Now both modes are integers. */
472
473 /* Handle expanding beyond a word. */
474 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
475 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
476 {
477 rtx insns;
478 rtx lowpart;
479 rtx fill_value;
480 rtx lowfrom;
481 int i;
482 enum machine_mode lowpart_mode;
483 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
484
485 /* Try converting directly if the insn is supported. */
486 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
487 != CODE_FOR_nothing)
488 {
489 /* If FROM is a SUBREG, put it into a register. Do this
490 so that we always generate the same set of insns for
491 better cse'ing; if an intermediate assignment occurred,
492 we won't be doing the operation directly on the SUBREG. */
493 if (optimize > 0 && GET_CODE (from) == SUBREG)
494 from = force_reg (from_mode, from);
495 emit_unop_insn (code, to, from, equiv_code);
496 return;
497 }
498 /* Next, try converting via full word. */
499 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
500 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
501 != CODE_FOR_nothing))
502 {
503 rtx word_to = gen_reg_rtx (word_mode);
504 if (REG_P (to))
505 {
506 if (reg_overlap_mentioned_p (to, from))
507 from = force_reg (from_mode, from);
508 emit_clobber (to);
509 }
510 convert_move (word_to, from, unsignedp);
511 emit_unop_insn (code, to, word_to, equiv_code);
512 return;
513 }
514
515 /* No special multiword conversion insn; do it by hand. */
516 start_sequence ();
517
 518 /* Since we will turn this into a no conflict block, we must ensure that
 519 the source does not overlap the target, so force it into an isolated
 520 register when it might. Likewise for any MEM input, since the
521 conversion sequence might require several references to it and we
522 must ensure we're getting the same value every time. */
523
524 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
525 from = force_reg (from_mode, from);
526
527 /* Get a copy of FROM widened to a word, if necessary. */
528 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
529 lowpart_mode = word_mode;
530 else
531 lowpart_mode = from_mode;
532
533 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
534
535 lowpart = gen_lowpart (lowpart_mode, to);
536 emit_move_insn (lowpart, lowfrom);
537
538 /* Compute the value to put in each remaining word. */
539 if (unsignedp)
540 fill_value = const0_rtx;
541 else
542 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
543 LT, lowfrom, const0_rtx,
544 VOIDmode, 0, -1);
545
546 /* Fill the remaining words. */
547 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
548 {
549 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
550 rtx subword = operand_subword (to, index, 1, to_mode);
551
552 gcc_assert (subword);
553
554 if (fill_value != subword)
555 emit_move_insn (subword, fill_value);
556 }
557
558 insns = get_insns ();
559 end_sequence ();
560
561 emit_insn (insns);
562 return;
563 }
564
565 /* Truncating multi-word to a word or less. */
566 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
567 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
568 {
569 if (!((MEM_P (from)
570 && ! MEM_VOLATILE_P (from)
571 && direct_load[(int) to_mode]
572 && ! mode_dependent_address_p (XEXP (from, 0),
573 MEM_ADDR_SPACE (from)))
574 || REG_P (from)
575 || GET_CODE (from) == SUBREG))
576 from = force_reg (from_mode, from);
577 convert_move (to, gen_lowpart (word_mode, from), 0);
578 return;
579 }
580
581 /* Now follow all the conversions between integers
582 no more than a word long. */
583
584 /* For truncation, usually we can just refer to FROM in a narrower mode. */
585 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
586 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
587 {
588 if (!((MEM_P (from)
589 && ! MEM_VOLATILE_P (from)
590 && direct_load[(int) to_mode]
591 && ! mode_dependent_address_p (XEXP (from, 0),
592 MEM_ADDR_SPACE (from)))
593 || REG_P (from)
594 || GET_CODE (from) == SUBREG))
595 from = force_reg (from_mode, from);
596 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
597 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
598 from = copy_to_reg (from);
599 emit_move_insn (to, gen_lowpart (to_mode, from));
600 return;
601 }
602
603 /* Handle extension. */
604 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
605 {
606 /* Convert directly if that works. */
607 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
608 != CODE_FOR_nothing)
609 {
610 emit_unop_insn (code, to, from, equiv_code);
611 return;
612 }
613 else
614 {
615 enum machine_mode intermediate;
616 rtx tmp;
617 int shift_amount;
618
619 /* Search for a mode to convert via. */
620 for (intermediate = from_mode; intermediate != VOIDmode;
621 intermediate = GET_MODE_WIDER_MODE (intermediate))
622 if (((can_extend_p (to_mode, intermediate, unsignedp)
623 != CODE_FOR_nothing)
624 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
625 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
626 && (can_extend_p (intermediate, from_mode, unsignedp)
627 != CODE_FOR_nothing))
628 {
629 convert_move (to, convert_to_mode (intermediate, from,
630 unsignedp), unsignedp);
631 return;
632 }
633
634 /* No suitable intermediate mode.
635 Generate what we need with shifts. */
636 shift_amount = (GET_MODE_PRECISION (to_mode)
637 - GET_MODE_PRECISION (from_mode));
638 from = gen_lowpart (to_mode, force_reg (from_mode, from));
639 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
640 to, unsignedp);
641 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
642 to, unsignedp);
643 if (tmp != to)
644 emit_move_insn (to, tmp);
645 return;
646 }
647 }
648
649 /* Support special truncate insns for certain modes. */
650 if (convert_optab_handler (trunc_optab, to_mode,
651 from_mode) != CODE_FOR_nothing)
652 {
653 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
654 to, from, UNKNOWN);
655 return;
656 }
657
658 /* Handle truncation of volatile memrefs, and so on;
659 the things that couldn't be truncated directly,
660 and for which there was no special instruction.
661
662 ??? Code above formerly short-circuited this, for most integer
663 mode pairs, with a force_reg in from_mode followed by a recursive
664 call to this routine. Appears always to have been wrong. */
665 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
666 {
667 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
668 emit_move_insn (to, temp);
669 return;
670 }
671
672 /* Mode combination is not recognized. */
673 gcc_unreachable ();
674 }
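/* Illustration only -- a minimal sketch (not part of the original file) of
   a typical convert_move call, assuming a caller that zero-extends a
   QImode pseudo into an SImode pseudo:

     rtx src = gen_reg_rtx (QImode);
     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);        nonzero UNSIGNEDP => ZERO_EXTEND

   convert_to_mode (SImode, src, 1) below performs the same conversion but
   returns an rtx for the converted value (possibly a new pseudo) instead
   of storing into a caller-supplied destination.  */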
675
676 /* Return an rtx for a value that would result
677 from converting X to mode MODE.
678 Both X and MODE may be floating, or both integer.
679 UNSIGNEDP is nonzero if X is an unsigned value.
680 This can be done by referring to a part of X in place
681 or by copying to a new temporary with conversion. */
682
683 rtx
684 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
685 {
686 return convert_modes (mode, VOIDmode, x, unsignedp);
687 }
688
689 /* Return an rtx for a value that would result
690 from converting X from mode OLDMODE to mode MODE.
691 Both modes may be floating, or both integer.
692 UNSIGNEDP is nonzero if X is an unsigned value.
693
694 This can be done by referring to a part of X in place
695 or by copying to a new temporary with conversion.
696
697 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
698
699 rtx
700 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
701 {
702 rtx temp;
703
704 /* If FROM is a SUBREG that indicates that we have already done at least
705 the required extension, strip it. */
706
707 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
708 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
709 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
710 x = gen_lowpart (mode, x);
711
712 if (GET_MODE (x) != VOIDmode)
713 oldmode = GET_MODE (x);
714
715 if (mode == oldmode)
716 return x;
717
718 /* There is one case that we must handle specially: If we are converting
719 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
 720 we are to interpret the constant as unsigned, gen_lowpart will do
 721 the wrong thing if the constant appears negative. What we want to do is
722 make the high-order word of the constant zero, not all ones. */
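/* A worked example of the case above (illustrative, assuming
   HOST_BITS_PER_WIDE_INT == 32): converting (const_int -1), i.e.
   0xffffffff viewed as an unsigned SImode value, to unsigned DImode
   should yield 0x00000000ffffffff, but gen_lowpart would sign-extend the
   CONST_INT and produce 0xffffffffffffffff; the zext below gives the
   correct high-order word of zero.  */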
723
724 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
725 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
726 && CONST_INT_P (x) && INTVAL (x) < 0)
727 {
728 double_int val = double_int::from_uhwi (INTVAL (x));
729
730 /* We need to zero extend VAL. */
731 if (oldmode != VOIDmode)
732 val = val.zext (GET_MODE_BITSIZE (oldmode));
733
734 return immed_double_int_const (val, mode);
735 }
736
737 /* We can do this with a gen_lowpart if both desired and current modes
738 are integer, and this is either a constant integer, a register, or a
739 non-volatile MEM. Except for the constant case where MODE is no
740 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
741
742 if ((CONST_INT_P (x)
743 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
744 || (GET_MODE_CLASS (mode) == MODE_INT
745 && GET_MODE_CLASS (oldmode) == MODE_INT
746 && (CONST_DOUBLE_AS_INT_P (x)
747 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
748 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
749 && direct_load[(int) mode])
750 || (REG_P (x)
751 && (! HARD_REGISTER_P (x)
752 || HARD_REGNO_MODE_OK (REGNO (x), mode))
753 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
754 GET_MODE (x))))))))
755 {
756 /* ?? If we don't know OLDMODE, we have to assume here that
757 X does not need sign- or zero-extension. This may not be
758 the case, but it's the best we can do. */
759 if (CONST_INT_P (x) && oldmode != VOIDmode
760 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
761 {
762 HOST_WIDE_INT val = INTVAL (x);
763
764 /* We must sign or zero-extend in this case. Start by
765 zero-extending, then sign extend if we need to. */
766 val &= GET_MODE_MASK (oldmode);
767 if (! unsignedp
768 && val_signbit_known_set_p (oldmode, val))
769 val |= ~GET_MODE_MASK (oldmode);
770
771 return gen_int_mode (val, mode);
772 }
773
774 return gen_lowpart (mode, x);
775 }
776
 777 /* Converting an integer constant into a vector mode is always equivalent
 778 to a subreg operation. */
779 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
780 {
781 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
782 return simplify_gen_subreg (mode, x, oldmode, 0);
783 }
784
785 temp = gen_reg_rtx (mode);
786 convert_move (temp, x, unsignedp);
787 return temp;
788 }
789 \f
790 /* Return the largest alignment we can use for doing a move (or store)
791 of MAX_PIECES. ALIGN is the largest alignment we could use. */
792
793 static unsigned int
794 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
795 {
796 enum machine_mode tmode;
797
798 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
799 if (align >= GET_MODE_ALIGNMENT (tmode))
800 align = GET_MODE_ALIGNMENT (tmode);
801 else
802 {
803 enum machine_mode tmode, xmode;
804
805 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
806 tmode != VOIDmode;
807 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
808 if (GET_MODE_SIZE (tmode) > max_pieces
809 || SLOW_UNALIGNED_ACCESS (tmode, align))
810 break;
811
812 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
813 }
814
815 return align;
816 }
817
818 /* Return the widest integer mode no wider than SIZE. If no such mode
819 can be found, return VOIDmode. */
820
821 static enum machine_mode
822 widest_int_mode_for_size (unsigned int size)
823 {
824 enum machine_mode tmode, mode = VOIDmode;
825
826 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
827 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
828 if (GET_MODE_SIZE (tmode) < size)
829 mode = tmode;
830
831 return mode;
832 }
833
834 /* STORE_MAX_PIECES is the number of bytes at a time that we can
835 store efficiently. Due to internal GCC limitations, this is
836 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
837 for an immediate constant. */
838
839 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
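/* Two illustrative evaluations of the formula above (the numbers are
   assumed, not taken from any real configuration):

     MOVE_MAX_PIECES == 16, 64-bit HOST_WIDE_INT:
       STORE_MAX_PIECES == MIN (16, 2 * 8) == 16
     MOVE_MAX_PIECES == 16, 32-bit HOST_WIDE_INT:
       STORE_MAX_PIECES == MIN (16, 2 * 4) == 8

   i.e. stores by pieces are additionally capped by the widest constant
   GCC itself can represent in a pair of HOST_WIDE_INTs.  */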
840
841 /* Determine whether the LEN bytes can be moved by using several move
842 instructions. Return nonzero if a call to move_by_pieces should
843 succeed. */
844
845 int
846 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
847 unsigned int align ATTRIBUTE_UNUSED)
848 {
849 return MOVE_BY_PIECES_P (len, align);
850 }
851
852 /* Generate several move instructions to copy LEN bytes from block FROM to
853 block TO. (These are MEM rtx's with BLKmode).
854
855 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
856 used to push FROM to the stack.
857
858 ALIGN is maximum stack alignment we can assume.
859
 860 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
 861 a la mempcpy; and if ENDP is 2, return the memory at the end minus one
 862 byte, a la stpcpy. */
863
864 rtx
865 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
866 unsigned int align, int endp)
867 {
868 struct move_by_pieces_d data;
869 enum machine_mode to_addr_mode;
870 enum machine_mode from_addr_mode = get_address_mode (from);
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum insn_code icode;
874
875 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
876
877 data.offset = 0;
878 data.from_addr = from_addr;
879 if (to)
880 {
881 to_addr_mode = get_address_mode (to);
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
889 }
890 else
891 {
892 to_addr_mode = VOIDmode;
893 to_addr = NULL_RTX;
894 data.to = NULL_RTX;
895 data.autinc_to = 1;
896 #ifdef STACK_GROWS_DOWNWARD
897 data.reverse = 1;
898 #else
899 data.reverse = 0;
900 #endif
901 }
902 data.to_addr = to_addr;
903 data.from = from;
904 data.autinc_from
905 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
906 || GET_CODE (from_addr) == POST_INC
907 || GET_CODE (from_addr) == POST_DEC);
908
909 data.explicit_inc_from = 0;
910 data.explicit_inc_to = 0;
911 if (data.reverse) data.offset = len;
912 data.len = len;
913
914 /* If copying requires more than two move insns,
915 copy addresses to registers (to make displacements shorter)
916 and use post-increment if available. */
917 if (!(data.autinc_from && data.autinc_to)
918 && move_by_pieces_ninsns (len, align, max_size) > 2)
919 {
920 /* Find the mode of the largest move...
921 MODE might not be used depending on the definitions of the
922 USE_* macros below. */
923 enum machine_mode mode ATTRIBUTE_UNUSED
924 = widest_int_mode_for_size (max_size);
925
926 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
927 {
928 data.from_addr = copy_to_mode_reg (from_addr_mode,
929 plus_constant (from_addr_mode,
930 from_addr, len));
931 data.autinc_from = 1;
932 data.explicit_inc_from = -1;
933 }
934 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
935 {
936 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
937 data.autinc_from = 1;
938 data.explicit_inc_from = 1;
939 }
940 if (!data.autinc_from && CONSTANT_P (from_addr))
941 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
942 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
943 {
944 data.to_addr = copy_to_mode_reg (to_addr_mode,
945 plus_constant (to_addr_mode,
946 to_addr, len));
947 data.autinc_to = 1;
948 data.explicit_inc_to = -1;
949 }
950 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
951 {
952 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
953 data.autinc_to = 1;
954 data.explicit_inc_to = 1;
955 }
956 if (!data.autinc_to && CONSTANT_P (to_addr))
957 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
958 }
959
960 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
961
962 /* First move what we can in the largest integer mode, then go to
963 successively smaller modes. */
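/* A worked example of the loop below (sizes assumed for illustration):
   with data.len == 11, MOVE_MAX_PIECES == 4, a sufficiently aligned block
   and mov patterns for each mode, the copy is emitted as

     2 x SImode move   (8 bytes)
     1 x HImode move   (2 bytes)
     1 x QImode move   (1 byte)

   each pass handling the widest mode that still fits and leaving the
   remainder for the narrower ones.  */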
964
965 while (max_size > 1 && data.len > 0)
966 {
967 enum machine_mode mode = widest_int_mode_for_size (max_size);
968
969 if (mode == VOIDmode)
970 break;
971
972 icode = optab_handler (mov_optab, mode);
973 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
974 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
975
976 max_size = GET_MODE_SIZE (mode);
977 }
978
979 /* The code above should have handled everything. */
980 gcc_assert (!data.len);
981
982 if (endp)
983 {
984 rtx to1;
985
986 gcc_assert (!data.reverse);
987 if (data.autinc_to)
988 {
989 if (endp == 2)
990 {
991 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
992 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
993 else
994 data.to_addr = copy_to_mode_reg (to_addr_mode,
995 plus_constant (to_addr_mode,
996 data.to_addr,
997 -1));
998 }
999 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1000 data.offset);
1001 }
1002 else
1003 {
1004 if (endp == 2)
1005 --data.offset;
1006 to1 = adjust_address (data.to, QImode, data.offset);
1007 }
1008 return to1;
1009 }
1010 else
1011 return data.to;
1012 }
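/* Illustration only -- a hedged sketch of the ENDP variants of the
   function above, assuming a caller with two 16-byte BLKmode MEMs DST and
   SRC and a known alignment:

     move_by_pieces (dst, src, 16, align, 0);    plain copy, returns DST
     rtx end = move_by_pieces (dst, src, 16, align, 1);
                         a QImode MEM one past the last byte, like mempcpy
     rtx last = move_by_pieces (dst, src, 16, align, 2);
                         a QImode MEM at the final byte, like stpcpy  */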
1013
1014 /* Return number of insns required to move L bytes by pieces.
1015 ALIGN (in bits) is maximum alignment we can assume. */
1016
1017 unsigned HOST_WIDE_INT
1018 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1019 unsigned int max_size)
1020 {
1021 unsigned HOST_WIDE_INT n_insns = 0;
1022
1023 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1024
1025 while (max_size > 1 && l > 0)
1026 {
1027 enum machine_mode mode;
1028 enum insn_code icode;
1029
1030 mode = widest_int_mode_for_size (max_size);
1031
1032 if (mode == VOIDmode)
1033 break;
1034
1035 icode = optab_handler (mov_optab, mode);
1036 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1037 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1038
1039 max_size = GET_MODE_SIZE (mode);
1040 }
1041
1042 gcc_assert (!l);
1043 return n_insns;
1044 }
1045
1046 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1047 with move instructions for mode MODE. GENFUN is the gen_... function
1048 to make a move insn for that mode. DATA has all the other info. */
1049
1050 static void
1051 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1052 struct move_by_pieces_d *data)
1053 {
1054 unsigned int size = GET_MODE_SIZE (mode);
1055 rtx to1 = NULL_RTX, from1;
1056
1057 while (data->len >= size)
1058 {
1059 if (data->reverse)
1060 data->offset -= size;
1061
1062 if (data->to)
1063 {
1064 if (data->autinc_to)
1065 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1066 data->offset);
1067 else
1068 to1 = adjust_address (data->to, mode, data->offset);
1069 }
1070
1071 if (data->autinc_from)
1072 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1073 data->offset);
1074 else
1075 from1 = adjust_address (data->from, mode, data->offset);
1076
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1078 emit_insn (gen_add2_insn (data->to_addr,
1079 gen_int_mode (-(HOST_WIDE_INT) size,
1080 GET_MODE (data->to_addr))));
1081 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1082 emit_insn (gen_add2_insn (data->from_addr,
1083 gen_int_mode (-(HOST_WIDE_INT) size,
1084 GET_MODE (data->from_addr))));
1085
1086 if (data->to)
1087 emit_insn ((*genfun) (to1, from1));
1088 else
1089 {
1090 #ifdef PUSH_ROUNDING
1091 emit_single_push_insn (mode, from1, NULL);
1092 #else
1093 gcc_unreachable ();
1094 #endif
1095 }
1096
1097 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1098 emit_insn (gen_add2_insn (data->to_addr,
1099 gen_int_mode (size,
1100 GET_MODE (data->to_addr))));
1101 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1102 emit_insn (gen_add2_insn (data->from_addr,
1103 gen_int_mode (size,
1104 GET_MODE (data->from_addr))));
1105
1106 if (! data->reverse)
1107 data->offset += size;
1108
1109 data->len -= size;
1110 }
1111 }
1112 \f
1113 /* Emit code to move a block Y to a block X. This may be done with
1114 string-move instructions, with multiple scalar move instructions,
1115 or with a library call.
1116
1117 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1118 SIZE is an rtx that says how long they are.
1119 ALIGN is the maximum alignment we can assume they have.
1120 METHOD describes what kind of copy this is, and what mechanisms may be used.
1121
1122 Return the address of the new block, if memcpy is called and returns it,
1123 0 otherwise. */
1124
1125 rtx
1126 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1127 unsigned int expected_align, HOST_WIDE_INT expected_size)
1128 {
1129 bool may_use_call;
1130 rtx retval = 0;
1131 unsigned int align;
1132
1133 gcc_assert (size);
1134 if (CONST_INT_P (size)
1135 && INTVAL (size) == 0)
1136 return 0;
1137
1138 switch (method)
1139 {
1140 case BLOCK_OP_NORMAL:
1141 case BLOCK_OP_TAILCALL:
1142 may_use_call = true;
1143 break;
1144
1145 case BLOCK_OP_CALL_PARM:
1146 may_use_call = block_move_libcall_safe_for_call_parm ();
1147
1148 /* Make inhibit_defer_pop nonzero around the library call
1149 to force it to pop the arguments right away. */
1150 NO_DEFER_POP;
1151 break;
1152
1153 case BLOCK_OP_NO_LIBCALL:
1154 may_use_call = false;
1155 break;
1156
1157 default:
1158 gcc_unreachable ();
1159 }
1160
1161 gcc_assert (MEM_P (x) && MEM_P (y));
1162 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1163 gcc_assert (align >= BITS_PER_UNIT);
1164
1165 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1166 block copy is more efficient for other large modes, e.g. DCmode. */
1167 x = adjust_address (x, BLKmode, 0);
1168 y = adjust_address (y, BLKmode, 0);
1169
1170 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1171 can be incorrect is coming from __builtin_memcpy. */
1172 if (CONST_INT_P (size))
1173 {
1174 x = shallow_copy_rtx (x);
1175 y = shallow_copy_rtx (y);
1176 set_mem_size (x, INTVAL (size));
1177 set_mem_size (y, INTVAL (size));
1178 }
1179
1180 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1181 move_by_pieces (x, y, INTVAL (size), align, 0);
1182 else if (emit_block_move_via_movmem (x, y, size, align,
1183 expected_align, expected_size))
1184 ;
1185 else if (may_use_call
1186 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1187 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1188 {
1189 /* Since x and y are passed to a libcall, mark the corresponding
1190 tree EXPR as addressable. */
1191 tree y_expr = MEM_EXPR (y);
1192 tree x_expr = MEM_EXPR (x);
1193 if (y_expr)
1194 mark_addressable (y_expr);
1195 if (x_expr)
1196 mark_addressable (x_expr);
1197 retval = emit_block_move_via_libcall (x, y, size,
1198 method == BLOCK_OP_TAILCALL);
1199 }
1200
1201 else
1202 emit_block_move_via_loop (x, y, size, align);
1203
1204 if (method == BLOCK_OP_CALL_PARM)
1205 OK_DEFER_POP;
1206
1207 return retval;
1208 }
1209
1210 rtx
1211 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1212 {
1213 return emit_block_move_hints (x, y, size, method, 0, -1);
1214 }
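/* Illustration only -- a minimal sketch of the common entry point,
   assuming a caller with two BLKmode MEMs X and Y and a byte count N
   known at compile time:

     emit_block_move (x, y, GEN_INT (n), BLOCK_OP_NORMAL);

   which is emit_block_move_hints with no alignment or size hints;
   depending on N and the target this expands to a by-pieces copy, a
   movmem pattern, a memcpy libcall, or the byte loop further below.  */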
1215
1216 /* A subroutine of emit_block_move. Returns true if calling the
1217 block move libcall will not clobber any parameters which may have
1218 already been placed on the stack. */
1219
1220 static bool
1221 block_move_libcall_safe_for_call_parm (void)
1222 {
1223 #if defined (REG_PARM_STACK_SPACE)
1224 tree fn;
1225 #endif
1226
1227 /* If arguments are pushed on the stack, then they're safe. */
1228 if (PUSH_ARGS)
1229 return true;
1230
1231 /* If registers go on the stack anyway, any argument is sure to clobber
1232 an outgoing argument. */
1233 #if defined (REG_PARM_STACK_SPACE)
1234 fn = emit_block_move_libcall_fn (false);
1235 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1236 depend on its argument. */
1237 (void) fn;
1238 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1239 && REG_PARM_STACK_SPACE (fn) != 0)
1240 return false;
1241 #endif
1242
1243 /* If any argument goes in memory, then it might clobber an outgoing
1244 argument. */
1245 {
1246 CUMULATIVE_ARGS args_so_far_v;
1247 cumulative_args_t args_so_far;
1248 tree fn, arg;
1249
1250 fn = emit_block_move_libcall_fn (false);
1251 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1252 args_so_far = pack_cumulative_args (&args_so_far_v);
1253
1254 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1255 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1256 {
1257 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1258 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1259 NULL_TREE, true);
1260 if (!tmp || !REG_P (tmp))
1261 return false;
1262 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1263 return false;
1264 targetm.calls.function_arg_advance (args_so_far, mode,
1265 NULL_TREE, true);
1266 }
1267 }
1268 return true;
1269 }
1270
1271 /* A subroutine of emit_block_move. Expand a movmem pattern;
1272 return true if successful. */
1273
1274 static bool
1275 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1276 unsigned int expected_align, HOST_WIDE_INT expected_size)
1277 {
1278 int save_volatile_ok = volatile_ok;
1279 enum machine_mode mode;
1280
1281 if (expected_align < align)
1282 expected_align = align;
1283
1284 /* Since this is a move insn, we don't care about volatility. */
1285 volatile_ok = 1;
1286
1287 /* Try the most limited insn first, because there's no point
1288 including more than one in the machine description unless
1289 the more limited one has some advantage. */
1290
1291 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1292 mode = GET_MODE_WIDER_MODE (mode))
1293 {
1294 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1295
1296 if (code != CODE_FOR_nothing
 1297 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1298 here because if SIZE is less than the mode mask, as it is
1299 returned by the macro, it will definitely be less than the
1300 actual mode mask. Since SIZE is within the Pmode address
1301 space, we limit MODE to Pmode. */
1302 && ((CONST_INT_P (size)
1303 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1304 <= (GET_MODE_MASK (mode) >> 1)))
1305 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1306 {
1307 struct expand_operand ops[6];
1308 unsigned int nops;
1309
1310 /* ??? When called via emit_block_move_for_call, it'd be
1311 nice if there were some way to inform the backend, so
1312 that it doesn't fail the expansion because it thinks
1313 emitting the libcall would be more efficient. */
1314 nops = insn_data[(int) code].n_generator_args;
1315 gcc_assert (nops == 4 || nops == 6);
1316
1317 create_fixed_operand (&ops[0], x);
1318 create_fixed_operand (&ops[1], y);
1319 /* The check above guarantees that this size conversion is valid. */
1320 create_convert_operand_to (&ops[2], size, mode, true);
1321 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1322 if (nops == 6)
1323 {
1324 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1325 create_integer_operand (&ops[5], expected_size);
1326 }
1327 if (maybe_expand_insn (code, nops, ops))
1328 {
1329 volatile_ok = save_volatile_ok;
1330 return true;
1331 }
1332 }
1333 }
1334
1335 volatile_ok = save_volatile_ok;
1336 return false;
1337 }
1338
1339 /* A subroutine of emit_block_move. Expand a call to memcpy.
1340 Return the return value from memcpy, 0 otherwise. */
1341
1342 rtx
1343 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1344 {
1345 rtx dst_addr, src_addr;
1346 tree call_expr, fn, src_tree, dst_tree, size_tree;
1347 enum machine_mode size_mode;
1348 rtx retval;
1349
1350 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1351 pseudos. We can then place those new pseudos into a VAR_DECL and
1352 use them later. */
1353
1354 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1355 src_addr = copy_addr_to_reg (XEXP (src, 0));
1356
1357 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1358 src_addr = convert_memory_address (ptr_mode, src_addr);
1359
1360 dst_tree = make_tree (ptr_type_node, dst_addr);
1361 src_tree = make_tree (ptr_type_node, src_addr);
1362
1363 size_mode = TYPE_MODE (sizetype);
1364
1365 size = convert_to_mode (size_mode, size, 1);
1366 size = copy_to_mode_reg (size_mode, size);
1367
1368 /* It is incorrect to use the libcall calling conventions to call
1369 memcpy in this context. This could be a user call to memcpy and
1370 the user may wish to examine the return value from memcpy. For
1371 targets where libcalls and normal calls have different conventions
1372 for returning pointers, we could end up generating incorrect code. */
1373
1374 size_tree = make_tree (sizetype, size);
1375
1376 fn = emit_block_move_libcall_fn (true);
1377 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1378 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1379
1380 retval = expand_normal (call_expr);
1381
1382 return retval;
1383 }
1384
1385 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1386 for the function we use for block copies. */
1387
1388 static GTY(()) tree block_move_fn;
1389
1390 void
1391 init_block_move_fn (const char *asmspec)
1392 {
1393 if (!block_move_fn)
1394 {
1395 tree args, fn, attrs, attr_args;
1396
1397 fn = get_identifier ("memcpy");
1398 args = build_function_type_list (ptr_type_node, ptr_type_node,
1399 const_ptr_type_node, sizetype,
1400 NULL_TREE);
1401
1402 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1403 DECL_EXTERNAL (fn) = 1;
1404 TREE_PUBLIC (fn) = 1;
1405 DECL_ARTIFICIAL (fn) = 1;
1406 TREE_NOTHROW (fn) = 1;
1407 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1408 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1409
1410 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1411 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1412
1413 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1414
1415 block_move_fn = fn;
1416 }
1417
1418 if (asmspec)
1419 set_user_assembler_name (block_move_fn, asmspec);
1420 }
1421
1422 static tree
1423 emit_block_move_libcall_fn (int for_call)
1424 {
1425 static bool emitted_extern;
1426
1427 if (!block_move_fn)
1428 init_block_move_fn (NULL);
1429
1430 if (for_call && !emitted_extern)
1431 {
1432 emitted_extern = true;
1433 make_decl_rtl (block_move_fn);
1434 }
1435
1436 return block_move_fn;
1437 }
1438
1439 /* A subroutine of emit_block_move. Copy the data via an explicit
1440 loop. This is used only when libcalls are forbidden. */
1441 /* ??? It'd be nice to copy in hunks larger than QImode. */
1442
1443 static void
1444 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1445 unsigned int align ATTRIBUTE_UNUSED)
1446 {
1447 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1448 enum machine_mode x_addr_mode = get_address_mode (x);
1449 enum machine_mode y_addr_mode = get_address_mode (y);
1450 enum machine_mode iter_mode;
1451
1452 iter_mode = GET_MODE (size);
1453 if (iter_mode == VOIDmode)
1454 iter_mode = word_mode;
1455
1456 top_label = gen_label_rtx ();
1457 cmp_label = gen_label_rtx ();
1458 iter = gen_reg_rtx (iter_mode);
1459
1460 emit_move_insn (iter, const0_rtx);
1461
1462 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1463 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1464 do_pending_stack_adjust ();
1465
1466 emit_jump (cmp_label);
1467 emit_label (top_label);
1468
1469 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1470 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1471
1472 if (x_addr_mode != y_addr_mode)
1473 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1474 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1475
1476 x = change_address (x, QImode, x_addr);
1477 y = change_address (y, QImode, y_addr);
1478
1479 emit_move_insn (x, y);
1480
1481 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1482 true, OPTAB_LIB_WIDEN);
1483 if (tmp != iter)
1484 emit_move_insn (iter, tmp);
1485
1486 emit_label (cmp_label);
1487
1488 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1489 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1490 }
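/* Roughly, the RTL emitted by emit_block_move_via_loop above corresponds
   to this C (a sketch for illustration, one byte per iteration):

     size_t iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;
  */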
1491 \f
1492 /* Copy all or part of a value X into registers starting at REGNO.
1493 The number of registers to be filled is NREGS. */
1494
1495 void
1496 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1497 {
1498 int i;
1499 #ifdef HAVE_load_multiple
1500 rtx pat;
1501 rtx last;
1502 #endif
1503
1504 if (nregs == 0)
1505 return;
1506
1507 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1508 x = validize_mem (force_const_mem (mode, x));
1509
1510 /* See if the machine can do this with a load multiple insn. */
1511 #ifdef HAVE_load_multiple
1512 if (HAVE_load_multiple)
1513 {
1514 last = get_last_insn ();
1515 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1516 GEN_INT (nregs));
1517 if (pat)
1518 {
1519 emit_insn (pat);
1520 return;
1521 }
1522 else
1523 delete_insns_since (last);
1524 }
1525 #endif
1526
1527 for (i = 0; i < nregs; i++)
1528 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1529 operand_subword_force (x, i, mode));
1530 }
1531
1532 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1533 The number of registers to be filled is NREGS. */
1534
1535 void
1536 move_block_from_reg (int regno, rtx x, int nregs)
1537 {
1538 int i;
1539
1540 if (nregs == 0)
1541 return;
1542
1543 /* See if the machine can do this with a store multiple insn. */
1544 #ifdef HAVE_store_multiple
1545 if (HAVE_store_multiple)
1546 {
1547 rtx last = get_last_insn ();
1548 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1549 GEN_INT (nregs));
1550 if (pat)
1551 {
1552 emit_insn (pat);
1553 return;
1554 }
1555 else
1556 delete_insns_since (last);
1557 }
1558 #endif
1559
1560 for (i = 0; i < nregs; i++)
1561 {
1562 rtx tem = operand_subword (x, i, 1, BLKmode);
1563
1564 gcc_assert (tem);
1565
1566 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1567 }
1568 }
1569
1570 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1571 ORIG, where ORIG is a non-consecutive group of registers represented by
1572 a PARALLEL. The clone is identical to the original except in that the
1573 original set of registers is replaced by a new set of pseudo registers.
1574 The new set has the same modes as the original set. */
1575
1576 rtx
1577 gen_group_rtx (rtx orig)
1578 {
1579 int i, length;
1580 rtx *tmps;
1581
1582 gcc_assert (GET_CODE (orig) == PARALLEL);
1583
1584 length = XVECLEN (orig, 0);
1585 tmps = XALLOCAVEC (rtx, length);
1586
1587 /* Skip a NULL entry in first slot. */
1588 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1589
1590 if (i)
1591 tmps[0] = 0;
1592
1593 for (; i < length; i++)
1594 {
1595 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1596 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1597
1598 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1599 }
1600
1601 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1602 }
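/* For reference (an assumed shape, not copied from any target): the
   non-consecutive register groups handled here and below are PARALLELs of
   (EXPR_LIST reg offset) pairs, e.g. a 16-byte value returned in two
   DImode registers might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each CONST_INT is the byte offset of that register's piece within
   the whole value, and slot 0 may instead be NULL when part of the value
   also lives on the stack.  */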
1603
1604 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1605 except that values are placed in TMPS[i], and must later be moved
1606 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1607
1608 static void
1609 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1610 {
1611 rtx src;
1612 int start, i;
1613 enum machine_mode m = GET_MODE (orig_src);
1614
1615 gcc_assert (GET_CODE (dst) == PARALLEL);
1616
1617 if (m != VOIDmode
1618 && !SCALAR_INT_MODE_P (m)
1619 && !MEM_P (orig_src)
1620 && GET_CODE (orig_src) != CONCAT)
1621 {
1622 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1623 if (imode == BLKmode)
1624 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1625 else
1626 src = gen_reg_rtx (imode);
1627 if (imode != BLKmode)
1628 src = gen_lowpart (GET_MODE (orig_src), src);
1629 emit_move_insn (src, orig_src);
1630 /* ...and back again. */
1631 if (imode != BLKmode)
1632 src = gen_lowpart (imode, src);
1633 emit_group_load_1 (tmps, dst, src, type, ssize);
1634 return;
1635 }
1636
1637 /* Check for a NULL entry, used to indicate that the parameter goes
1638 both on the stack and in registers. */
1639 if (XEXP (XVECEXP (dst, 0, 0), 0))
1640 start = 0;
1641 else
1642 start = 1;
1643
1644 /* Process the pieces. */
1645 for (i = start; i < XVECLEN (dst, 0); i++)
1646 {
1647 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1648 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1649 unsigned int bytelen = GET_MODE_SIZE (mode);
1650 int shift = 0;
1651
1652 /* Handle trailing fragments that run over the size of the struct. */
1653 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1654 {
1655 /* Arrange to shift the fragment to where it belongs.
1656 extract_bit_field loads to the lsb of the reg. */
1657 if (
1658 #ifdef BLOCK_REG_PADDING
1659 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1660 == (BYTES_BIG_ENDIAN ? upward : downward)
1661 #else
1662 BYTES_BIG_ENDIAN
1663 #endif
1664 )
1665 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1666 bytelen = ssize - bytepos;
1667 gcc_assert (bytelen > 0);
1668 }
1669
1670 /* If we won't be loading directly from memory, protect the real source
1671 from strange tricks we might play; but make sure that the source can
1672 be loaded directly into the destination. */
1673 src = orig_src;
1674 if (!MEM_P (orig_src)
1675 && (!CONSTANT_P (orig_src)
1676 || (GET_MODE (orig_src) != mode
1677 && GET_MODE (orig_src) != VOIDmode)))
1678 {
1679 if (GET_MODE (orig_src) == VOIDmode)
1680 src = gen_reg_rtx (mode);
1681 else
1682 src = gen_reg_rtx (GET_MODE (orig_src));
1683
1684 emit_move_insn (src, orig_src);
1685 }
1686
1687 /* Optimize the access just a bit. */
1688 if (MEM_P (src)
1689 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1690 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1691 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1692 && bytelen == GET_MODE_SIZE (mode))
1693 {
1694 tmps[i] = gen_reg_rtx (mode);
1695 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1696 }
1697 else if (COMPLEX_MODE_P (mode)
1698 && GET_MODE (src) == mode
1699 && bytelen == GET_MODE_SIZE (mode))
1700 /* Let emit_move_complex do the bulk of the work. */
1701 tmps[i] = src;
1702 else if (GET_CODE (src) == CONCAT)
1703 {
1704 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1705 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1706
1707 if ((bytepos == 0 && bytelen == slen0)
1708 || (bytepos != 0 && bytepos + bytelen <= slen))
1709 {
1710 /* The following assumes that the concatenated objects all
1711 have the same size. In this case, a simple calculation
1712 can be used to determine the object and the bit field
1713 to be extracted. */
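/* Worked example (numbers assumed): if SRC is a CONCAT of two DImode
   halves, slen0 == 8, and the piece wanted has bytepos == 8 and
   bytelen == 8, then bytepos / slen0 == 1 selects XEXP (src, 1) and
   (bytepos % slen0) * BITS_PER_UNIT == 0 is the bit offset within it.  */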
1714 tmps[i] = XEXP (src, bytepos / slen0);
1715 if (! CONSTANT_P (tmps[i])
1716 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1717 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1718 (bytepos % slen0) * BITS_PER_UNIT,
1719 1, NULL_RTX, mode, mode);
1720 }
1721 else
1722 {
1723 rtx mem;
1724
1725 gcc_assert (!bytepos);
1726 mem = assign_stack_temp (GET_MODE (src), slen);
1727 emit_move_insn (mem, src);
1728 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1729 0, 1, NULL_RTX, mode, mode);
1730 }
1731 }
1732 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1733 SIMD register, which is currently broken. Until we get GCC
1734 to emit proper RTL for these cases, let's dump to memory. */
1735 else if (VECTOR_MODE_P (GET_MODE (dst))
1736 && REG_P (src))
1737 {
1738 int slen = GET_MODE_SIZE (GET_MODE (src));
1739 rtx mem;
1740
1741 mem = assign_stack_temp (GET_MODE (src), slen);
1742 emit_move_insn (mem, src);
1743 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1744 }
1745 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1746 && XVECLEN (dst, 0) > 1)
1747 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1748 else if (CONSTANT_P (src))
1749 {
1750 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1751
1752 if (len == ssize)
1753 tmps[i] = src;
1754 else
1755 {
1756 rtx first, second;
1757
1758 gcc_assert (2 * len == ssize);
1759 split_double (src, &first, &second);
1760 if (i)
1761 tmps[i] = second;
1762 else
1763 tmps[i] = first;
1764 }
1765 }
1766 else if (REG_P (src) && GET_MODE (src) == mode)
1767 tmps[i] = src;
1768 else
1769 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1770 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1771 mode, mode);
1772
1773 if (shift)
1774 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1775 shift, tmps[i], 0);
1776 }
1777 }
1778
1779 /* Emit code to move a block SRC of type TYPE to a block DST,
1780 where DST is non-consecutive registers represented by a PARALLEL.
1781 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1782 if not known. */
1783
1784 void
1785 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1786 {
1787 rtx *tmps;
1788 int i;
1789
1790 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1791 emit_group_load_1 (tmps, dst, src, type, ssize);
1792
1793 /* Copy the extracted pieces into the proper (probable) hard regs. */
1794 for (i = 0; i < XVECLEN (dst, 0); i++)
1795 {
1796 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1797 if (d == NULL)
1798 continue;
1799 emit_move_insn (d, tmps[i]);
1800 }
1801 }
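
/* Illustrative sketch, not part of the original sources: a hypothetical
   caller holding a PARALLEL DST that describes where the pieces go, a
   BLKmode MEM SRC and its tree TYPE could load the whole group with

       emit_group_load (dst, src, type, int_size_in_bytes (type));

   after which every register mentioned in DST holds its piece of SRC.  */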
1802
1803 /* Similar, but load SRC into new pseudos in a format that looks like
1804 PARALLEL. This can later be fed to emit_group_move to get things
1805 in the right place. */
1806
1807 rtx
1808 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1809 {
1810 rtvec vec;
1811 int i;
1812
1813 vec = rtvec_alloc (XVECLEN (parallel, 0));
1814 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1815
1816 /* Convert the vector to look just like the original PARALLEL, except
1817 with the computed values. */
1818 for (i = 0; i < XVECLEN (parallel, 0); i++)
1819 {
1820 rtx e = XVECEXP (parallel, 0, i);
1821 rtx d = XEXP (e, 0);
1822
1823 if (d)
1824 {
1825 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1826 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1827 }
1828 RTVEC_ELT (vec, i) = e;
1829 }
1830
1831 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1832 }
1833
1834 /* Emit code to move a block SRC to block DST, where SRC and DST are
1835 non-consecutive groups of registers, each represented by a PARALLEL. */
1836
1837 void
1838 emit_group_move (rtx dst, rtx src)
1839 {
1840 int i;
1841
1842 gcc_assert (GET_CODE (src) == PARALLEL
1843 && GET_CODE (dst) == PARALLEL
1844 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1845
1846 /* Skip first entry if NULL. */
1847 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1848 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1849 XEXP (XVECEXP (src, 0, i), 0));
1850 }
1851
1852 /* Move a group of registers represented by a PARALLEL into pseudos. */
1853
1854 rtx
1855 emit_group_move_into_temps (rtx src)
1856 {
1857 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1858 int i;
1859
1860 for (i = 0; i < XVECLEN (src, 0); i++)
1861 {
1862 rtx e = XVECEXP (src, 0, i);
1863 rtx d = XEXP (e, 0);
1864
1865 if (d)
1866 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1867 RTVEC_ELT (vec, i) = e;
1868 }
1869
1870 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1871 }
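
/* Illustrative sketch, with a hypothetical PARALLEL SHAPE and BLKmode
   source SRC of type TYPE: the *_into_temps variants above are meant to
   be paired with emit_group_move, along the lines of

       rtx temps = emit_group_load_into_temps (shape, src, type,
                                               int_size_in_bytes (type));
       ...
       emit_group_move (shape, temps);

   so the values sit in fresh pseudos until the final move into the
   (probable) hard registers.  */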
1872
1873 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1874 where SRC is non-consecutive registers represented by a PARALLEL.
1875 SSIZE represents the total size of block ORIG_DST, or -1 if not
1876 known. */
1877
1878 void
1879 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1880 {
1881 rtx *tmps, dst;
1882 int start, finish, i;
1883 enum machine_mode m = GET_MODE (orig_dst);
1884
1885 gcc_assert (GET_CODE (src) == PARALLEL);
1886
1887 if (!SCALAR_INT_MODE_P (m)
1888 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1889 {
1890 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1891 if (imode == BLKmode)
1892 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1893 else
1894 dst = gen_reg_rtx (imode);
1895 emit_group_store (dst, src, type, ssize);
1896 if (imode != BLKmode)
1897 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1898 emit_move_insn (orig_dst, dst);
1899 return;
1900 }
1901
1902 /* Check for a NULL entry, used to indicate that the parameter goes
1903 both on the stack and in registers. */
1904 if (XEXP (XVECEXP (src, 0, 0), 0))
1905 start = 0;
1906 else
1907 start = 1;
1908 finish = XVECLEN (src, 0);
1909
1910 tmps = XALLOCAVEC (rtx, finish);
1911
1912 /* Copy the (probable) hard regs into pseudos. */
1913 for (i = start; i < finish; i++)
1914 {
1915 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1916 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1917 {
1918 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1919 emit_move_insn (tmps[i], reg);
1920 }
1921 else
1922 tmps[i] = reg;
1923 }
1924
1925 /* If we won't be storing directly into memory, protect the real destination
1926 from strange tricks we might play. */
1927 dst = orig_dst;
1928 if (GET_CODE (dst) == PARALLEL)
1929 {
1930 rtx temp;
1931
1932 /* We can get a PARALLEL dst if there is a conditional expression in
1933 a return statement. In that case, the dst and src are the same,
1934 so no action is necessary. */
1935 if (rtx_equal_p (dst, src))
1936 return;
1937
1938 /* It is unclear if we can ever reach here, but we may as well handle
1939 it. Allocate a temporary, and split this into a store/load to/from
1940 the temporary. */
1941
1942 temp = assign_stack_temp (GET_MODE (dst), ssize);
1943 emit_group_store (temp, src, type, ssize);
1944 emit_group_load (dst, temp, type, ssize);
1945 return;
1946 }
1947 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1948 {
1949 enum machine_mode outer = GET_MODE (dst);
1950 enum machine_mode inner;
1951 HOST_WIDE_INT bytepos;
1952 bool done = false;
1953 rtx temp;
1954
1955 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1956 dst = gen_reg_rtx (outer);
1957
1958 /* Make life a bit easier for combine. */
1959 /* If the first element of the vector is the low part
1960 of the destination mode, use a paradoxical subreg to
1961 initialize the destination. */
1962 if (start < finish)
1963 {
1964 inner = GET_MODE (tmps[start]);
1965 bytepos = subreg_lowpart_offset (inner, outer);
1966 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1967 {
1968 temp = simplify_gen_subreg (outer, tmps[start],
1969 inner, 0);
1970 if (temp)
1971 {
1972 emit_move_insn (dst, temp);
1973 done = true;
1974 start++;
1975 }
1976 }
1977 }
1978
1979 /* If the first element wasn't the low part, try the last. */
1980 if (!done
1981 && start < finish - 1)
1982 {
1983 inner = GET_MODE (tmps[finish - 1]);
1984 bytepos = subreg_lowpart_offset (inner, outer);
1985 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1986 {
1987 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1988 inner, 0);
1989 if (temp)
1990 {
1991 emit_move_insn (dst, temp);
1992 done = true;
1993 finish--;
1994 }
1995 }
1996 }
1997
1998 /* Otherwise, simply initialize the result to zero. */
1999 if (!done)
2000 emit_move_insn (dst, CONST0_RTX (outer));
2001 }
2002
2003 /* Process the pieces. */
2004 for (i = start; i < finish; i++)
2005 {
2006 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2007 enum machine_mode mode = GET_MODE (tmps[i]);
2008 unsigned int bytelen = GET_MODE_SIZE (mode);
2009 unsigned int adj_bytelen = bytelen;
2010 rtx dest = dst;
2011
2012 /* Handle trailing fragments that run over the size of the struct. */
2013 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2014 adj_bytelen = ssize - bytepos;
2015
2016 if (GET_CODE (dst) == CONCAT)
2017 {
2018 if (bytepos + adj_bytelen
2019 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2020 dest = XEXP (dst, 0);
2021 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2022 {
2023 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2024 dest = XEXP (dst, 1);
2025 }
2026 else
2027 {
2028 enum machine_mode dest_mode = GET_MODE (dest);
2029 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2030
2031 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2032
2033 if (GET_MODE_ALIGNMENT (dest_mode)
2034 >= GET_MODE_ALIGNMENT (tmp_mode))
2035 {
2036 dest = assign_stack_temp (dest_mode,
2037 GET_MODE_SIZE (dest_mode));
2038 emit_move_insn (adjust_address (dest,
2039 tmp_mode,
2040 bytepos),
2041 tmps[i]);
2042 dst = dest;
2043 }
2044 else
2045 {
2046 dest = assign_stack_temp (tmp_mode,
2047 GET_MODE_SIZE (tmp_mode));
2048 emit_move_insn (dest, tmps[i]);
2049 dst = adjust_address (dest, dest_mode, bytepos);
2050 }
2051 break;
2052 }
2053 }
2054
2055 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2056 {
2057 /* store_bit_field always takes its value from the lsb.
2058 Move the fragment to the lsb if it's not already there. */
2059 if (
2060 #ifdef BLOCK_REG_PADDING
2061 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2062 == (BYTES_BIG_ENDIAN ? upward : downward)
2063 #else
2064 BYTES_BIG_ENDIAN
2065 #endif
2066 )
2067 {
2068 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2069 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2070 shift, tmps[i], 0);
2071 }
2072 bytelen = adj_bytelen;
2073 }
2074
2075 /* Optimize the access just a bit. */
2076 if (MEM_P (dest)
2077 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2078 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2079 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2080 && bytelen == GET_MODE_SIZE (mode))
2081 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2082 else
2083 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2084 0, 0, mode, tmps[i]);
2085 }
2086
2087 /* Copy from the pseudo into the (probable) hard reg. */
2088 if (orig_dst != dst)
2089 emit_move_insn (orig_dst, dst);
2090 }
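
/* Illustrative sketch, not from the original file: given a PARALLEL
   RETVAL describing a value returned in several registers and a BLKmode
   MEM SLOT of the same type TYPE (all hypothetical names), the pieces
   can be spilled with

       emit_group_store (slot, retval, type, int_size_in_bytes (type));

   which copies each register into the byte range recorded for it in
   RETVAL.  */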
2091
2092 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2093 of the value stored in X. */
2094
2095 rtx
2096 maybe_emit_group_store (rtx x, tree type)
2097 {
2098 enum machine_mode mode = TYPE_MODE (type);
2099 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2100 if (GET_CODE (x) == PARALLEL)
2101 {
2102 rtx result = gen_reg_rtx (mode);
2103 emit_group_store (result, x, type, int_size_in_bytes (type));
2104 return result;
2105 }
2106 return x;
2107 }
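
/* Illustrative sketch of the intended use: code that may receive either
   a PARALLEL or an ordinary rtx for a call result X of type TYPE can
   normalize it with

       x = maybe_emit_group_store (x, type);

   and from then on treat X as a single object of TYPE_MODE (type).  */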
2108
2109 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2110
2111 This is used on targets that return BLKmode values in registers. */
2112
2113 void
2114 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2115 {
2116 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2117 rtx src = NULL, dst = NULL;
2118 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2119 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 enum machine_mode mode = GET_MODE (srcreg);
2121 enum machine_mode tmode = GET_MODE (target);
2122 enum machine_mode copy_mode;
2123
2124 /* BLKmode registers created in the back-end shouldn't have survived. */
2125 gcc_assert (mode != BLKmode);
2126
2127 /* If the structure doesn't take up a whole number of words, see whether
2128 SRCREG is padded on the left or on the right. If it's on the left,
2129 set PADDING_CORRECTION to the number of bits to skip.
2130
2131 In most ABIs, the structure will be returned at the least significant end of
2132 the register, which translates to right padding on little-endian
2133 targets and left padding on big-endian targets. The opposite
2134 holds if the structure is returned at the most significant
2135 end of the register. */
2136 if (bytes % UNITS_PER_WORD != 0
2137 && (targetm.calls.return_in_msb (type)
2138 ? !BYTES_BIG_ENDIAN
2139 : BYTES_BIG_ENDIAN))
2140 padding_correction
2141 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2142
2143 /* We can use a single move if we have an exact mode for the size. */
2144 else if (MEM_P (target)
2145 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2146 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2147 && bytes == GET_MODE_SIZE (mode))
2148 {
2149 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2150 return;
2151 }
2152
2153 /* And if we additionally have the same mode for a register. */
2154 else if (REG_P (target)
2155 && GET_MODE (target) == mode
2156 && bytes == GET_MODE_SIZE (mode))
2157 {
2158 emit_move_insn (target, srcreg);
2159 return;
2160 }
2161
2162 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2163 into a new pseudo which is a full word. */
2164 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2165 {
2166 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2167 mode = word_mode;
2168 }
2169
2170 /* Copy the structure BITSIZE bits at a time. If the target lives in
2171 memory, take care of not reading/writing past its end by selecting
2172 a copy mode suited to BITSIZE. This should always be possible given
2173 how it is computed.
2174
2175 If the target lives in a register, make sure not to select a copy mode
2176 larger than the mode of the register.
2177
2178 We could probably emit more efficient code for machines which do not use
2179 strict alignment, but it doesn't seem worth the effort at the current
2180 time. */
2181
2182 copy_mode = word_mode;
2183 if (MEM_P (target))
2184 {
2185 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2186 if (mem_mode != BLKmode)
2187 copy_mode = mem_mode;
2188 }
2189 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2190 copy_mode = tmode;
2191
2192 for (bitpos = 0, xbitpos = padding_correction;
2193 bitpos < bytes * BITS_PER_UNIT;
2194 bitpos += bitsize, xbitpos += bitsize)
2195 {
2196 /* We need a new source operand each time xbitpos is on a
2197 word boundary and when xbitpos == padding_correction
2198 (the first time through). */
2199 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2200 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2201
2202 /* We need a new destination operand each time bitpos is on
2203 a word boundary. */
2204 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2205 dst = target;
2206 else if (bitpos % BITS_PER_WORD == 0)
2207 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2208
2209 /* Use xbitpos for the source extraction (right justified) and
2210 bitpos for the destination store (left justified). */
2211 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2212 extract_bit_field (src, bitsize,
2213 xbitpos % BITS_PER_WORD, 1,
2214 NULL_RTX, copy_mode, copy_mode));
2215 }
2216 }
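
/* Illustrative sketch, hypothetical caller: after a call whose BLKmode
   result of type TYPE arrived in the register VALREG, the bits can be
   spilled into a stack slot with

       rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (type));
       copy_blkmode_from_reg (slot, valreg, type);

   which honors the padding rules described above.  */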
2217
2218 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2219 register if it contains any data, otherwise return null.
2220
2221 This is used on targets that return BLKmode values in registers. */
2222
2223 rtx
2224 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2225 {
2226 int i, n_regs;
2227 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2228 unsigned int bitsize;
2229 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2230 enum machine_mode dst_mode;
2231
2232 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2233
2234 x = expand_normal (src);
2235
2236 bytes = int_size_in_bytes (TREE_TYPE (src));
2237 if (bytes == 0)
2238 return NULL_RTX;
2239
2240 /* If the structure doesn't take up a whole number of words, see
2241 whether the register value should be padded on the left or on
2242 the right. Set PADDING_CORRECTION to the number of padding
2243 bits needed on the left side.
2244
2245 In most ABIs, the structure will be returned at the least significant end of
2246 the register, which translates to right padding on little-endian
2247 targets and left padding on big-endian targets. The opposite
2248 holds if the structure is returned at the most significant
2249 end of the register. */
2250 if (bytes % UNITS_PER_WORD != 0
2251 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2252 ? !BYTES_BIG_ENDIAN
2253 : BYTES_BIG_ENDIAN))
2254 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2255 * BITS_PER_UNIT));
2256
2257 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2258 dst_words = XALLOCAVEC (rtx, n_regs);
2259 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2260
2261 /* Copy the structure BITSIZE bits at a time. */
2262 for (bitpos = 0, xbitpos = padding_correction;
2263 bitpos < bytes * BITS_PER_UNIT;
2264 bitpos += bitsize, xbitpos += bitsize)
2265 {
2266 /* We need a new destination pseudo each time xbitpos is
2267 on a word boundary and when xbitpos == padding_correction
2268 (the first time through). */
2269 if (xbitpos % BITS_PER_WORD == 0
2270 || xbitpos == padding_correction)
2271 {
2272 /* Generate an appropriate register. */
2273 dst_word = gen_reg_rtx (word_mode);
2274 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2275
2276 /* Clear the destination before we move anything into it. */
2277 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2278 }
2279
2280 /* We need a new source operand each time bitpos is on a word
2281 boundary. */
2282 if (bitpos % BITS_PER_WORD == 0)
2283 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2284
2285 /* Use bitpos for the source extraction (left justified) and
2286 xbitpos for the destination store (right justified). */
2287 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2288 0, 0, word_mode,
2289 extract_bit_field (src_word, bitsize,
2290 bitpos % BITS_PER_WORD, 1,
2291 NULL_RTX, word_mode, word_mode));
2292 }
2293
2294 if (mode == BLKmode)
2295 {
2296 /* Find the smallest integer mode large enough to hold the
2297 entire structure. */
2298 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2299 mode != VOIDmode;
2300 mode = GET_MODE_WIDER_MODE (mode))
2301 /* Have we found a large enough mode? */
2302 if (GET_MODE_SIZE (mode) >= bytes)
2303 break;
2304
2305 /* A suitable mode should have been found. */
2306 gcc_assert (mode != VOIDmode);
2307 }
2308
2309 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2310 dst_mode = word_mode;
2311 else
2312 dst_mode = mode;
2313 dst = gen_reg_rtx (dst_mode);
2314
2315 for (i = 0; i < n_regs; i++)
2316 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2317
2318 if (mode != dst_mode)
2319 dst = gen_lowpart (mode, dst);
2320
2321 return dst;
2322 }
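
/* Illustrative sketch, assumed context: when expanding a return whose
   BLKmode value SRC must travel back in a register of mode MODE (as
   dictated by the ABI), a hypothetical caller would do

       rtx reg = copy_blkmode_to_reg (mode, src);
       if (reg)
         emit_move_insn (ret_reg, reg);

   where RET_REG stands for whatever hard register the ABI designates.  */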
2323
2324 /* Add a USE expression for REG to the (possibly empty) list pointed
2325 to by CALL_FUSAGE. REG must denote a hard register. */
2326
2327 void
2328 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2329 {
2330 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2331
2332 *call_fusage
2333 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2334 }
2335
2336 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2337 starting at REGNO. All of these registers must be hard registers. */
2338
2339 void
2340 use_regs (rtx *call_fusage, int regno, int nregs)
2341 {
2342 int i;
2343
2344 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2345
2346 for (i = 0; i < nregs; i++)
2347 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2348 }
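
/* Illustrative sketch, hypothetical numbers: a call that passes an
   argument in three consecutive hard registers starting at register 4
   would record that fact with

       rtx call_fusage = NULL_RTX;
       use_regs (&call_fusage, 4, 3);

   so that later passes see the call as using those registers.  */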
2349
2350 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2351 PARALLEL REGS. This is for calls that pass values in multiple
2352 non-contiguous locations. The Irix 6 ABI has examples of this. */
2353
2354 void
2355 use_group_regs (rtx *call_fusage, rtx regs)
2356 {
2357 int i;
2358
2359 for (i = 0; i < XVECLEN (regs, 0); i++)
2360 {
2361 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2362
2363 /* A NULL entry means the parameter goes both on the stack and in
2364 registers. This can also be a MEM for targets that pass values
2365 partially on the stack and partially in registers. */
2366 if (reg != 0 && REG_P (reg))
2367 use_reg (call_fusage, reg);
2368 }
2369 }
2370
2371 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2372 assignment and the code of the expression on the RHS is CODE. Return
2373 NULL otherwise. */
2374
2375 static gimple
2376 get_def_for_expr (tree name, enum tree_code code)
2377 {
2378 gimple def_stmt;
2379
2380 if (TREE_CODE (name) != SSA_NAME)
2381 return NULL;
2382
2383 def_stmt = get_gimple_for_ssa_name (name);
2384 if (!def_stmt
2385 || gimple_assign_rhs_code (def_stmt) != code)
2386 return NULL;
2387
2388 return def_stmt;
2389 }
2390
2391 #ifdef HAVE_conditional_move
2392 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2393 assignment and the class of the expression on the RHS is CLASS. Return
2394 NULL otherwise. */
2395
2396 static gimple
2397 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2398 {
2399 gimple def_stmt;
2400
2401 if (TREE_CODE (name) != SSA_NAME)
2402 return NULL;
2403
2404 def_stmt = get_gimple_for_ssa_name (name);
2405 if (!def_stmt
2406 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2407 return NULL;
2408
2409 return def_stmt;
2410 }
2411 #endif
2412 \f
2413
2414 /* Determine whether the LEN bytes generated by CONSTFUN can be
2415 stored to memory using several move instructions. CONSTFUNDATA is
2416 a pointer which will be passed as argument in every CONSTFUN call.
2417 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2418 a memset operation and false if it's a copy of a constant string.
2419 Return nonzero if a call to store_by_pieces should succeed. */
2420
2421 int
2422 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2423 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2424 void *constfundata, unsigned int align, bool memsetp)
2425 {
2426 unsigned HOST_WIDE_INT l;
2427 unsigned int max_size;
2428 HOST_WIDE_INT offset = 0;
2429 enum machine_mode mode;
2430 enum insn_code icode;
2431 int reverse;
2432 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2433 rtx cst ATTRIBUTE_UNUSED;
2434
2435 if (len == 0)
2436 return 1;
2437
2438 if (! (memsetp
2439 ? SET_BY_PIECES_P (len, align)
2440 : STORE_BY_PIECES_P (len, align)))
2441 return 0;
2442
2443 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2444
2445 /* We would first store what we can in the largest integer mode, then go to
2446 successively smaller modes. */
2447
2448 for (reverse = 0;
2449 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2450 reverse++)
2451 {
2452 l = len;
2453 max_size = STORE_MAX_PIECES + 1;
2454 while (max_size > 1 && l > 0)
2455 {
2456 mode = widest_int_mode_for_size (max_size);
2457
2458 if (mode == VOIDmode)
2459 break;
2460
2461 icode = optab_handler (mov_optab, mode);
2462 if (icode != CODE_FOR_nothing
2463 && align >= GET_MODE_ALIGNMENT (mode))
2464 {
2465 unsigned int size = GET_MODE_SIZE (mode);
2466
2467 while (l >= size)
2468 {
2469 if (reverse)
2470 offset -= size;
2471
2472 cst = (*constfun) (constfundata, offset, mode);
2473 if (!targetm.legitimate_constant_p (mode, cst))
2474 return 0;
2475
2476 if (!reverse)
2477 offset += size;
2478
2479 l -= size;
2480 }
2481 }
2482
2483 max_size = GET_MODE_SIZE (mode);
2484 }
2485
2486 /* The code above should have handled everything. */
2487 gcc_assert (!l);
2488 }
2489
2490 return 1;
2491 }
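
/* Illustrative sketch; READ_PIECE and DATA are hypothetical, standing
   for whatever constfun/constfundata pair the caller uses.  Callers are
   expected to probe first and only then commit:

       if (can_store_by_pieces (len, read_piece, data, align, false))
         store_by_pieces (to, len, read_piece, data, align, false, 0);
       else
         ... fall back to a library call ...

   with READ_PIECE returning the constant rtx for each offset/mode.  */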
2492
2493 /* Generate several move instructions to store LEN bytes generated by
2494 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2495 pointer which will be passed as argument in every CONSTFUN call.
2496 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2497 a memset operation and false if it's a copy of a constant string.
2498 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2499 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2500 stpcpy. */
2501
2502 rtx
2503 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2504 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2505 void *constfundata, unsigned int align, bool memsetp, int endp)
2506 {
2507 enum machine_mode to_addr_mode = get_address_mode (to);
2508 struct store_by_pieces_d data;
2509
2510 if (len == 0)
2511 {
2512 gcc_assert (endp != 2);
2513 return to;
2514 }
2515
2516 gcc_assert (memsetp
2517 ? SET_BY_PIECES_P (len, align)
2518 : STORE_BY_PIECES_P (len, align));
2519 data.constfun = constfun;
2520 data.constfundata = constfundata;
2521 data.len = len;
2522 data.to = to;
2523 store_by_pieces_1 (&data, align);
2524 if (endp)
2525 {
2526 rtx to1;
2527
2528 gcc_assert (!data.reverse);
2529 if (data.autinc_to)
2530 {
2531 if (endp == 2)
2532 {
2533 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2535 else
2536 data.to_addr = copy_to_mode_reg (to_addr_mode,
2537 plus_constant (to_addr_mode,
2538 data.to_addr,
2539 -1));
2540 }
2541 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2542 data.offset);
2543 }
2544 else
2545 {
2546 if (endp == 2)
2547 --data.offset;
2548 to1 = adjust_address (data.to, QImode, data.offset);
2549 }
2550 return to1;
2551 }
2552 else
2553 return data.to;
2554 }
2555
2556 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2557 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2558
2559 static void
2560 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2561 {
2562 struct store_by_pieces_d data;
2563
2564 if (len == 0)
2565 return;
2566
2567 data.constfun = clear_by_pieces_1;
2568 data.constfundata = NULL;
2569 data.len = len;
2570 data.to = to;
2571 store_by_pieces_1 (&data, align);
2572 }
2573
2574 /* Callback routine for clear_by_pieces.
2575 Return const0_rtx unconditionally. */
2576
2577 static rtx
2578 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2579 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2580 enum machine_mode mode ATTRIBUTE_UNUSED)
2581 {
2582 return const0_rtx;
2583 }
2584
2585 /* Subroutine of clear_by_pieces and store_by_pieces.
2586 Generate several move instructions to store LEN bytes of block TO. (A MEM
2587 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2588
2589 static void
2590 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2591 unsigned int align ATTRIBUTE_UNUSED)
2592 {
2593 enum machine_mode to_addr_mode = get_address_mode (data->to);
2594 rtx to_addr = XEXP (data->to, 0);
2595 unsigned int max_size = STORE_MAX_PIECES + 1;
2596 enum insn_code icode;
2597
2598 data->offset = 0;
2599 data->to_addr = to_addr;
2600 data->autinc_to
2601 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2602 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2603
2604 data->explicit_inc_to = 0;
2605 data->reverse
2606 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2607 if (data->reverse)
2608 data->offset = data->len;
2609
2610 /* If storing requires more than two move insns,
2611 copy addresses to registers (to make displacements shorter)
2612 and use post-increment if available. */
2613 if (!data->autinc_to
2614 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2615 {
2616 /* Determine the main mode we'll be using.
2617 MODE might not be used depending on the definitions of the
2618 USE_* macros below. */
2619 enum machine_mode mode ATTRIBUTE_UNUSED
2620 = widest_int_mode_for_size (max_size);
2621
2622 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2623 {
2624 data->to_addr = copy_to_mode_reg (to_addr_mode,
2625 plus_constant (to_addr_mode,
2626 to_addr,
2627 data->len));
2628 data->autinc_to = 1;
2629 data->explicit_inc_to = -1;
2630 }
2631
2632 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2633 && ! data->autinc_to)
2634 {
2635 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2636 data->autinc_to = 1;
2637 data->explicit_inc_to = 1;
2638 }
2639
2640 if ( !data->autinc_to && CONSTANT_P (to_addr))
2641 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2642 }
2643
2644 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2645
2646 /* First store what we can in the largest integer mode, then go to
2647 successively smaller modes. */
2648
2649 while (max_size > 1 && data->len > 0)
2650 {
2651 enum machine_mode mode = widest_int_mode_for_size (max_size);
2652
2653 if (mode == VOIDmode)
2654 break;
2655
2656 icode = optab_handler (mov_optab, mode);
2657 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2658 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2659
2660 max_size = GET_MODE_SIZE (mode);
2661 }
2662
2663 /* The code above should have handled everything. */
2664 gcc_assert (!data->len);
2665 }
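
/* Worked example with hypothetical numbers: if pieces of up to 8 bytes
   are allowed and the alignment and mov patterns permit every mode, a
   length of 11 is handled by the loop above as one 8-byte store, then
   one 2-byte store, then one 1-byte store -- each iteration picks the
   widest integer mode narrower than MAX_SIZE and stores as many whole
   pieces of it as still fit.  */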
2666
2667 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2668 with move instructions for mode MODE. GENFUN is the gen_... function
2669 to make a move insn for that mode. DATA has all the other info. */
2670
2671 static void
2672 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2673 struct store_by_pieces_d *data)
2674 {
2675 unsigned int size = GET_MODE_SIZE (mode);
2676 rtx to1, cst;
2677
2678 while (data->len >= size)
2679 {
2680 if (data->reverse)
2681 data->offset -= size;
2682
2683 if (data->autinc_to)
2684 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2685 data->offset);
2686 else
2687 to1 = adjust_address (data->to, mode, data->offset);
2688
2689 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2690 emit_insn (gen_add2_insn (data->to_addr,
2691 gen_int_mode (-(HOST_WIDE_INT) size,
2692 GET_MODE (data->to_addr))));
2693
2694 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2695 emit_insn ((*genfun) (to1, cst));
2696
2697 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2698 emit_insn (gen_add2_insn (data->to_addr,
2699 gen_int_mode (size,
2700 GET_MODE (data->to_addr))));
2701
2702 if (! data->reverse)
2703 data->offset += size;
2704
2705 data->len -= size;
2706 }
2707 }
2708 \f
2709 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2710 its length in bytes. */
2711
2712 rtx
2713 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2714 unsigned int expected_align, HOST_WIDE_INT expected_size)
2715 {
2716 enum machine_mode mode = GET_MODE (object);
2717 unsigned int align;
2718
2719 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2720
2721 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2722 just move a zero. Otherwise, do this a piece at a time. */
2723 if (mode != BLKmode
2724 && CONST_INT_P (size)
2725 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2726 {
2727 rtx zero = CONST0_RTX (mode);
2728 if (zero != NULL)
2729 {
2730 emit_move_insn (object, zero);
2731 return NULL;
2732 }
2733
2734 if (COMPLEX_MODE_P (mode))
2735 {
2736 zero = CONST0_RTX (GET_MODE_INNER (mode));
2737 if (zero != NULL)
2738 {
2739 write_complex_part (object, zero, 0);
2740 write_complex_part (object, zero, 1);
2741 return NULL;
2742 }
2743 }
2744 }
2745
2746 if (size == const0_rtx)
2747 return NULL;
2748
2749 align = MEM_ALIGN (object);
2750
2751 if (CONST_INT_P (size)
2752 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2753 clear_by_pieces (object, INTVAL (size), align);
2754 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2755 expected_align, expected_size))
2756 ;
2757 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2758 return set_storage_via_libcall (object, size, const0_rtx,
2759 method == BLOCK_OP_TAILCALL);
2760 else
2761 gcc_unreachable ();
2762
2763 return NULL;
2764 }
2765
2766 rtx
2767 clear_storage (rtx object, rtx size, enum block_op_methods method)
2768 {
2769 return clear_storage_hints (object, size, method, 0, -1);
2770 }
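
/* Illustrative sketch, hypothetical operands: zeroing a BLKmode MEM
   SLOT of SIZE bytes is simply

       clear_storage (slot, GEN_INT (size), BLOCK_OP_NORMAL);

   which picks clear_by_pieces, a setmem pattern or a memset libcall,
   whichever applies.  */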
2771
2772
2773 /* A subroutine of clear_storage. Expand a call to memset.
2774 Return the return value of memset, 0 otherwise. */
2775
2776 rtx
2777 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2778 {
2779 tree call_expr, fn, object_tree, size_tree, val_tree;
2780 enum machine_mode size_mode;
2781 rtx retval;
2782
2783 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2784 place those new pseudos into a VAR_DECL and use them later. */
2785
2786 object = copy_addr_to_reg (XEXP (object, 0));
2787
2788 size_mode = TYPE_MODE (sizetype);
2789 size = convert_to_mode (size_mode, size, 1);
2790 size = copy_to_mode_reg (size_mode, size);
2791
2792 /* It is incorrect to use the libcall calling conventions to call
2793 memset in this context. This could be a user call to memset and
2794 the user may wish to examine the return value from memset. For
2795 targets where libcalls and normal calls have different conventions
2796 for returning pointers, we could end up generating incorrect code. */
2797
2798 object_tree = make_tree (ptr_type_node, object);
2799 if (!CONST_INT_P (val))
2800 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2801 size_tree = make_tree (sizetype, size);
2802 val_tree = make_tree (integer_type_node, val);
2803
2804 fn = clear_storage_libcall_fn (true);
2805 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2806 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2807
2808 retval = expand_normal (call_expr);
2809
2810 return retval;
2811 }
2812
2813 /* A subroutine of set_storage_via_libcall. Create the tree node
2814 for the function we use for block clears. */
2815
2816 tree block_clear_fn;
2817
2818 void
2819 init_block_clear_fn (const char *asmspec)
2820 {
2821 if (!block_clear_fn)
2822 {
2823 tree fn, args;
2824
2825 fn = get_identifier ("memset");
2826 args = build_function_type_list (ptr_type_node, ptr_type_node,
2827 integer_type_node, sizetype,
2828 NULL_TREE);
2829
2830 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2831 DECL_EXTERNAL (fn) = 1;
2832 TREE_PUBLIC (fn) = 1;
2833 DECL_ARTIFICIAL (fn) = 1;
2834 TREE_NOTHROW (fn) = 1;
2835 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2836 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2837
2838 block_clear_fn = fn;
2839 }
2840
2841 if (asmspec)
2842 set_user_assembler_name (block_clear_fn, asmspec);
2843 }
2844
2845 static tree
2846 clear_storage_libcall_fn (int for_call)
2847 {
2848 static bool emitted_extern;
2849
2850 if (!block_clear_fn)
2851 init_block_clear_fn (NULL);
2852
2853 if (for_call && !emitted_extern)
2854 {
2855 emitted_extern = true;
2856 make_decl_rtl (block_clear_fn);
2857 }
2858
2859 return block_clear_fn;
2860 }
2861 \f
2862 /* Expand a setmem pattern; return true if successful. */
2863
2864 bool
2865 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2866 unsigned int expected_align, HOST_WIDE_INT expected_size)
2867 {
2868 /* Try the most limited insn first, because there's no point
2869 including more than one in the machine description unless
2870 the more limited one has some advantage. */
2871
2872 enum machine_mode mode;
2873
2874 if (expected_align < align)
2875 expected_align = align;
2876
2877 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2878 mode = GET_MODE_WIDER_MODE (mode))
2879 {
2880 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2881
2882 if (code != CODE_FOR_nothing
2883 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2884 here because if SIZE is less than the mode mask, as it is
2885 returned by the macro, it will definitely be less than the
2886 actual mode mask. Since SIZE is within the Pmode address
2887 space, we limit MODE to Pmode. */
2888 && ((CONST_INT_P (size)
2889 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2890 <= (GET_MODE_MASK (mode) >> 1)))
2891 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2892 {
2893 struct expand_operand ops[6];
2894 unsigned int nops;
2895
2896 nops = insn_data[(int) code].n_generator_args;
2897 gcc_assert (nops == 4 || nops == 6);
2898
2899 create_fixed_operand (&ops[0], object);
2900 /* The check above guarantees that this size conversion is valid. */
2901 create_convert_operand_to (&ops[1], size, mode, true);
2902 create_convert_operand_from (&ops[2], val, byte_mode, true);
2903 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2904 if (nops == 6)
2905 {
2906 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2907 create_integer_operand (&ops[5], expected_size);
2908 }
2909 if (maybe_expand_insn (code, nops, ops))
2910 return true;
2911 }
2912 }
2913
2914 return false;
2915 }
2916
2917 \f
2918 /* Write to one of the components of the complex value CPLX. Write VAL to
2919 the real part if IMAG_P is false, and the imaginary part if it's true. */
2920
2921 static void
2922 write_complex_part (rtx cplx, rtx val, bool imag_p)
2923 {
2924 enum machine_mode cmode;
2925 enum machine_mode imode;
2926 unsigned ibitsize;
2927
2928 if (GET_CODE (cplx) == CONCAT)
2929 {
2930 emit_move_insn (XEXP (cplx, imag_p), val);
2931 return;
2932 }
2933
2934 cmode = GET_MODE (cplx);
2935 imode = GET_MODE_INNER (cmode);
2936 ibitsize = GET_MODE_BITSIZE (imode);
2937
2938 /* For MEMs simplify_gen_subreg may generate an invalid new address
2939 because, e.g., the original address is considered mode-dependent
2940 by the target, which restricts simplify_subreg from invoking
2941 adjust_address_nv. Instead of preparing fallback support for an
2942 invalid address, we call adjust_address_nv directly. */
2943 if (MEM_P (cplx))
2944 {
2945 emit_move_insn (adjust_address_nv (cplx, imode,
2946 imag_p ? GET_MODE_SIZE (imode) : 0),
2947 val);
2948 return;
2949 }
2950
2951 /* If the sub-object is at least word sized, then we know that subregging
2952 will work. This special case is important, since store_bit_field
2953 wants to operate on integer modes, and there's rarely an OImode to
2954 correspond to TCmode. */
2955 if (ibitsize >= BITS_PER_WORD
2956 /* For hard regs we have exact predicates. Assume we can split
2957 the original object if it spans an even number of hard regs.
2958 This special case is important for SCmode on 64-bit platforms
2959 where the natural size of floating-point regs is 32-bit. */
2960 || (REG_P (cplx)
2961 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2962 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2963 {
2964 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2965 imag_p ? GET_MODE_SIZE (imode) : 0);
2966 if (part)
2967 {
2968 emit_move_insn (part, val);
2969 return;
2970 }
2971 else
2972 /* simplify_gen_subreg may fail for sub-word MEMs. */
2973 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2974 }
2975
2976 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2977 }
2978
2979 /* Extract one of the components of the complex value CPLX. Extract the
2980 real part if IMAG_P is false, and the imaginary part if it's true. */
2981
2982 static rtx
2983 read_complex_part (rtx cplx, bool imag_p)
2984 {
2985 enum machine_mode cmode, imode;
2986 unsigned ibitsize;
2987
2988 if (GET_CODE (cplx) == CONCAT)
2989 return XEXP (cplx, imag_p);
2990
2991 cmode = GET_MODE (cplx);
2992 imode = GET_MODE_INNER (cmode);
2993 ibitsize = GET_MODE_BITSIZE (imode);
2994
2995 /* Special case reads from complex constants that got spilled to memory. */
2996 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2997 {
2998 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2999 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3000 {
3001 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3002 if (CONSTANT_CLASS_P (part))
3003 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3004 }
3005 }
3006
3007 /* For MEMs simplify_gen_subreg may generate an invalid new address
3008 because, e.g., the original address is considered mode-dependent
3009 by the target, which restricts simplify_subreg from invoking
3010 adjust_address_nv. Instead of preparing fallback support for an
3011 invalid address, we call adjust_address_nv directly. */
3012 if (MEM_P (cplx))
3013 return adjust_address_nv (cplx, imode,
3014 imag_p ? GET_MODE_SIZE (imode) : 0);
3015
3016 /* If the sub-object is at least word sized, then we know that subregging
3017 will work. This special case is important, since extract_bit_field
3018 wants to operate on integer modes, and there's rarely an OImode to
3019 correspond to TCmode. */
3020 if (ibitsize >= BITS_PER_WORD
3021 /* For hard regs we have exact predicates. Assume we can split
3022 the original object if it spans an even number of hard regs.
3023 This special case is important for SCmode on 64-bit platforms
3024 where the natural size of floating-point regs is 32-bit. */
3025 || (REG_P (cplx)
3026 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3027 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3028 {
3029 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3030 imag_p ? GET_MODE_SIZE (imode) : 0);
3031 if (ret)
3032 return ret;
3033 else
3034 /* simplify_gen_subreg may fail for sub-word MEMs. */
3035 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3036 }
3037
3038 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3039 true, NULL_RTX, imode, imode);
3040 }
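
/* Illustrative sketch, not from the original sources: with the two
   helpers above, negating the imaginary part of a complex value CPLX
   (i.e. conjugating it) in place amounts to

       enum machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
       rtx im = read_complex_part (cplx, true);
       im = expand_unop (imode, neg_optab, im, NULL_RTX, 0);
       write_complex_part (cplx, im, true);

   where CPLX is a hypothetical operand.  */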
3041 \f
3042 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3043 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3044 represented in NEW_MODE. If FORCE is true, this will never happen, as
3045 we'll force-create a SUBREG if needed. */
3046
3047 static rtx
3048 emit_move_change_mode (enum machine_mode new_mode,
3049 enum machine_mode old_mode, rtx x, bool force)
3050 {
3051 rtx ret;
3052
3053 if (push_operand (x, GET_MODE (x)))
3054 {
3055 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3056 MEM_COPY_ATTRIBUTES (ret, x);
3057 }
3058 else if (MEM_P (x))
3059 {
3060 /* We don't have to worry about changing the address since the
3061 size in bytes is supposed to be the same. */
3062 if (reload_in_progress)
3063 {
3064 /* Copy the MEM to change the mode and move any
3065 substitutions from the old MEM to the new one. */
3066 ret = adjust_address_nv (x, new_mode, 0);
3067 copy_replacements (x, ret);
3068 }
3069 else
3070 ret = adjust_address (x, new_mode, 0);
3071 }
3072 else
3073 {
3074 /* Note that we do want simplify_subreg's behavior of validating
3075 that the new mode is ok for a hard register. If we were to use
3076 simplify_gen_subreg, we would create the subreg, but would
3077 probably run into the target not being able to implement it. */
3078 /* Except, of course, when FORCE is true, in which case this is exactly
3079 what we want; that is needed for CCmodes on some targets. */
3080 if (force)
3081 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3082 else
3083 ret = simplify_subreg (new_mode, x, old_mode, 0);
3084 }
3085
3086 return ret;
3087 }
3088
3089 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3090 an integer mode of the same size as MODE. Returns the instruction
3091 emitted, or NULL if such a move could not be generated. */
3092
3093 static rtx
3094 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3095 {
3096 enum machine_mode imode;
3097 enum insn_code code;
3098
3099 /* There must exist a mode of the exact size we require. */
3100 imode = int_mode_for_mode (mode);
3101 if (imode == BLKmode)
3102 return NULL_RTX;
3103
3104 /* The target must support moves in this mode. */
3105 code = optab_handler (mov_optab, imode);
3106 if (code == CODE_FOR_nothing)
3107 return NULL_RTX;
3108
3109 x = emit_move_change_mode (imode, mode, x, force);
3110 if (x == NULL_RTX)
3111 return NULL_RTX;
3112 y = emit_move_change_mode (imode, mode, y, force);
3113 if (y == NULL_RTX)
3114 return NULL_RTX;
3115 return emit_insn (GEN_FCN (code) (x, y));
3116 }
3117
3118 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3119 Return an equivalent MEM that does not use an auto-increment. */
3120
3121 static rtx
3122 emit_move_resolve_push (enum machine_mode mode, rtx x)
3123 {
3124 enum rtx_code code = GET_CODE (XEXP (x, 0));
3125 HOST_WIDE_INT adjust;
3126 rtx temp;
3127
3128 adjust = GET_MODE_SIZE (mode);
3129 #ifdef PUSH_ROUNDING
3130 adjust = PUSH_ROUNDING (adjust);
3131 #endif
3132 if (code == PRE_DEC || code == POST_DEC)
3133 adjust = -adjust;
3134 else if (code == PRE_MODIFY || code == POST_MODIFY)
3135 {
3136 rtx expr = XEXP (XEXP (x, 0), 1);
3137 HOST_WIDE_INT val;
3138
3139 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3140 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3141 val = INTVAL (XEXP (expr, 1));
3142 if (GET_CODE (expr) == MINUS)
3143 val = -val;
3144 gcc_assert (adjust == val || adjust == -val);
3145 adjust = val;
3146 }
3147
3148 /* Do not use anti_adjust_stack, since we don't want to update
3149 stack_pointer_delta. */
3150 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3151 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3152 0, OPTAB_LIB_WIDEN);
3153 if (temp != stack_pointer_rtx)
3154 emit_move_insn (stack_pointer_rtx, temp);
3155
3156 switch (code)
3157 {
3158 case PRE_INC:
3159 case PRE_DEC:
3160 case PRE_MODIFY:
3161 temp = stack_pointer_rtx;
3162 break;
3163 case POST_INC:
3164 case POST_DEC:
3165 case POST_MODIFY:
3166 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3167 break;
3168 default:
3169 gcc_unreachable ();
3170 }
3171
3172 return replace_equiv_address (x, temp);
3173 }
3174
3175 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3176 X is known to satisfy push_operand, and MODE is known to be complex.
3177 Returns the last instruction emitted. */
3178
3179 rtx
3180 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3181 {
3182 enum machine_mode submode = GET_MODE_INNER (mode);
3183 bool imag_first;
3184
3185 #ifdef PUSH_ROUNDING
3186 unsigned int submodesize = GET_MODE_SIZE (submode);
3187
3188 /* If we are pushing to the stack but the machine cannot push this size
3189 exactly (PUSH_ROUNDING pads it), we need to use move instructions. */
3190 if (PUSH_ROUNDING (submodesize) != submodesize)
3191 {
3192 x = emit_move_resolve_push (mode, x);
3193 return emit_move_insn (x, y);
3194 }
3195 #endif
3196
3197 /* Note that the real part always precedes the imag part in memory
3198 regardless of the machine's endianness. */
3199 switch (GET_CODE (XEXP (x, 0)))
3200 {
3201 case PRE_DEC:
3202 case POST_DEC:
3203 imag_first = true;
3204 break;
3205 case PRE_INC:
3206 case POST_INC:
3207 imag_first = false;
3208 break;
3209 default:
3210 gcc_unreachable ();
3211 }
3212
3213 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3214 read_complex_part (y, imag_first));
3215 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3216 read_complex_part (y, !imag_first));
3217 }
3218
3219 /* A subroutine of emit_move_complex. Perform the move from Y to X
3220 via two moves of the parts. Returns the last instruction emitted. */
3221
3222 rtx
3223 emit_move_complex_parts (rtx x, rtx y)
3224 {
3225 /* Show the output dies here. This is necessary for SUBREGs
3226 of pseudos since we cannot track their lifetimes correctly;
3227 hard regs shouldn't appear here except as return values. */
3228 if (!reload_completed && !reload_in_progress
3229 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3230 emit_clobber (x);
3231
3232 write_complex_part (x, read_complex_part (y, false), false);
3233 write_complex_part (x, read_complex_part (y, true), true);
3234
3235 return get_last_insn ();
3236 }
3237
3238 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3239 MODE is known to be complex. Returns the last instruction emitted. */
3240
3241 static rtx
3242 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3243 {
3244 bool try_int;
3245
3246 /* Need to take special care for pushes, to maintain proper ordering
3247 of the data, and possibly extra padding. */
3248 if (push_operand (x, mode))
3249 return emit_move_complex_push (mode, x, y);
3250
3251 /* See if we can coerce the target into moving both values at once, except
3252 for floating point where we favor moving as parts if this is easy. */
3253 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3254 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3255 && !(REG_P (x)
3256 && HARD_REGISTER_P (x)
3257 && hard_regno_nregs[REGNO (x)][mode] == 1)
3258 && !(REG_P (y)
3259 && HARD_REGISTER_P (y)
3260 && hard_regno_nregs[REGNO (y)][mode] == 1))
3261 try_int = false;
3262 /* Not possible if the values are inherently not adjacent. */
3263 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3264 try_int = false;
3265 /* Is possible if both are registers (or subregs of registers). */
3266 else if (register_operand (x, mode) && register_operand (y, mode))
3267 try_int = true;
3268 /* If one of the operands is a memory, and alignment constraints
3269 are friendly enough, we may be able to do combined memory operations.
3270 We do not attempt this if Y is a constant because that combination is
3271 usually better with the by-parts thing below. */
3272 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3273 && (!STRICT_ALIGNMENT
3274 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3275 try_int = true;
3276 else
3277 try_int = false;
3278
3279 if (try_int)
3280 {
3281 rtx ret;
3282
3283 /* For memory to memory moves, optimal behavior can be had with the
3284 existing block move logic. */
3285 if (MEM_P (x) && MEM_P (y))
3286 {
3287 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3288 BLOCK_OP_NO_LIBCALL);
3289 return get_last_insn ();
3290 }
3291
3292 ret = emit_move_via_integer (mode, x, y, true);
3293 if (ret)
3294 return ret;
3295 }
3296
3297 return emit_move_complex_parts (x, y);
3298 }
3299
3300 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3301 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3302
3303 static rtx
3304 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3305 {
3306 rtx ret;
3307
3308 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3309 if (mode != CCmode)
3310 {
3311 enum insn_code code = optab_handler (mov_optab, CCmode);
3312 if (code != CODE_FOR_nothing)
3313 {
3314 x = emit_move_change_mode (CCmode, mode, x, true);
3315 y = emit_move_change_mode (CCmode, mode, y, true);
3316 return emit_insn (GEN_FCN (code) (x, y));
3317 }
3318 }
3319
3320 /* Otherwise, find the MODE_INT mode of the same width. */
3321 ret = emit_move_via_integer (mode, x, y, false);
3322 gcc_assert (ret != NULL);
3323 return ret;
3324 }
3325
3326 /* Return true if word I of OP lies entirely in the
3327 undefined bits of a paradoxical subreg. */
3328
3329 static bool
3330 undefined_operand_subword_p (const_rtx op, int i)
3331 {
3332 enum machine_mode innermode, innermostmode;
3333 int offset;
3334 if (GET_CODE (op) != SUBREG)
3335 return false;
3336 innermode = GET_MODE (op);
3337 innermostmode = GET_MODE (SUBREG_REG (op));
3338 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3339 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3340 memory, except for a paradoxical subreg where we define
3341 SUBREG_BYTE to be 0; undo this exception as in
3342 simplify_subreg. */
3343 if (SUBREG_BYTE (op) == 0
3344 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3345 {
3346 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3347 if (WORDS_BIG_ENDIAN)
3348 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3349 if (BYTES_BIG_ENDIAN)
3350 offset += difference % UNITS_PER_WORD;
3351 }
3352 if (offset >= GET_MODE_SIZE (innermostmode)
3353 || offset <= -GET_MODE_SIZE (word_mode))
3354 return true;
3355 return false;
3356 }
3357
3358 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3359 MODE is any multi-word or full-word mode that lacks a move_insn
3360 pattern. Note that you will get better code if you define such
3361 patterns, even if they must turn into multiple assembler instructions. */
3362
3363 static rtx
3364 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3365 {
3366 rtx last_insn = 0;
3367 rtx seq, inner;
3368 bool need_clobber;
3369 int i;
3370
3371 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3372
3373 /* If X is a push on the stack, do the push now and replace
3374 X with a reference to the stack pointer. */
3375 if (push_operand (x, mode))
3376 x = emit_move_resolve_push (mode, x);
3377
3378 /* If we are in reload, see if either operand is a MEM whose address
3379 is scheduled for replacement. */
3380 if (reload_in_progress && MEM_P (x)
3381 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3382 x = replace_equiv_address_nv (x, inner);
3383 if (reload_in_progress && MEM_P (y)
3384 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3385 y = replace_equiv_address_nv (y, inner);
3386
3387 start_sequence ();
3388
3389 need_clobber = false;
3390 for (i = 0;
3391 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3392 i++)
3393 {
3394 rtx xpart = operand_subword (x, i, 1, mode);
3395 rtx ypart;
3396
3397 /* Do not generate code for a move if it would come entirely
3398 from the undefined bits of a paradoxical subreg. */
3399 if (undefined_operand_subword_p (y, i))
3400 continue;
3401
3402 ypart = operand_subword (y, i, 1, mode);
3403
3404 /* If we can't get a part of Y, put Y into memory if it is a
3405 constant. Otherwise, force it into a register. Then we must
3406 be able to get a part of Y. */
3407 if (ypart == 0 && CONSTANT_P (y))
3408 {
3409 y = use_anchored_address (force_const_mem (mode, y));
3410 ypart = operand_subword (y, i, 1, mode);
3411 }
3412 else if (ypart == 0)
3413 ypart = operand_subword_force (y, i, mode);
3414
3415 gcc_assert (xpart && ypart);
3416
3417 need_clobber |= (GET_CODE (xpart) == SUBREG);
3418
3419 last_insn = emit_move_insn (xpart, ypart);
3420 }
3421
3422 seq = get_insns ();
3423 end_sequence ();
3424
3425 /* Show the output dies here. This is necessary for SUBREGs
3426 of pseudos since we cannot track their lifetimes correctly;
3427 hard regs shouldn't appear here except as return values.
3428 We never want to emit such a clobber after reload. */
3429 if (x != y
3430 && ! (reload_in_progress || reload_completed)
3431 && need_clobber != 0)
3432 emit_clobber (x);
3433
3434 emit_insn (seq);
3435
3436 return last_insn;
3437 }
3438
3439 /* Low level part of emit_move_insn.
3440 Called just like emit_move_insn, but assumes X and Y
3441 are basically valid. */
3442
3443 rtx
3444 emit_move_insn_1 (rtx x, rtx y)
3445 {
3446 enum machine_mode mode = GET_MODE (x);
3447 enum insn_code code;
3448
3449 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3450
3451 code = optab_handler (mov_optab, mode);
3452 if (code != CODE_FOR_nothing)
3453 return emit_insn (GEN_FCN (code) (x, y));
3454
3455 /* Expand complex moves by moving real part and imag part. */
3456 if (COMPLEX_MODE_P (mode))
3457 return emit_move_complex (mode, x, y);
3458
3459 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3460 || ALL_FIXED_POINT_MODE_P (mode))
3461 {
3462 rtx result = emit_move_via_integer (mode, x, y, true);
3463
3464 /* If we can't find an integer mode, use multi words. */
3465 if (result)
3466 return result;
3467 else
3468 return emit_move_multi_word (mode, x, y);
3469 }
3470
3471 if (GET_MODE_CLASS (mode) == MODE_CC)
3472 return emit_move_ccmode (mode, x, y);
3473
3474 /* Try using a move pattern for the corresponding integer mode. This is
3475 only safe when simplify_subreg can convert MODE constants into integer
3476 constants. At present, it can only do this reliably if the value
3477 fits within a HOST_WIDE_INT. */
3478 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3479 {
3480 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3481
3482 if (ret)
3483 {
3484 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3485 return ret;
3486 }
3487 }
3488
3489 return emit_move_multi_word (mode, x, y);
3490 }
3491
3492 /* Generate code to copy Y into X.
3493 Both Y and X must have the same mode, except that
3494 Y can be a constant with VOIDmode.
3495 This mode cannot be BLKmode; use emit_block_move for that.
3496
3497 Return the last instruction emitted. */
3498
3499 rtx
3500 emit_move_insn (rtx x, rtx y)
3501 {
3502 enum machine_mode mode = GET_MODE (x);
3503 rtx y_cst = NULL_RTX;
3504 rtx last_insn, set;
3505
3506 gcc_assert (mode != BLKmode
3507 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3508
3509 if (CONSTANT_P (y))
3510 {
3511 if (optimize
3512 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3513 && (last_insn = compress_float_constant (x, y)))
3514 return last_insn;
3515
3516 y_cst = y;
3517
3518 if (!targetm.legitimate_constant_p (mode, y))
3519 {
3520 y = force_const_mem (mode, y);
3521
3522 /* If the target's cannot_force_const_mem prevented the spill,
3523 assume that the target's move expanders will also take care
3524 of the non-legitimate constant. */
3525 if (!y)
3526 y = y_cst;
3527 else
3528 y = use_anchored_address (y);
3529 }
3530 }
3531
3532 /* If X or Y are memory references, verify that their addresses are valid
3533 for the machine. */
3534 if (MEM_P (x)
3535 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3536 MEM_ADDR_SPACE (x))
3537 && ! push_operand (x, GET_MODE (x))))
3538 x = validize_mem (x);
3539
3540 if (MEM_P (y)
3541 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3542 MEM_ADDR_SPACE (y)))
3543 y = validize_mem (y);
3544
3545 gcc_assert (mode != BLKmode);
3546
3547 last_insn = emit_move_insn_1 (x, y);
3548
3549 if (y_cst && REG_P (x)
3550 && (set = single_set (last_insn)) != NULL_RTX
3551 && SET_DEST (set) == x
3552 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3553 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3554
3555 return last_insn;
3556 }
3557
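/* Editorial illustration, not part of the original source: a caller
   that simply wants Y copied into a fresh pseudo of mode MODE can
   write (the names "mode" and "y" are placeholders here)

       rtx tmp = gen_reg_rtx (mode);
       emit_move_insn (tmp, y);

   Invalid MEM addresses are legitimized by the validize_mem calls
   above, so the caller only has to supply operands of matching mode,
   or a VOIDmode constant for Y.  */
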
3558 /* If Y is representable exactly in a narrower mode, and the target can
3559 perform the extension directly from constant or memory, then emit the
3560 move as an extension. */
3561
3562 static rtx
3563 compress_float_constant (rtx x, rtx y)
3564 {
3565 enum machine_mode dstmode = GET_MODE (x);
3566 enum machine_mode orig_srcmode = GET_MODE (y);
3567 enum machine_mode srcmode;
3568 REAL_VALUE_TYPE r;
3569 int oldcost, newcost;
3570 bool speed = optimize_insn_for_speed_p ();
3571
3572 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3573
3574 if (targetm.legitimate_constant_p (dstmode, y))
3575 oldcost = set_src_cost (y, speed);
3576 else
3577 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3578
3579 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3580 srcmode != orig_srcmode;
3581 srcmode = GET_MODE_WIDER_MODE (srcmode))
3582 {
3583 enum insn_code ic;
3584 rtx trunc_y, last_insn;
3585
3586 /* Skip if the target can't extend this way. */
3587 ic = can_extend_p (dstmode, srcmode, 0);
3588 if (ic == CODE_FOR_nothing)
3589 continue;
3590
3591 /* Skip if the narrowed value isn't exact. */
3592 if (! exact_real_truncate (srcmode, &r))
3593 continue;
3594
3595 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3596
3597 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3598 {
3599 /* Skip if the target needs extra instructions to perform
3600 the extension. */
3601 if (!insn_operand_matches (ic, 1, trunc_y))
3602 continue;
3603 /* This is valid, but may not be cheaper than the original. */
3604 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3605 speed);
3606 if (oldcost < newcost)
3607 continue;
3608 }
3609 else if (float_extend_from_mem[dstmode][srcmode])
3610 {
3611 trunc_y = force_const_mem (srcmode, trunc_y);
3612 /* This is valid, but may not be cheaper than the original. */
3613 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3614 speed);
3615 if (oldcost < newcost)
3616 continue;
3617 trunc_y = validize_mem (trunc_y);
3618 }
3619 else
3620 continue;
3621
3622 /* For CSE's benefit, force the compressed constant pool entry
3623 into a new pseudo. This constant may be used in different modes,
3624 and if not, combine will put things back together for us. */
3625 trunc_y = force_reg (srcmode, trunc_y);
3626 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3627 last_insn = get_last_insn ();
3628
3629 if (REG_P (x))
3630 set_unique_reg_note (last_insn, REG_EQUAL, y);
3631
3632 return last_insn;
3633 }
3634
3635 return NULL_RTX;
3636 }
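
/* Editorial illustration, not part of the original source: on a target
   that can extend SFmode to DFmode directly from a constant operand
   (extendsfdf2 accepting a legitimate SFmode immediate), a store of
   the DFmode constant 1.5 can be compressed by the routine above into
   roughly

       (set (reg:DF d) (float_extend:DF (const_double:SF 1.5)))

   because 1.5 truncates to SFmode exactly.  A constant such as 0.1 is
   not exact in SFmode, fails exact_real_truncate, and is emitted in
   full DFmode instead.  The modes named here are only an assumed
   example.  */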
3637 \f
3638 /* Pushing data onto the stack. */
3639
3640 /* Push a block of length SIZE (perhaps variable)
3641 and return an rtx to address the beginning of the block.
3642 The value may be virtual_outgoing_args_rtx.
3643
3644 EXTRA is the number of bytes of padding to push in addition to SIZE.
3645 BELOW nonzero means this padding comes at low addresses;
3646 otherwise, the padding comes at high addresses. */
3647
3648 rtx
3649 push_block (rtx size, int extra, int below)
3650 {
3651 rtx temp;
3652
3653 size = convert_modes (Pmode, ptr_mode, size, 1);
3654 if (CONSTANT_P (size))
3655 anti_adjust_stack (plus_constant (Pmode, size, extra));
3656 else if (REG_P (size) && extra == 0)
3657 anti_adjust_stack (size);
3658 else
3659 {
3660 temp = copy_to_mode_reg (Pmode, size);
3661 if (extra != 0)
3662 temp = expand_binop (Pmode, add_optab, temp,
3663 gen_int_mode (extra, Pmode),
3664 temp, 0, OPTAB_LIB_WIDEN);
3665 anti_adjust_stack (temp);
3666 }
3667
3668 #ifndef STACK_GROWS_DOWNWARD
3669 if (0)
3670 #else
3671 if (1)
3672 #endif
3673 {
3674 temp = virtual_outgoing_args_rtx;
3675 if (extra != 0 && below)
3676 temp = plus_constant (Pmode, temp, extra);
3677 }
3678 else
3679 {
3680 if (CONST_INT_P (size))
3681 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3682 -INTVAL (size) - (below ? 0 : extra));
3683 else if (extra != 0 && !below)
3684 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3685 negate_rtx (Pmode, plus_constant (Pmode, size,
3686 extra)));
3687 else
3688 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3689 negate_rtx (Pmode, size));
3690 }
3691
3692 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3693 }
3694
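/* Editorial illustration, not part of the original source: a call such
   as

       rtx addr = push_block (GEN_INT (16), 0, 0);

   anti-adjusts the stack by 16 bytes and, on a STACK_GROWS_DOWNWARD
   target, returns an address based on virtual_outgoing_args_rtx for
   the start of the newly allocated block.  */
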
3695 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3696
3697 static rtx
3698 mem_autoinc_base (rtx mem)
3699 {
3700 if (MEM_P (mem))
3701 {
3702 rtx addr = XEXP (mem, 0);
3703 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3704 return XEXP (addr, 0);
3705 }
3706 return NULL;
3707 }
3708
3709 /* A utility routine used here, in reload, and in try_split. The insns
3710 after PREV up to and including LAST are known to adjust the stack,
3711 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3712 placing notes as appropriate. PREV may be NULL, indicating the
3713 entire insn sequence prior to LAST should be scanned.
3714
3715 The set of allowed stack pointer modifications is small:
3716 (1) One or more auto-inc style memory references (aka pushes),
3717 (2) One or more addition/subtraction with the SP as destination,
3718 (3) A single move insn with the SP as destination,
3719 (4) A call_pop insn,
3720 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3721
3722 Insns in the sequence that do not modify the SP are ignored,
3723 except for noreturn calls.
3724
3725 The return value is the amount of adjustment that can be trivially
3726 verified, via immediate operand or auto-inc. If the adjustment
3727 cannot be trivially extracted, the return value is INT_MIN. */
3728
3729 HOST_WIDE_INT
3730 find_args_size_adjust (rtx insn)
3731 {
3732 rtx dest, set, pat;
3733 int i;
3734
3735 pat = PATTERN (insn);
3736 set = NULL;
3737
3738 /* Look for a call_pop pattern. */
3739 if (CALL_P (insn))
3740 {
3741 /* We have to allow non-call_pop patterns for the case
3742 of emit_single_push_insn of a TLS address. */
3743 if (GET_CODE (pat) != PARALLEL)
3744 return 0;
3745
3746 /* All call_pop have a stack pointer adjust in the parallel.
3747 The call itself is always first, and the stack adjust is
3748 usually last, so search from the end. */
3749 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3750 {
3751 set = XVECEXP (pat, 0, i);
3752 if (GET_CODE (set) != SET)
3753 continue;
3754 dest = SET_DEST (set);
3755 if (dest == stack_pointer_rtx)
3756 break;
3757 }
3758 /* We'd better have found the stack pointer adjust. */
3759 if (i == 0)
3760 return 0;
3761 /* Fall through to process the extracted SET and DEST
3762 as if it were a standalone insn. */
3763 }
3764 else if (GET_CODE (pat) == SET)
3765 set = pat;
3766 else if ((set = single_set (insn)) != NULL)
3767 ;
3768 else if (GET_CODE (pat) == PARALLEL)
3769 {
3770 /* ??? Some older ports use a parallel with a stack adjust
3771 and a store for a PUSH_ROUNDING pattern, rather than a
3772 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3773 /* ??? See h8300 and m68k, pushqi1. */
3774 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3775 {
3776 set = XVECEXP (pat, 0, i);
3777 if (GET_CODE (set) != SET)
3778 continue;
3779 dest = SET_DEST (set);
3780 if (dest == stack_pointer_rtx)
3781 break;
3782
3783 /* We do not expect an auto-inc of the sp in the parallel. */
3784 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3785 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3786 != stack_pointer_rtx);
3787 }
3788 if (i < 0)
3789 return 0;
3790 }
3791 else
3792 return 0;
3793
3794 dest = SET_DEST (set);
3795
3796 /* Look for direct modifications of the stack pointer. */
3797 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3798 {
3799 /* Look for a trivial adjustment, otherwise assume nothing. */
3800 /* Note that the SPU restore_stack_block pattern refers to
3801 the stack pointer in V4SImode. Consider that non-trivial. */
3802 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3803 && GET_CODE (SET_SRC (set)) == PLUS
3804 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3805 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3806 return INTVAL (XEXP (SET_SRC (set), 1));
3807 /* ??? Reload can generate no-op moves, which will be cleaned
3808 up later. Recognize it and continue searching. */
3809 else if (rtx_equal_p (dest, SET_SRC (set)))
3810 return 0;
3811 else
3812 return HOST_WIDE_INT_MIN;
3813 }
3814 else
3815 {
3816 rtx mem, addr;
3817
3818 /* Otherwise only think about autoinc patterns. */
3819 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3820 {
3821 mem = dest;
3822 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3823 != stack_pointer_rtx);
3824 }
3825 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3826 mem = SET_SRC (set);
3827 else
3828 return 0;
3829
3830 addr = XEXP (mem, 0);
3831 switch (GET_CODE (addr))
3832 {
3833 case PRE_INC:
3834 case POST_INC:
3835 return GET_MODE_SIZE (GET_MODE (mem));
3836 case PRE_DEC:
3837 case POST_DEC:
3838 return -GET_MODE_SIZE (GET_MODE (mem));
3839 case PRE_MODIFY:
3840 case POST_MODIFY:
3841 addr = XEXP (addr, 1);
3842 gcc_assert (GET_CODE (addr) == PLUS);
3843 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3844 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3845 return INTVAL (XEXP (addr, 1));
3846 default:
3847 gcc_unreachable ();
3848 }
3849 }
3850 }
3851
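/* Add REG_ARGS_SIZE notes to the nondebug insns between PREV
   (exclusive) and LAST (inclusive), as described in the comment above
   find_args_size_adjust, and return the args-size value in effect
   before the sequence, or INT_MIN if some adjustment could not be
   trivially verified.  (Editorial summary; the original source leaves
   this function uncommented here.)  */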
3852 int
3853 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3854 {
3855 int args_size = end_args_size;
3856 bool saw_unknown = false;
3857 rtx insn;
3858
3859 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3860 {
3861 HOST_WIDE_INT this_delta;
3862
3863 if (!NONDEBUG_INSN_P (insn))
3864 continue;
3865
3866 this_delta = find_args_size_adjust (insn);
3867 if (this_delta == 0)
3868 {
3869 if (!CALL_P (insn)
3870 || ACCUMULATE_OUTGOING_ARGS
3871 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3872 continue;
3873 }
3874
3875 gcc_assert (!saw_unknown);
3876 if (this_delta == HOST_WIDE_INT_MIN)
3877 saw_unknown = true;
3878
3879 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3880 #ifdef STACK_GROWS_DOWNWARD
3881 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3882 #endif
3883 args_size -= this_delta;
3884 }
3885
3886 return saw_unknown ? INT_MIN : args_size;
3887 }
3888
3889 #ifdef PUSH_ROUNDING
3890 /* Emit single push insn. */
3891
3892 static void
3893 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3894 {
3895 rtx dest_addr;
3896 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3897 rtx dest;
3898 enum insn_code icode;
3899
3900 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3901 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3902 a MEM representing the push operation at the move expander. */
3903 icode = optab_handler (push_optab, mode);
3904 if (icode != CODE_FOR_nothing)
3905 {
3906 struct expand_operand ops[1];
3907
3908 create_input_operand (&ops[0], x, mode);
3909 if (maybe_expand_insn (icode, 1, ops))
3910 return;
3911 }
3912 if (GET_MODE_SIZE (mode) == rounded_size)
3913 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3914 /* If we are to pad downward, adjust the stack pointer first and
3915 then store X into the stack location using an offset. This is
3916 because emit_move_insn does not know how to pad; it does not have
3917 access to type. */
3918 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3919 {
3920 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3921 HOST_WIDE_INT offset;
3922
3923 emit_move_insn (stack_pointer_rtx,
3924 expand_binop (Pmode,
3925 #ifdef STACK_GROWS_DOWNWARD
3926 sub_optab,
3927 #else
3928 add_optab,
3929 #endif
3930 stack_pointer_rtx,
3931 gen_int_mode (rounded_size, Pmode),
3932 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3933
3934 offset = (HOST_WIDE_INT) padding_size;
3935 #ifdef STACK_GROWS_DOWNWARD
3936 if (STACK_PUSH_CODE == POST_DEC)
3937 /* We have already decremented the stack pointer, so get the
3938 previous value. */
3939 offset += (HOST_WIDE_INT) rounded_size;
3940 #else
3941 if (STACK_PUSH_CODE == POST_INC)
3942 /* We have already incremented the stack pointer, so get the
3943 previous value. */
3944 offset -= (HOST_WIDE_INT) rounded_size;
3945 #endif
3946 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3947 gen_int_mode (offset, Pmode));
3948 }
3949 else
3950 {
3951 #ifdef STACK_GROWS_DOWNWARD
3952 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3953 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3954 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
3955 Pmode));
3956 #else
3957 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3958 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3959 gen_int_mode (rounded_size, Pmode));
3960 #endif
3961 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3962 }
3963
3964 dest = gen_rtx_MEM (mode, dest_addr);
3965
3966 if (type != 0)
3967 {
3968 set_mem_attributes (dest, type, 1);
3969
3970 if (flag_optimize_sibling_calls)
3971 /* Function incoming arguments may overlap with sibling call
3972 outgoing arguments and we cannot allow reordering of reads
3973 from function arguments with stores to outgoing arguments
3974 of sibling calls. */
3975 set_mem_alias_set (dest, 0);
3976 }
3977 emit_move_insn (dest, x);
3978 }
3979
3980 /* Emit and annotate a single push insn. */
3981
3982 static void
3983 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3984 {
3985 int delta, old_delta = stack_pointer_delta;
3986 rtx prev = get_last_insn ();
3987 rtx last;
3988
3989 emit_single_push_insn_1 (mode, x, type);
3990
3991 last = get_last_insn ();
3992
3993 /* Notice the common case where we emitted exactly one insn. */
3994 if (PREV_INSN (last) == prev)
3995 {
3996 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3997 return;
3998 }
3999
4000 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4001 gcc_assert (delta == INT_MIN || delta == old_delta);
4002 }
4003 #endif
4004
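/* Editorial illustration, not part of the original source: on a
   STACK_GROWS_DOWNWARD target with STACK_PUSH_CODE == PRE_DEC, no
   push_optab pattern and no padding, emit_single_push_insn_1 above
   reduces a push of a word_mode register R to a single move of roughly
   the form

       (set (mem:SI (pre_dec:P (reg sp))) (reg:SI R))

   (assuming SImode is word_mode), and emit_single_push_insn then
   attaches a REG_ARGS_SIZE note recording stack_pointer_delta.  */
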
4005 /* Generate code to push X onto the stack, assuming it has mode MODE and
4006 type TYPE.
4007 MODE is redundant except when X is a CONST_INT (since they don't
4008 carry mode info).
4009 SIZE is an rtx for the size of data to be copied (in bytes),
4010 needed only if X is BLKmode.
4011
4012 ALIGN (in bits) is maximum alignment we can assume.
4013
4014 If PARTIAL and REG are both nonzero, then copy that many of the first
4015 bytes of X into registers starting with REG, and push the rest of X.
4016 The amount of space pushed is decreased by PARTIAL bytes.
4017 REG must be a hard register in this case.
4018 If REG is zero but PARTIAL is not, take all other actions for an
4019 argument partially in registers, but do not actually load any
4020 registers.
4021
4022 EXTRA is the amount in bytes of extra space to leave next to this arg.
4023 This is ignored if an argument block has already been allocated.
4024
4025 On a machine that lacks real push insns, ARGS_ADDR is the address of
4026 the bottom of the argument block for this call. We use indexing off there
4027 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
4028 argument block has not been preallocated.
4029
4030 ARGS_SO_FAR is the size of args previously pushed for this call.
4031
4032 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4033 for arguments passed in registers. If nonzero, it will be the number
4034 of bytes required. */
4035
4036 void
4037 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4038 unsigned int align, int partial, rtx reg, int extra,
4039 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4040 rtx alignment_pad)
4041 {
4042 rtx xinner;
4043 enum direction stack_direction
4044 #ifdef STACK_GROWS_DOWNWARD
4045 = downward;
4046 #else
4047 = upward;
4048 #endif
4049
4050 /* Decide where to pad the argument: `downward' for below,
4051 `upward' for above, or `none' for don't pad it.
4052 Default is below for small data on big-endian machines; else above. */
4053 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4054
4055 /* Invert direction if stack is post-decrement.
4056 FIXME: why? */
4057 if (STACK_PUSH_CODE == POST_DEC)
4058 if (where_pad != none)
4059 where_pad = (where_pad == downward ? upward : downward);
4060
4061 xinner = x;
4062
4063 if (mode == BLKmode
4064 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4065 {
4066 /* Copy a block into the stack, entirely or partially. */
4067
4068 rtx temp;
4069 int used;
4070 int offset;
4071 int skip;
4072
4073 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4074 used = partial - offset;
4075
4076 if (mode != BLKmode)
4077 {
4078 /* A value is to be stored in an insufficiently aligned
4079 stack slot; copy via a suitably aligned slot if
4080 necessary. */
4081 size = GEN_INT (GET_MODE_SIZE (mode));
4082 if (!MEM_P (xinner))
4083 {
4084 temp = assign_temp (type, 1, 1);
4085 emit_move_insn (temp, xinner);
4086 xinner = temp;
4087 }
4088 }
4089
4090 gcc_assert (size);
4091
4092 /* USED is now the # of bytes we need not copy to the stack
4093 because registers will take care of them. */
4094
4095 if (partial != 0)
4096 xinner = adjust_address (xinner, BLKmode, used);
4097
4098 /* If the partial register-part of the arg counts in its stack size,
4099 skip the part of stack space corresponding to the registers.
4100 Otherwise, start copying to the beginning of the stack space,
4101 by setting SKIP to 0. */
4102 skip = (reg_parm_stack_space == 0) ? 0 : used;
4103
4104 #ifdef PUSH_ROUNDING
4105 /* Do it with several push insns if that doesn't take lots of insns
4106 and if there is no difficulty with push insns that skip bytes
4107 on the stack for alignment purposes. */
4108 if (args_addr == 0
4109 && PUSH_ARGS
4110 && CONST_INT_P (size)
4111 && skip == 0
4112 && MEM_ALIGN (xinner) >= align
4113 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4114 /* Here we avoid the case of a structure whose weak alignment
4115 forces many pushes of a small amount of data,
4116 and such small pushes do rounding that causes trouble. */
4117 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4118 || align >= BIGGEST_ALIGNMENT
4119 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4120 == (align / BITS_PER_UNIT)))
4121 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4122 {
4123 /* Push padding now if padding above and stack grows down,
4124 or if padding below and stack grows up.
4125 But if space already allocated, this has already been done. */
4126 if (extra && args_addr == 0
4127 && where_pad != none && where_pad != stack_direction)
4128 anti_adjust_stack (GEN_INT (extra));
4129
4130 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4131 }
4132 else
4133 #endif /* PUSH_ROUNDING */
4134 {
4135 rtx target;
4136
4137 /* Otherwise make space on the stack and copy the data
4138 to the address of that space. */
4139
4140 /* Deduct words put into registers from the size we must copy. */
4141 if (partial != 0)
4142 {
4143 if (CONST_INT_P (size))
4144 size = GEN_INT (INTVAL (size) - used);
4145 else
4146 size = expand_binop (GET_MODE (size), sub_optab, size,
4147 gen_int_mode (used, GET_MODE (size)),
4148 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4149 }
4150
4151 /* Get the address of the stack space.
4152 In this case, we do not deal with EXTRA separately.
4153 A single stack adjust will do. */
4154 if (! args_addr)
4155 {
4156 temp = push_block (size, extra, where_pad == downward);
4157 extra = 0;
4158 }
4159 else if (CONST_INT_P (args_so_far))
4160 temp = memory_address (BLKmode,
4161 plus_constant (Pmode, args_addr,
4162 skip + INTVAL (args_so_far)));
4163 else
4164 temp = memory_address (BLKmode,
4165 plus_constant (Pmode,
4166 gen_rtx_PLUS (Pmode,
4167 args_addr,
4168 args_so_far),
4169 skip));
4170
4171 if (!ACCUMULATE_OUTGOING_ARGS)
4172 {
4173 /* If the source is referenced relative to the stack pointer,
4174 copy it to another register to stabilize it. We do not need
4175 to do this if we know that we won't be changing sp. */
4176
4177 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4178 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4179 temp = copy_to_reg (temp);
4180 }
4181
4182 target = gen_rtx_MEM (BLKmode, temp);
4183
4184 /* We do *not* set_mem_attributes here, because incoming arguments
4185 may overlap with sibling call outgoing arguments and we cannot
4186 allow reordering of reads from function arguments with stores
4187 to outgoing arguments of sibling calls. We do, however, want
4188 to record the alignment of the stack slot. */
4189 /* ALIGN may well be better aligned than TYPE, e.g. due to
4190 PARM_BOUNDARY. Assume the caller isn't lying. */
4191 set_mem_align (target, align);
4192
4193 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4194 }
4195 }
4196 else if (partial > 0)
4197 {
4198 /* Scalar partly in registers. */
4199
4200 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4201 int i;
4202 int not_stack;
4203 /* # bytes of start of argument
4204 that we must make space for but need not store. */
4205 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4206 int args_offset = INTVAL (args_so_far);
4207 int skip;
4208
4209 /* Push padding now if padding above and stack grows down,
4210 or if padding below and stack grows up.
4211 But if space already allocated, this has already been done. */
4212 if (extra && args_addr == 0
4213 && where_pad != none && where_pad != stack_direction)
4214 anti_adjust_stack (GEN_INT (extra));
4215
4216 /* If we make space by pushing it, we might as well push
4217 the real data. Otherwise, we can leave OFFSET nonzero
4218 and leave the space uninitialized. */
4219 if (args_addr == 0)
4220 offset = 0;
4221
4222 /* Now NOT_STACK gets the number of words that we don't need to
4223 allocate on the stack. Convert OFFSET to words too. */
4224 not_stack = (partial - offset) / UNITS_PER_WORD;
4225 offset /= UNITS_PER_WORD;
4226
4227 /* If the partial register-part of the arg counts in its stack size,
4228 skip the part of stack space corresponding to the registers.
4229 Otherwise, start copying to the beginning of the stack space,
4230 by setting SKIP to 0. */
4231 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4232
4233 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4234 x = validize_mem (force_const_mem (mode, x));
4235
4236 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4237 SUBREGs of such registers are not allowed. */
4238 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4239 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4240 x = copy_to_reg (x);
4241
4242 /* Loop over all the words allocated on the stack for this arg. */
4243 /* We can do it by words, because any scalar bigger than a word
4244 has a size that is a multiple of a word. */
4245 #ifndef PUSH_ARGS_REVERSED
4246 for (i = not_stack; i < size; i++)
4247 #else
4248 for (i = size - 1; i >= not_stack; i--)
4249 #endif
4250 if (i >= not_stack + offset)
4251 emit_push_insn (operand_subword_force (x, i, mode),
4252 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4253 0, args_addr,
4254 GEN_INT (args_offset + ((i - not_stack + skip)
4255 * UNITS_PER_WORD)),
4256 reg_parm_stack_space, alignment_pad);
4257 }
4258 else
4259 {
4260 rtx addr;
4261 rtx dest;
4262
4263 /* Push padding now if padding above and stack grows down,
4264 or if padding below and stack grows up.
4265 But if space already allocated, this has already been done. */
4266 if (extra && args_addr == 0
4267 && where_pad != none && where_pad != stack_direction)
4268 anti_adjust_stack (GEN_INT (extra));
4269
4270 #ifdef PUSH_ROUNDING
4271 if (args_addr == 0 && PUSH_ARGS)
4272 emit_single_push_insn (mode, x, type);
4273 else
4274 #endif
4275 {
4276 if (CONST_INT_P (args_so_far))
4277 addr
4278 = memory_address (mode,
4279 plus_constant (Pmode, args_addr,
4280 INTVAL (args_so_far)));
4281 else
4282 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4283 args_so_far));
4284 dest = gen_rtx_MEM (mode, addr);
4285
4286 /* We do *not* set_mem_attributes here, because incoming arguments
4287 may overlap with sibling call outgoing arguments and we cannot
4288 allow reordering of reads from function arguments with stores
4289 to outgoing arguments of sibling calls. We do, however, want
4290 to record the alignment of the stack slot. */
4291 /* ALIGN may well be better aligned than TYPE, e.g. due to
4292 PARM_BOUNDARY. Assume the caller isn't lying. */
4293 set_mem_align (dest, align);
4294
4295 emit_move_insn (dest, x);
4296 }
4297 }
4298
4299 /* If part should go in registers, copy that part
4300 into the appropriate registers. Do this now, at the end,
4301 since mem-to-mem copies above may do function calls. */
4302 if (partial > 0 && reg != 0)
4303 {
4304 /* Handle calls that pass values in multiple non-contiguous locations.
4305 The Irix 6 ABI has examples of this. */
4306 if (GET_CODE (reg) == PARALLEL)
4307 emit_group_load (reg, x, type, -1);
4308 else
4309 {
4310 gcc_assert (partial % UNITS_PER_WORD == 0);
4311 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4312 }
4313 }
4314
4315 if (extra && args_addr == 0 && where_pad == stack_direction)
4316 anti_adjust_stack (GEN_INT (extra));
4317
4318 if (alignment_pad && args_addr == 0)
4319 anti_adjust_stack (alignment_pad);
4320 }
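
/* Editorial illustration, not part of the original source: on a target
   with push insns, a caller pushing a plain word-sized argument, with
   no partial-register part and no preallocated argument block, might
   use the routine above roughly as

       emit_push_insn (arg, word_mode, NULL_TREE, NULL_RTX,
                       PARM_BOUNDARY, 0, NULL_RTX, 0,
                       NULL_RTX, const0_rtx, 0, NULL_RTX);

   where "arg" is a placeholder rtx for the value.  With ARGS_ADDR zero
   this ends up in emit_single_push_insn; machines without push insns
   instead pass the address of the preallocated argument block as
   ARGS_ADDR.  */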
4321 \f
4322 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4323 operations. */
4324
4325 static rtx
4326 get_subtarget (rtx x)
4327 {
4328 return (optimize
4329 || x == 0
4330 /* Only registers can be subtargets. */
4331 || !REG_P (x)
4332 /* Don't use hard regs to avoid extending their life. */
4333 || REGNO (x) < FIRST_PSEUDO_REGISTER
4334 ? 0 : x);
4335 }
4336
4337 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4338 FIELD is a bitfield. Returns true if the optimization was successful,
4339 and there's nothing else to do. */
4340
4341 static bool
4342 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4343 unsigned HOST_WIDE_INT bitpos,
4344 unsigned HOST_WIDE_INT bitregion_start,
4345 unsigned HOST_WIDE_INT bitregion_end,
4346 enum machine_mode mode1, rtx str_rtx,
4347 tree to, tree src)
4348 {
4349 enum machine_mode str_mode = GET_MODE (str_rtx);
4350 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4351 tree op0, op1;
4352 rtx value, result;
4353 optab binop;
4354 gimple srcstmt;
4355 enum tree_code code;
4356
4357 if (mode1 != VOIDmode
4358 || bitsize >= BITS_PER_WORD
4359 || str_bitsize > BITS_PER_WORD
4360 || TREE_SIDE_EFFECTS (to)
4361 || TREE_THIS_VOLATILE (to))
4362 return false;
4363
4364 STRIP_NOPS (src);
4365 if (TREE_CODE (src) != SSA_NAME)
4366 return false;
4367 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4368 return false;
4369
4370 srcstmt = get_gimple_for_ssa_name (src);
4371 if (!srcstmt
4372 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4373 return false;
4374
4375 code = gimple_assign_rhs_code (srcstmt);
4376
4377 op0 = gimple_assign_rhs1 (srcstmt);
4378
4379 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4380 to find its initialization. Hopefully the initialization will
4381 be from a bitfield load. */
4382 if (TREE_CODE (op0) == SSA_NAME)
4383 {
4384 gimple op0stmt = get_gimple_for_ssa_name (op0);
4385
4386 /* We want to eventually have OP0 be the same as TO, which
4387 should be a bitfield. */
4388 if (!op0stmt
4389 || !is_gimple_assign (op0stmt)
4390 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4391 return false;
4392 op0 = gimple_assign_rhs1 (op0stmt);
4393 }
4394
4395 op1 = gimple_assign_rhs2 (srcstmt);
4396
4397 if (!operand_equal_p (to, op0, 0))
4398 return false;
4399
4400 if (MEM_P (str_rtx))
4401 {
4402 unsigned HOST_WIDE_INT offset1;
4403
4404 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4405 str_mode = word_mode;
4406 str_mode = get_best_mode (bitsize, bitpos,
4407 bitregion_start, bitregion_end,
4408 MEM_ALIGN (str_rtx), str_mode, 0);
4409 if (str_mode == VOIDmode)
4410 return false;
4411 str_bitsize = GET_MODE_BITSIZE (str_mode);
4412
4413 offset1 = bitpos;
4414 bitpos %= str_bitsize;
4415 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4416 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4417 }
4418 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4419 return false;
4420
4421 /* If the bit field covers the whole REG/MEM, store_field
4422 will likely generate better code. */
4423 if (bitsize >= str_bitsize)
4424 return false;
4425
4426 /* We can't handle fields split across multiple entities. */
4427 if (bitpos + bitsize > str_bitsize)
4428 return false;
4429
4430 if (BYTES_BIG_ENDIAN)
4431 bitpos = str_bitsize - bitpos - bitsize;
4432
4433 switch (code)
4434 {
4435 case PLUS_EXPR:
4436 case MINUS_EXPR:
4437 /* For now, just optimize the case of the topmost bitfield
4438 where we don't need to do any masking and also
4439 1 bit bitfields where xor can be used.
4440 We might win by one instruction for the other bitfields
4441 too if insv/extv instructions aren't used, so that
4442 can be added later. */
4443 if (bitpos + bitsize != str_bitsize
4444 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4445 break;
4446
4447 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4448 value = convert_modes (str_mode,
4449 TYPE_MODE (TREE_TYPE (op1)), value,
4450 TYPE_UNSIGNED (TREE_TYPE (op1)));
4451
4452 /* We may be accessing data outside the field, which means
4453 we can alias adjacent data. */
4454 if (MEM_P (str_rtx))
4455 {
4456 str_rtx = shallow_copy_rtx (str_rtx);
4457 set_mem_alias_set (str_rtx, 0);
4458 set_mem_expr (str_rtx, 0);
4459 }
4460
4461 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4462 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4463 {
4464 value = expand_and (str_mode, value, const1_rtx, NULL);
4465 binop = xor_optab;
4466 }
4467 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4468 result = expand_binop (str_mode, binop, str_rtx,
4469 value, str_rtx, 1, OPTAB_WIDEN);
4470 if (result != str_rtx)
4471 emit_move_insn (str_rtx, result);
4472 return true;
4473
4474 case BIT_IOR_EXPR:
4475 case BIT_XOR_EXPR:
4476 if (TREE_CODE (op1) != INTEGER_CST)
4477 break;
4478 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4479 value = convert_modes (str_mode,
4480 TYPE_MODE (TREE_TYPE (op1)), value,
4481 TYPE_UNSIGNED (TREE_TYPE (op1)));
4482
4483 /* We may be accessing data outside the field, which means
4484 we can alias adjacent data. */
4485 if (MEM_P (str_rtx))
4486 {
4487 str_rtx = shallow_copy_rtx (str_rtx);
4488 set_mem_alias_set (str_rtx, 0);
4489 set_mem_expr (str_rtx, 0);
4490 }
4491
4492 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4493 if (bitpos + bitsize != str_bitsize)
4494 {
4495 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4496 str_mode);
4497 value = expand_and (str_mode, value, mask, NULL_RTX);
4498 }
4499 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4500 result = expand_binop (str_mode, binop, str_rtx,
4501 value, str_rtx, 1, OPTAB_WIDEN);
4502 if (result != str_rtx)
4503 emit_move_insn (str_rtx, result);
4504 return true;
4505
4506 default:
4507 break;
4508 }
4509
4510 return false;
4511 }
4512
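/* Editorial illustration, not part of the original source: for a
   hypothetical structure

       struct s { unsigned lo : 1; unsigned hi : 31; } x;

   an update such as "x.hi += 1", where HI occupies the topmost bits of
   the 32-bit word, needs no masking: the routine above shifts the
   addend into place and emits a single add on the containing word.
   Likewise "x.lo += 1" becomes an xor of the low bit, since adding 1
   to a one-bit field simply flips it.  */
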
4513 /* In the C++ memory model, consecutive bit fields in a structure are
4514 considered one memory location.
4515
4516 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4517 returns the bit range of consecutive bits in which this COMPONENT_REF
4518 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4519 and *OFFSET may be adjusted in the process.
4520
4521 If the access does not need to be restricted, 0 is returned in both
4522 *BITSTART and *BITEND. */
4523
4524 static void
4525 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4526 unsigned HOST_WIDE_INT *bitend,
4527 tree exp,
4528 HOST_WIDE_INT *bitpos,
4529 tree *offset)
4530 {
4531 HOST_WIDE_INT bitoffset;
4532 tree field, repr;
4533
4534 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4535
4536 field = TREE_OPERAND (exp, 1);
4537 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4538 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4539 need to limit the range we can access. */
4540 if (!repr)
4541 {
4542 *bitstart = *bitend = 0;
4543 return;
4544 }
4545
4546 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4547 part of a larger bit field, then the representative does not serve any
4548 useful purpose. This can occur in Ada. */
4549 if (handled_component_p (TREE_OPERAND (exp, 0)))
4550 {
4551 enum machine_mode rmode;
4552 HOST_WIDE_INT rbitsize, rbitpos;
4553 tree roffset;
4554 int unsignedp;
4555 int volatilep = 0;
4556 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4557 &roffset, &rmode, &unsignedp, &volatilep, false);
4558 if ((rbitpos % BITS_PER_UNIT) != 0)
4559 {
4560 *bitstart = *bitend = 0;
4561 return;
4562 }
4563 }
4564
4565 /* Compute the adjustment to bitpos from the offset of the field
4566 relative to the representative. DECL_FIELD_OFFSET of field and
4567 repr are the same by construction if they are not constants,
4568 see finish_bitfield_layout. */
4569 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4570 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4571 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4572 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4573 else
4574 bitoffset = 0;
4575 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4576 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4577
4578 /* If the adjustment is larger than bitpos, we would have a negative bit
4579 position for the lower bound and this may wreak havoc later. This can
4580 occur only if we have a non-null offset, so adjust offset and bitpos
4581 to make the lower bound non-negative. */
4582 if (bitoffset > *bitpos)
4583 {
4584 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4585
4586 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4587 gcc_assert (*offset != NULL_TREE);
4588
4589 *bitpos += adjust;
4590 *offset
4591 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4592 *bitstart = 0;
4593 }
4594 else
4595 *bitstart = *bitpos - bitoffset;
4596
4597 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4598 }
4599
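/* Editorial illustration, not part of the original source: in a
   hypothetical structure

       struct s { char a; int b : 7; int c : 9; char d; } x;

   the adjacent bitfields B and C share one
   DECL_BIT_FIELD_REPRESENTATIVE, so for a store to x.b the routine
   above reports the bit range spanning B and C together.  The store
   may be widened within that range but must not touch A or D, which
   are separate memory locations under the C++ memory model.  */
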
4600 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4601 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4602 DECL_RTL was not set yet, return NORTL. */
4603
4604 static inline bool
4605 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4606 {
4607 if (TREE_CODE (addr) != ADDR_EXPR)
4608 return false;
4609
4610 tree base = TREE_OPERAND (addr, 0);
4611
4612 if (!DECL_P (base)
4613 || TREE_ADDRESSABLE (base)
4614 || DECL_MODE (base) == BLKmode)
4615 return false;
4616
4617 if (!DECL_RTL_SET_P (base))
4618 return nortl;
4619
4620 return (!MEM_P (DECL_RTL (base)));
4621 }
4622
4623 /* Returns true if the MEM_REF REF refers to an object that does not
4624 reside in memory and has non-BLKmode. */
4625
4626 static inline bool
4627 mem_ref_refers_to_non_mem_p (tree ref)
4628 {
4629 tree base = TREE_OPERAND (ref, 0);
4630 return addr_expr_of_non_mem_decl_p_1 (base, false);
4631 }
4632
4633 /* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
4634 addressable. This is very much like mem_ref_refers_to_non_mem_p,
4635 but instead of the MEM_REF, it takes its base, and it doesn't
4636 assume a DECL is in memory just because its RTL is not set yet. */
4637
4638 bool
4639 addr_expr_of_non_mem_decl_p (tree op)
4640 {
4641 return addr_expr_of_non_mem_decl_p_1 (op, true);
4642 }
4643
4644 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4645 is true, try generating a nontemporal store. */
4646
4647 void
4648 expand_assignment (tree to, tree from, bool nontemporal)
4649 {
4650 rtx to_rtx = 0;
4651 rtx result;
4652 enum machine_mode mode;
4653 unsigned int align;
4654 enum insn_code icode;
4655
4656 /* Don't crash if the lhs of the assignment was erroneous. */
4657 if (TREE_CODE (to) == ERROR_MARK)
4658 {
4659 expand_normal (from);
4660 return;
4661 }
4662
4663 /* Optimize away no-op moves without side-effects. */
4664 if (operand_equal_p (to, from, 0))
4665 return;
4666
4667 /* Handle misaligned stores. */
4668 mode = TYPE_MODE (TREE_TYPE (to));
4669 if ((TREE_CODE (to) == MEM_REF
4670 || TREE_CODE (to) == TARGET_MEM_REF)
4671 && mode != BLKmode
4672 && !mem_ref_refers_to_non_mem_p (to)
4673 && ((align = get_object_alignment (to))
4674 < GET_MODE_ALIGNMENT (mode))
4675 && (((icode = optab_handler (movmisalign_optab, mode))
4676 != CODE_FOR_nothing)
4677 || SLOW_UNALIGNED_ACCESS (mode, align)))
4678 {
4679 rtx reg, mem;
4680
4681 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4682 reg = force_not_mem (reg);
4683 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4684
4685 if (icode != CODE_FOR_nothing)
4686 {
4687 struct expand_operand ops[2];
4688
4689 create_fixed_operand (&ops[0], mem);
4690 create_input_operand (&ops[1], reg, mode);
4691 /* The movmisalign<mode> pattern cannot fail, else the assignment
4692 would silently be omitted. */
4693 expand_insn (icode, 2, ops);
4694 }
4695 else
4696 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4697 0, 0, 0, mode, reg);
4698 return;
4699 }
4700
4701 /* Assignment of a structure component needs special treatment
4702 if the structure component's rtx is not simply a MEM.
4703 Assignment of an array element at a constant index, and assignment of
4704 an array element in an unaligned packed structure field, has the same
4705 problem. Same for (partially) storing into a non-memory object. */
4706 if (handled_component_p (to)
4707 || (TREE_CODE (to) == MEM_REF
4708 && mem_ref_refers_to_non_mem_p (to))
4709 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4710 {
4711 enum machine_mode mode1;
4712 HOST_WIDE_INT bitsize, bitpos;
4713 unsigned HOST_WIDE_INT bitregion_start = 0;
4714 unsigned HOST_WIDE_INT bitregion_end = 0;
4715 tree offset;
4716 int unsignedp;
4717 int volatilep = 0;
4718 tree tem;
4719
4720 push_temp_slots ();
4721 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4722 &unsignedp, &volatilep, true);
4723
4724 if (TREE_CODE (to) == COMPONENT_REF
4725 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4726 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4727
4728 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4729
4730 /* If the bitfield is volatile, we want to access it in the
4731 field's mode, not the computed mode.
4732 If a MEM has VOIDmode (external with incomplete type),
4733 use BLKmode for it instead. */
4734 if (MEM_P (to_rtx))
4735 {
4736 if (volatilep && flag_strict_volatile_bitfields > 0)
4737 to_rtx = adjust_address (to_rtx, mode1, 0);
4738 else if (GET_MODE (to_rtx) == VOIDmode)
4739 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4740 }
4741
4742 if (offset != 0)
4743 {
4744 enum machine_mode address_mode;
4745 rtx offset_rtx;
4746
4747 if (!MEM_P (to_rtx))
4748 {
4749 /* We can get constant negative offsets into arrays with broken
4750 user code. Translate this to a trap instead of ICEing. */
4751 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4752 expand_builtin_trap ();
4753 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4754 }
4755
4756 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4757 address_mode = get_address_mode (to_rtx);
4758 if (GET_MODE (offset_rtx) != address_mode)
4759 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4760
4761 /* A constant address in TO_RTX can have VOIDmode; we must not try
4762 to call force_reg in that case. Avoid it. */
4763 if (MEM_P (to_rtx)
4764 && GET_MODE (to_rtx) == BLKmode
4765 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4766 && bitsize > 0
4767 && (bitpos % bitsize) == 0
4768 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4769 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4770 {
4771 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4772 bitpos = 0;
4773 }
4774
4775 to_rtx = offset_address (to_rtx, offset_rtx,
4776 highest_pow2_factor_for_target (to,
4777 offset));
4778 }
4779
4780 /* No action is needed if the target is not a memory and the field
4781 lies completely outside that target. This can occur if the source
4782 code contains an out-of-bounds access to a small array. */
4783 if (!MEM_P (to_rtx)
4784 && GET_MODE (to_rtx) != BLKmode
4785 && (unsigned HOST_WIDE_INT) bitpos
4786 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4787 {
4788 expand_normal (from);
4789 result = NULL;
4790 }
4791 /* Handle expand_expr of a complex value returning a CONCAT. */
4792 else if (GET_CODE (to_rtx) == CONCAT)
4793 {
4794 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4795 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4796 && bitpos == 0
4797 && bitsize == mode_bitsize)
4798 result = store_expr (from, to_rtx, false, nontemporal);
4799 else if (bitsize == mode_bitsize / 2
4800 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4801 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4802 nontemporal);
4803 else if (bitpos + bitsize <= mode_bitsize / 2)
4804 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4805 bitregion_start, bitregion_end,
4806 mode1, from,
4807 get_alias_set (to), nontemporal);
4808 else if (bitpos >= mode_bitsize / 2)
4809 result = store_field (XEXP (to_rtx, 1), bitsize,
4810 bitpos - mode_bitsize / 2,
4811 bitregion_start, bitregion_end,
4812 mode1, from,
4813 get_alias_set (to), nontemporal);
4814 else if (bitpos == 0 && bitsize == mode_bitsize)
4815 {
4816 rtx from_rtx;
4817 result = expand_normal (from);
4818 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4819 TYPE_MODE (TREE_TYPE (from)), 0);
4820 emit_move_insn (XEXP (to_rtx, 0),
4821 read_complex_part (from_rtx, false));
4822 emit_move_insn (XEXP (to_rtx, 1),
4823 read_complex_part (from_rtx, true));
4824 }
4825 else
4826 {
4827 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4828 GET_MODE_SIZE (GET_MODE (to_rtx)));
4829 write_complex_part (temp, XEXP (to_rtx, 0), false);
4830 write_complex_part (temp, XEXP (to_rtx, 1), true);
4831 result = store_field (temp, bitsize, bitpos,
4832 bitregion_start, bitregion_end,
4833 mode1, from,
4834 get_alias_set (to), nontemporal);
4835 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4836 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4837 }
4838 }
4839 else
4840 {
4841 if (MEM_P (to_rtx))
4842 {
4843 /* If the field is at offset zero, we could have been given the
4844 DECL_RTX of the parent struct. Don't munge it. */
4845 to_rtx = shallow_copy_rtx (to_rtx);
4846 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4847 if (volatilep)
4848 MEM_VOLATILE_P (to_rtx) = 1;
4849 }
4850
4851 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4852 bitregion_start, bitregion_end,
4853 mode1,
4854 to_rtx, to, from))
4855 result = NULL;
4856 else
4857 result = store_field (to_rtx, bitsize, bitpos,
4858 bitregion_start, bitregion_end,
4859 mode1, from,
4860 get_alias_set (to), nontemporal);
4861 }
4862
4863 if (result)
4864 preserve_temp_slots (result);
4865 pop_temp_slots ();
4866 return;
4867 }
4868
4869 /* If the rhs is a function call and its value is not an aggregate,
4870 call the function before we start to compute the lhs.
4871 This is needed for correct code for cases such as
4872 val = setjmp (buf) on machines where reference to val
4873 requires loading up part of an address in a separate insn.
4874
4875 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4876 since it might be a promoted variable where the zero- or sign- extension
4877 needs to be done. Handling this in the normal way is safe because no
4878 computation is done before the call. The same is true for SSA names. */
4879 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4880 && COMPLETE_TYPE_P (TREE_TYPE (from))
4881 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4882 && ! (((TREE_CODE (to) == VAR_DECL
4883 || TREE_CODE (to) == PARM_DECL
4884 || TREE_CODE (to) == RESULT_DECL)
4885 && REG_P (DECL_RTL (to)))
4886 || TREE_CODE (to) == SSA_NAME))
4887 {
4888 rtx value;
4889
4890 push_temp_slots ();
4891 value = expand_normal (from);
4892 if (to_rtx == 0)
4893 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4894
4895 /* Handle calls that return values in multiple non-contiguous locations.
4896 The Irix 6 ABI has examples of this. */
4897 if (GET_CODE (to_rtx) == PARALLEL)
4898 {
4899 if (GET_CODE (value) == PARALLEL)
4900 emit_group_move (to_rtx, value);
4901 else
4902 emit_group_load (to_rtx, value, TREE_TYPE (from),
4903 int_size_in_bytes (TREE_TYPE (from)));
4904 }
4905 else if (GET_CODE (value) == PARALLEL)
4906 emit_group_store (to_rtx, value, TREE_TYPE (from),
4907 int_size_in_bytes (TREE_TYPE (from)));
4908 else if (GET_MODE (to_rtx) == BLKmode)
4909 {
4910 /* Handle calls that return BLKmode values in registers. */
4911 if (REG_P (value))
4912 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4913 else
4914 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4915 }
4916 else
4917 {
4918 if (POINTER_TYPE_P (TREE_TYPE (to)))
4919 value = convert_memory_address_addr_space
4920 (GET_MODE (to_rtx), value,
4921 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4922
4923 emit_move_insn (to_rtx, value);
4924 }
4925 preserve_temp_slots (to_rtx);
4926 pop_temp_slots ();
4927 return;
4928 }
4929
4930 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4932
4933 /* Don't move directly into a return register. */
4934 if (TREE_CODE (to) == RESULT_DECL
4935 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4936 {
4937 rtx temp;
4938
4939 push_temp_slots ();
4940
4941 /* If the source is itself a return value, it still is in a pseudo at
4942 this point so we can move it back to the return register directly. */
4943 if (REG_P (to_rtx)
4944 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4945 && TREE_CODE (from) != CALL_EXPR)
4946 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4947 else
4948 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4949
4950 /* Handle calls that return values in multiple non-contiguous locations.
4951 The Irix 6 ABI has examples of this. */
4952 if (GET_CODE (to_rtx) == PARALLEL)
4953 {
4954 if (GET_CODE (temp) == PARALLEL)
4955 emit_group_move (to_rtx, temp);
4956 else
4957 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4958 int_size_in_bytes (TREE_TYPE (from)));
4959 }
4960 else if (temp)
4961 emit_move_insn (to_rtx, temp);
4962
4963 preserve_temp_slots (to_rtx);
4964 pop_temp_slots ();
4965 return;
4966 }
4967
4968 /* In case we are returning the contents of an object which overlaps
4969 the place the value is being stored, use a safe function when copying
4970 a value through a pointer into a structure value return block. */
4971 if (TREE_CODE (to) == RESULT_DECL
4972 && TREE_CODE (from) == INDIRECT_REF
4973 && ADDR_SPACE_GENERIC_P
4974 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4975 && refs_may_alias_p (to, from)
4976 && cfun->returns_struct
4977 && !cfun->returns_pcc_struct)
4978 {
4979 rtx from_rtx, size;
4980
4981 push_temp_slots ();
4982 size = expr_size (from);
4983 from_rtx = expand_normal (from);
4984
4985 emit_library_call (memmove_libfunc, LCT_NORMAL,
4986 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4987 XEXP (from_rtx, 0), Pmode,
4988 convert_to_mode (TYPE_MODE (sizetype),
4989 size, TYPE_UNSIGNED (sizetype)),
4990 TYPE_MODE (sizetype));
4991
4992 preserve_temp_slots (to_rtx);
4993 pop_temp_slots ();
4994 return;
4995 }
4996
4997 /* Compute FROM and store the value in the rtx we got. */
4998
4999 push_temp_slots ();
5000 result = store_expr (from, to_rtx, 0, nontemporal);
5001 preserve_temp_slots (result);
5002 pop_temp_slots ();
5003 return;
5004 }
5005
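/* Editorial note, not part of the original source: the NONTEMPORAL
   flag threaded through expand_assignment and store_expr is what
   eventually causes the routine below to be tried; when the target
   provides no storent_optab pattern for the mode, emit_storent_insn
   returns false and an ordinary store is emitted instead.  */
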
5006 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5007 succeeded, false otherwise. */
5008
5009 bool
5010 emit_storent_insn (rtx to, rtx from)
5011 {
5012 struct expand_operand ops[2];
5013 enum machine_mode mode = GET_MODE (to);
5014 enum insn_code code = optab_handler (storent_optab, mode);
5015
5016 if (code == CODE_FOR_nothing)
5017 return false;
5018
5019 create_fixed_operand (&ops[0], to);
5020 create_input_operand (&ops[1], from, mode);
5021 return maybe_expand_insn (code, 2, ops);
5022 }
5023
5024 /* Generate code for computing expression EXP,
5025 and storing the value into TARGET.
5026
5027 If the mode is BLKmode then we may return TARGET itself.
5028 It turns out that in BLKmode it doesn't cause a problem,
5029 because C has no operators that could combine two different
5030 assignments into the same BLKmode object with different values
5031 with no sequence point. Will other languages need this to
5032 be more thorough?
5033
5034 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5035 stack, and block moves may need to be treated specially.
5036
5037 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5038
5039 rtx
5040 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5041 {
5042 rtx temp;
5043 rtx alt_rtl = NULL_RTX;
5044 location_t loc = curr_insn_location ();
5045
5046 if (VOID_TYPE_P (TREE_TYPE (exp)))
5047 {
5048 /* C++ can generate ?: expressions with a throw expression in one
5049 branch and an rvalue in the other. Here, we resolve attempts to
5050 store the throw expression's nonexistent result. */
5051 gcc_assert (!call_param_p);
5052 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5053 return NULL_RTX;
5054 }
5055 if (TREE_CODE (exp) == COMPOUND_EXPR)
5056 {
5057 /* Perform first part of compound expression, then assign from second
5058 part. */
5059 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5060 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5061 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5062 nontemporal);
5063 }
5064 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5065 {
5066 /* For a conditional expression, get a safe form of the target. Then
5067 test the condition, doing the appropriate assignment on either
5068 side. This avoids the creation of unnecessary temporaries.
5069 For non-BLKmode, it is more efficient not to do this. */
5070
5071 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5072
5073 do_pending_stack_adjust ();
5074 NO_DEFER_POP;
5075 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5076 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5077 nontemporal);
5078 emit_jump_insn (gen_jump (lab2));
5079 emit_barrier ();
5080 emit_label (lab1);
5081 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5082 nontemporal);
5083 emit_label (lab2);
5084 OK_DEFER_POP;
5085
5086 return NULL_RTX;
5087 }
5088 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5089 /* If this is a scalar in a register that is stored in a wider mode
5090 than the declared mode, compute the result into its declared mode
5091 and then convert to the wider mode. Our value is the computed
5092 expression. */
5093 {
5094 rtx inner_target = 0;
5095
5096 /* We can do the conversion inside EXP, which will often result
5097 in some optimizations. Do the conversion in two steps: first
5098 change the signedness, if needed, then the extend. But don't
5099 do this if the type of EXP is a subtype of something else
5100 since then the conversion might involve more than just
5101 converting modes. */
5102 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5103 && TREE_TYPE (TREE_TYPE (exp)) == 0
5104 && GET_MODE_PRECISION (GET_MODE (target))
5105 == TYPE_PRECISION (TREE_TYPE (exp)))
5106 {
5107 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5108 != SUBREG_PROMOTED_UNSIGNED_P (target))
5109 {
5110 /* Some types, e.g. Fortran's logical*4, won't have a signed
5111 version, so use the mode instead. */
5112 tree ntype
5113 = (signed_or_unsigned_type_for
5114 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5115 if (ntype == NULL)
5116 ntype = lang_hooks.types.type_for_mode
5117 (TYPE_MODE (TREE_TYPE (exp)),
5118 SUBREG_PROMOTED_UNSIGNED_P (target));
5119
5120 exp = fold_convert_loc (loc, ntype, exp);
5121 }
5122
5123 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5124 (GET_MODE (SUBREG_REG (target)),
5125 SUBREG_PROMOTED_UNSIGNED_P (target)),
5126 exp);
5127
5128 inner_target = SUBREG_REG (target);
5129 }
5130
5131 temp = expand_expr (exp, inner_target, VOIDmode,
5132 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5133
5134 /* If TEMP is a VOIDmode constant, use convert_modes to make
5135 sure that we properly convert it. */
5136 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5137 {
5138 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5139 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5140 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5141 GET_MODE (target), temp,
5142 SUBREG_PROMOTED_UNSIGNED_P (target));
5143 }
5144
5145 convert_move (SUBREG_REG (target), temp,
5146 SUBREG_PROMOTED_UNSIGNED_P (target));
5147
5148 return NULL_RTX;
5149 }
5150 else if ((TREE_CODE (exp) == STRING_CST
5151 || (TREE_CODE (exp) == MEM_REF
5152 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5153 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5154 == STRING_CST
5155 && integer_zerop (TREE_OPERAND (exp, 1))))
5156 && !nontemporal && !call_param_p
5157 && MEM_P (target))
5158 {
5159 /* Optimize initialization of an array with a STRING_CST. */
5160 HOST_WIDE_INT exp_len, str_copy_len;
5161 rtx dest_mem;
5162 tree str = TREE_CODE (exp) == STRING_CST
5163 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5164
5165 exp_len = int_expr_size (exp);
5166 if (exp_len <= 0)
5167 goto normal_expr;
5168
5169 if (TREE_STRING_LENGTH (str) <= 0)
5170 goto normal_expr;
5171
5172 str_copy_len = strlen (TREE_STRING_POINTER (str));
5173 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5174 goto normal_expr;
5175
5176 str_copy_len = TREE_STRING_LENGTH (str);
5177 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5178 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5179 {
5180 str_copy_len += STORE_MAX_PIECES - 1;
5181 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5182 }
5183 str_copy_len = MIN (str_copy_len, exp_len);
5184 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5185 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5186 MEM_ALIGN (target), false))
5187 goto normal_expr;
5188
5189 dest_mem = target;
5190
5191 dest_mem = store_by_pieces (dest_mem,
5192 str_copy_len, builtin_strncpy_read_str,
5193 CONST_CAST (char *,
5194 TREE_STRING_POINTER (str)),
5195 MEM_ALIGN (target), false,
5196 exp_len > str_copy_len ? 1 : 0);
5197 if (exp_len > str_copy_len)
5198 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5199 GEN_INT (exp_len - str_copy_len),
5200 BLOCK_OP_NORMAL);
5201 return NULL_RTX;
5202 }
5203 else
5204 {
5205 rtx tmp_target;
5206
5207 normal_expr:
5208 /* If we want to use a nontemporal store, force the value to
5209 a register first. */
5210 tmp_target = nontemporal ? NULL_RTX : target;
5211 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5212 (call_param_p
5213 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5214 &alt_rtl);
5215 }
5216
5217 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5218 the same as that of TARGET, adjust the constant. This is needed, for
5219 example, in case it is a CONST_DOUBLE and we want only a word-sized
5220 value. */
5221 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5222 && TREE_CODE (exp) != ERROR_MARK
5223 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5224 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5225 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5226
5227 /* If value was not generated in the target, store it there.
5228 Convert the value to TARGET's type first if necessary and emit the
5229 pending incrementations that have been queued when expanding EXP.
5230 Note that we cannot emit the whole queue blindly because this will
5231 effectively disable the POST_INC optimization later.
5232
5233 If TEMP and TARGET compare equal according to rtx_equal_p, but
5234 one or both of them are volatile memory refs, we have to distinguish
5235 two cases:
5236 - expand_expr has used TARGET. In this case, we must not generate
5237 another copy. This can be detected by TARGET being equal according
5238 to == .
5239 - expand_expr has not used TARGET - that means that the source just
5240 happens to have the same RTX form. Since temp will have been created
5241 by expand_expr, it will compare unequal according to == .
5242 We must generate a copy in this case, to reach the correct number
5243 of volatile memory references. */
5244
5245 if ((! rtx_equal_p (temp, target)
5246 || (temp != target && (side_effects_p (temp)
5247 || side_effects_p (target))))
5248 && TREE_CODE (exp) != ERROR_MARK
5249 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5250 but TARGET is not a valid memory reference, TEMP will differ
5251 from TARGET although it is really the same location. */
5252 && !(alt_rtl
5253 && rtx_equal_p (alt_rtl, target)
5254 && !side_effects_p (alt_rtl)
5255 && !side_effects_p (target))
5256 /* If there's nothing to copy, don't bother. Don't call
5257 expr_size unless necessary, because the expr_size hook of some
5258 front ends (C++) must not be given objects that are not
5259 supposed to be bit-copied or bit-initialized. */
5260 && expr_size (exp) != const0_rtx)
5261 {
5262 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5263 {
5264 if (GET_MODE (target) == BLKmode)
5265 {
5266 /* Handle calls that return BLKmode values in registers. */
5267 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5268 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5269 else
5270 store_bit_field (target,
5271 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5272 0, 0, 0, GET_MODE (temp), temp);
5273 }
5274 else
5275 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5276 }
5277
5278 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5279 {
5280 /* Handle copying a string constant into an array. The string
5281 constant may be shorter than the array. So copy just the string's
5282 actual length, and clear the rest. First get the size of the data
5283 type of the string, which is actually the size of the target. */
5284 rtx size = expr_size (exp);
5285
5286 if (CONST_INT_P (size)
5287 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5288 emit_block_move (target, temp, size,
5289 (call_param_p
5290 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5291 else
5292 {
5293 enum machine_mode pointer_mode
5294 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5295 enum machine_mode address_mode = get_address_mode (target);
5296
5297 /* Compute the size of the data to copy from the string. */
5298 tree copy_size
5299 = size_binop_loc (loc, MIN_EXPR,
5300 make_tree (sizetype, size),
5301 size_int (TREE_STRING_LENGTH (exp)));
5302 rtx copy_size_rtx
5303 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5304 (call_param_p
5305 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5306 rtx label = 0;
5307
5308 /* Copy that much. */
5309 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5310 TYPE_UNSIGNED (sizetype));
5311 emit_block_move (target, temp, copy_size_rtx,
5312 (call_param_p
5313 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5314
5315 /* Figure out how much is left in TARGET that we have to clear.
5316 Do all calculations in pointer_mode. */
5317 if (CONST_INT_P (copy_size_rtx))
5318 {
5319 size = plus_constant (address_mode, size,
5320 -INTVAL (copy_size_rtx));
5321 target = adjust_address (target, BLKmode,
5322 INTVAL (copy_size_rtx));
5323 }
5324 else
5325 {
5326 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5327 copy_size_rtx, NULL_RTX, 0,
5328 OPTAB_LIB_WIDEN);
5329
5330 if (GET_MODE (copy_size_rtx) != address_mode)
5331 copy_size_rtx = convert_to_mode (address_mode,
5332 copy_size_rtx,
5333 TYPE_UNSIGNED (sizetype));
5334
5335 target = offset_address (target, copy_size_rtx,
5336 highest_pow2_factor (copy_size));
5337 label = gen_label_rtx ();
5338 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5339 GET_MODE (size), 0, label);
5340 }
5341
5342 if (size != const0_rtx)
5343 clear_storage (target, size, BLOCK_OP_NORMAL);
5344
5345 if (label)
5346 emit_label (label);
5347 }
5348 }
5349 /* Handle calls that return values in multiple non-contiguous locations.
5350 The Irix 6 ABI has examples of this. */
5351 else if (GET_CODE (target) == PARALLEL)
5352 {
5353 if (GET_CODE (temp) == PARALLEL)
5354 emit_group_move (target, temp);
5355 else
5356 emit_group_load (target, temp, TREE_TYPE (exp),
5357 int_size_in_bytes (TREE_TYPE (exp)));
5358 }
5359 else if (GET_CODE (temp) == PARALLEL)
5360 emit_group_store (target, temp, TREE_TYPE (exp),
5361 int_size_in_bytes (TREE_TYPE (exp)));
5362 else if (GET_MODE (temp) == BLKmode)
5363 emit_block_move (target, temp, expr_size (exp),
5364 (call_param_p
5365 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5366 /* If we emit a nontemporal store, there is nothing else to do. */
5367 else if (nontemporal && emit_storent_insn (target, temp))
5368 ;
5369 else
5370 {
5371 temp = force_operand (temp, target);
5372 if (temp != target)
5373 emit_move_insn (target, temp);
5374 }
5375 }
5376
5377 return NULL_RTX;
5378 }
5379 \f
5380 /* Return true if field F of structure TYPE is a flexible array. */
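/* For instance, in a (hypothetical) declaration such as

     struct packet { int len; char data[]; };

   the trailing field DATA is a flexible array member: it is the last
   field, its array type has a zero lower bound and no upper bound, and
   the enclosing structure still has a known size.  */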
5381
5382 static bool
5383 flexible_array_member_p (const_tree f, const_tree type)
5384 {
5385 const_tree tf;
5386
5387 tf = TREE_TYPE (f);
5388 return (DECL_CHAIN (f) == NULL
5389 && TREE_CODE (tf) == ARRAY_TYPE
5390 && TYPE_DOMAIN (tf)
5391 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5392 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5393 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5394 && int_size_in_bytes (type) >= 0);
5395 }
5396
5397 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5398 must have in order for it to completely initialize a value of type TYPE.
5399 Return -1 if the number isn't known.
5400
5401 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
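/* As a rough illustration, for a (hypothetical) type such as

     struct s { int a; double b[3]; };

   FOR_CTOR_P asks for the number of top-level constructor elements (2),
   while !FOR_CTOR_P asks for an estimate of the total number of scalars
   (1 + 3 = 4).  */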
5402
5403 static HOST_WIDE_INT
5404 count_type_elements (const_tree type, bool for_ctor_p)
5405 {
5406 switch (TREE_CODE (type))
5407 {
5408 case ARRAY_TYPE:
5409 {
5410 tree nelts;
5411
5412 nelts = array_type_nelts (type);
5413 if (nelts && host_integerp (nelts, 1))
5414 {
5415 unsigned HOST_WIDE_INT n;
5416
5417 n = tree_low_cst (nelts, 1) + 1;
5418 if (n == 0 || for_ctor_p)
5419 return n;
5420 else
5421 return n * count_type_elements (TREE_TYPE (type), false);
5422 }
5423 return for_ctor_p ? -1 : 1;
5424 }
5425
5426 case RECORD_TYPE:
5427 {
5428 unsigned HOST_WIDE_INT n;
5429 tree f;
5430
5431 n = 0;
5432 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5433 if (TREE_CODE (f) == FIELD_DECL)
5434 {
5435 if (!for_ctor_p)
5436 n += count_type_elements (TREE_TYPE (f), false);
5437 else if (!flexible_array_member_p (f, type))
5438 /* Don't count flexible arrays, which are not supposed
5439 to be initialized. */
5440 n += 1;
5441 }
5442
5443 return n;
5444 }
5445
5446 case UNION_TYPE:
5447 case QUAL_UNION_TYPE:
5448 {
5449 tree f;
5450 HOST_WIDE_INT n, m;
5451
5452 gcc_assert (!for_ctor_p);
5453 /* Estimate the number of scalars in each field and pick the
5454 maximum. Other estimates would do instead; the idea is simply
5455 to make sure that the estimate is not sensitive to the ordering
5456 of the fields. */
5457 n = 1;
5458 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5459 if (TREE_CODE (f) == FIELD_DECL)
5460 {
5461 m = count_type_elements (TREE_TYPE (f), false);
5462 /* If the field doesn't span the whole union, add an extra
5463 scalar for the rest. */
5464 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5465 TYPE_SIZE (type)) != 1)
5466 m++;
5467 if (n < m)
5468 n = m;
5469 }
5470 return n;
5471 }
5472
5473 case COMPLEX_TYPE:
5474 return 2;
5475
5476 case VECTOR_TYPE:
5477 return TYPE_VECTOR_SUBPARTS (type);
5478
5479 case INTEGER_TYPE:
5480 case REAL_TYPE:
5481 case FIXED_POINT_TYPE:
5482 case ENUMERAL_TYPE:
5483 case BOOLEAN_TYPE:
5484 case POINTER_TYPE:
5485 case OFFSET_TYPE:
5486 case REFERENCE_TYPE:
5487 case NULLPTR_TYPE:
5488 return 1;
5489
5490 case ERROR_MARK:
5491 return 0;
5492
5493 case VOID_TYPE:
5494 case METHOD_TYPE:
5495 case FUNCTION_TYPE:
5496 case LANG_TYPE:
5497 default:
5498 gcc_unreachable ();
5499 }
5500 }
5501
5502 /* Helper for categorize_ctor_elements. Identical interface. */
5503
5504 static bool
5505 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5506 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5507 {
5508 unsigned HOST_WIDE_INT idx;
5509 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5510 tree value, purpose, elt_type;
5511
5512 /* Whether CTOR is a valid constant initializer, in accordance with what
5513 initializer_constant_valid_p does. If inferred from the constructor
5514 elements, true until proven otherwise. */
5515 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5516 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5517
5518 nz_elts = 0;
5519 init_elts = 0;
5520 num_fields = 0;
5521 elt_type = NULL_TREE;
5522
5523 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5524 {
5525 HOST_WIDE_INT mult = 1;
5526
5527 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5528 {
5529 tree lo_index = TREE_OPERAND (purpose, 0);
5530 tree hi_index = TREE_OPERAND (purpose, 1);
5531
5532 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5533 mult = (tree_low_cst (hi_index, 1)
5534 - tree_low_cst (lo_index, 1) + 1);
5535 }
5536 num_fields += mult;
5537 elt_type = TREE_TYPE (value);
5538
5539 switch (TREE_CODE (value))
5540 {
5541 case CONSTRUCTOR:
5542 {
5543 HOST_WIDE_INT nz = 0, ic = 0;
5544
5545 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5546 p_complete);
5547
5548 nz_elts += mult * nz;
5549 init_elts += mult * ic;
5550
5551 if (const_from_elts_p && const_p)
5552 const_p = const_elt_p;
5553 }
5554 break;
5555
5556 case INTEGER_CST:
5557 case REAL_CST:
5558 case FIXED_CST:
5559 if (!initializer_zerop (value))
5560 nz_elts += mult;
5561 init_elts += mult;
5562 break;
5563
5564 case STRING_CST:
5565 nz_elts += mult * TREE_STRING_LENGTH (value);
5566 init_elts += mult * TREE_STRING_LENGTH (value);
5567 break;
5568
5569 case COMPLEX_CST:
5570 if (!initializer_zerop (TREE_REALPART (value)))
5571 nz_elts += mult;
5572 if (!initializer_zerop (TREE_IMAGPART (value)))
5573 nz_elts += mult;
5574 init_elts += mult;
5575 break;
5576
5577 case VECTOR_CST:
5578 {
5579 unsigned i;
5580 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5581 {
5582 tree v = VECTOR_CST_ELT (value, i);
5583 if (!initializer_zerop (v))
5584 nz_elts += mult;
5585 init_elts += mult;
5586 }
5587 }
5588 break;
5589
5590 default:
5591 {
5592 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5593 nz_elts += mult * tc;
5594 init_elts += mult * tc;
5595
5596 if (const_from_elts_p && const_p)
5597 const_p = initializer_constant_valid_p (value, elt_type)
5598 != NULL_TREE;
5599 }
5600 break;
5601 }
5602 }
5603
5604 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5605 num_fields, elt_type))
5606 *p_complete = false;
5607
5608 *p_nz_elts += nz_elts;
5609 *p_init_elts += init_elts;
5610
5611 return const_p;
5612 }
5613
5614 /* Examine CTOR to discover:
5615 * how many scalar fields are set to nonzero values,
5616 and place it in *P_NZ_ELTS;
5617 * how many scalar fields in total are in CTOR,
5618 and place it in *P_INIT_ELTS.
5619 * whether the constructor is complete -- in the sense that every
5620 meaningful byte is explicitly given a value --
5621 and place it in *P_COMPLETE.
5622
5623 Return whether or not CTOR is a valid static constant initializer, the same
5624 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5625
5626 bool
5627 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5628 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5629 {
5630 *p_nz_elts = 0;
5631 *p_init_elts = 0;
5632 *p_complete = true;
5633
5634 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5635 }
5636
5637 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5638 of which had type LAST_TYPE. Each element was itself a complete
5639 initializer, in the sense that every meaningful byte was explicitly
5640 given a value. Return true if the same is true for the constructor
5641 as a whole. */
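/* For example, with a (hypothetical) union

     union u { int i; char c; };

   a constructor that initializes I is complete, because I is the same
   size as the union, whereas one that initializes only C leaves the
   remaining bytes uncovered and is therefore not complete.  */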
5642
5643 bool
5644 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5645 const_tree last_type)
5646 {
5647 if (TREE_CODE (type) == UNION_TYPE
5648 || TREE_CODE (type) == QUAL_UNION_TYPE)
5649 {
5650 if (num_elts == 0)
5651 return false;
5652
5653 gcc_assert (num_elts == 1 && last_type);
5654
5655 /* ??? We could look at each element of the union, and find the
5656 largest element, which would avoid comparing the size of the
5657 initialized element against any tail padding in the union.
5658 Doesn't seem worth the effort... */
5659 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5660 }
5661
5662 return count_type_elements (type, true) == num_elts;
5663 }
5664
5665 /* Return 1 if EXP contains mostly (3/4) zeros. */
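/* E.g., assuming all eight initializers below are recorded in the
   CONSTRUCTOR, a (hypothetical) local array

     int a[8] = { 0, 0, 0, 0, 0, 0, 0, 5 };

   is mostly zero (one nonzero element < 8 / 4), and an incomplete
   constructor is treated as mostly zero as well.  */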
5666
5667 static int
5668 mostly_zeros_p (const_tree exp)
5669 {
5670 if (TREE_CODE (exp) == CONSTRUCTOR)
5671 {
5672 HOST_WIDE_INT nz_elts, init_elts;
5673 bool complete_p;
5674
5675 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5676 return !complete_p || nz_elts < init_elts / 4;
5677 }
5678
5679 return initializer_zerop (exp);
5680 }
5681
5682 /* Return 1 if EXP contains all zeros. */
5683
5684 static int
5685 all_zeros_p (const_tree exp)
5686 {
5687 if (TREE_CODE (exp) == CONSTRUCTOR)
5688 {
5689 HOST_WIDE_INT nz_elts, init_elts;
5690 bool complete_p;
5691
5692 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5693 return nz_elts == 0;
5694 }
5695
5696 return initializer_zerop (exp);
5697 }
5698 \f
5699 /* Helper function for store_constructor.
5700 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5701 CLEARED is as for store_constructor.
5702 ALIAS_SET is the alias set to use for any stores.
5703
5704 This provides a recursive shortcut back to store_constructor when it isn't
5705 necessary to go through store_field. This is so that we can pass through
5706 the cleared field to let store_constructor know that we may not have to
5707 clear a substructure if the outer structure has already been cleared. */
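/* E.g. for a (hypothetical) partially specified initializer

     struct outer { struct { int x, y; } in; int z, w; } o = { { 0, 1 } };

   the missing fields typically cause the whole object to be cleared
   first, and the recursive store_constructor call for the nested
   CONSTRUCTOR then sees CLEARED set and only has to store the nonzero
   member Y.  */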
5708
5709 static void
5710 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5711 HOST_WIDE_INT bitpos, enum machine_mode mode,
5712 tree exp, int cleared, alias_set_type alias_set)
5713 {
5714 if (TREE_CODE (exp) == CONSTRUCTOR
5715 /* We can only call store_constructor recursively if the size and
5716 bit position are on a byte boundary. */
5717 && bitpos % BITS_PER_UNIT == 0
5718 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5719 /* If we have a nonzero bitpos for a register target, then we just
5720 let store_field do the bitfield handling. This is unlikely to
5721 generate unnecessary clear instructions anyways. */
5722 && (bitpos == 0 || MEM_P (target)))
5723 {
5724 if (MEM_P (target))
5725 target
5726 = adjust_address (target,
5727 GET_MODE (target) == BLKmode
5728 || 0 != (bitpos
5729 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5730 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5731
5733 /* Update the alias set, if required. */
5734 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5735 && MEM_ALIAS_SET (target) != 0)
5736 {
5737 target = copy_rtx (target);
5738 set_mem_alias_set (target, alias_set);
5739 }
5740
5741 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5742 }
5743 else
5744 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5745 }
5746
5747 /* Store the value of constructor EXP into the rtx TARGET.
5748 TARGET is either a REG or a MEM; we know it cannot conflict, since
5749 safe_from_p has been called.
5750 CLEARED is true if TARGET is known to have been zero'd.
5751 SIZE is the number of bytes of TARGET we are allowed to modify: this
5752 may not be the same as the size of EXP if we are assigning to a field
5753 which has been packed to exclude padding bits. */
5754
5755 static void
5756 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5757 {
5758 tree type = TREE_TYPE (exp);
5759 #ifdef WORD_REGISTER_OPERATIONS
5760 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5761 #endif
5762
5763 switch (TREE_CODE (type))
5764 {
5765 case RECORD_TYPE:
5766 case UNION_TYPE:
5767 case QUAL_UNION_TYPE:
5768 {
5769 unsigned HOST_WIDE_INT idx;
5770 tree field, value;
5771
5772 /* If size is zero or the target is already cleared, do nothing. */
5773 if (size == 0 || cleared)
5774 cleared = 1;
5775 /* We either clear the aggregate or indicate the value is dead. */
5776 else if ((TREE_CODE (type) == UNION_TYPE
5777 || TREE_CODE (type) == QUAL_UNION_TYPE)
5778 && ! CONSTRUCTOR_ELTS (exp))
5779 /* If the constructor is empty, clear the union. */
5780 {
5781 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5782 cleared = 1;
5783 }
5784
5785 /* If we are building a static constructor into a register,
5786 set the initial value as zero so we can fold the value into
5787 a constant. But if more than one register is involved,
5788 this probably loses. */
5789 else if (REG_P (target) && TREE_STATIC (exp)
5790 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5791 {
5792 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5793 cleared = 1;
5794 }
5795
5796 /* If the constructor has fewer fields than the structure or
5797 if we are initializing the structure to mostly zeros, clear
5798 the whole structure first. Don't do this if TARGET is a
5799 register whose mode size isn't equal to SIZE since
5800 clear_storage can't handle this case. */
5801 else if (size > 0
5802 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5803 != fields_length (type))
5804 || mostly_zeros_p (exp))
5805 && (!REG_P (target)
5806 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5807 == size)))
5808 {
5809 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5810 cleared = 1;
5811 }
5812
5813 if (REG_P (target) && !cleared)
5814 emit_clobber (target);
5815
5816 /* Store each element of the constructor into the
5817 corresponding field of TARGET. */
5818 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5819 {
5820 enum machine_mode mode;
5821 HOST_WIDE_INT bitsize;
5822 HOST_WIDE_INT bitpos = 0;
5823 tree offset;
5824 rtx to_rtx = target;
5825
5826 /* Just ignore missing fields. We cleared the whole
5827 structure, above, if any fields are missing. */
5828 if (field == 0)
5829 continue;
5830
5831 if (cleared && initializer_zerop (value))
5832 continue;
5833
5834 if (host_integerp (DECL_SIZE (field), 1))
5835 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5836 else
5837 bitsize = -1;
5838
5839 mode = DECL_MODE (field);
5840 if (DECL_BIT_FIELD (field))
5841 mode = VOIDmode;
5842
5843 offset = DECL_FIELD_OFFSET (field);
5844 if (host_integerp (offset, 0)
5845 && host_integerp (bit_position (field), 0))
5846 {
5847 bitpos = int_bit_position (field);
5848 offset = 0;
5849 }
5850 else
5851 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5852
5853 if (offset)
5854 {
5855 enum machine_mode address_mode;
5856 rtx offset_rtx;
5857
5858 offset
5859 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5860 make_tree (TREE_TYPE (exp),
5861 target));
5862
5863 offset_rtx = expand_normal (offset);
5864 gcc_assert (MEM_P (to_rtx));
5865
5866 address_mode = get_address_mode (to_rtx);
5867 if (GET_MODE (offset_rtx) != address_mode)
5868 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5869
5870 to_rtx = offset_address (to_rtx, offset_rtx,
5871 highest_pow2_factor (offset));
5872 }
5873
5874 #ifdef WORD_REGISTER_OPERATIONS
5875 /* If this initializes a field that is smaller than a
5876 word, at the start of a word, try to widen it to a full
5877 word. This special case allows us to output C++ member
5878 function initializations in a form that the optimizers
5879 can understand. */
5880 if (REG_P (target)
5881 && bitsize < BITS_PER_WORD
5882 && bitpos % BITS_PER_WORD == 0
5883 && GET_MODE_CLASS (mode) == MODE_INT
5884 && TREE_CODE (value) == INTEGER_CST
5885 && exp_size >= 0
5886 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5887 {
5888 tree type = TREE_TYPE (value);
5889
5890 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5891 {
5892 type = lang_hooks.types.type_for_mode
5893 (word_mode, TYPE_UNSIGNED (type));
5894 value = fold_convert (type, value);
5895 }
5896
5897 if (BYTES_BIG_ENDIAN)
5898 value
5899 = fold_build2 (LSHIFT_EXPR, type, value,
5900 build_int_cst (type,
5901 BITS_PER_WORD - bitsize));
5902 bitsize = BITS_PER_WORD;
5903 mode = word_mode;
5904 }
5905 #endif
5906
5907 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5908 && DECL_NONADDRESSABLE_P (field))
5909 {
5910 to_rtx = copy_rtx (to_rtx);
5911 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5912 }
5913
5914 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5915 value, cleared,
5916 get_alias_set (TREE_TYPE (field)));
5917 }
5918 break;
5919 }
5920 case ARRAY_TYPE:
5921 {
5922 tree value, index;
5923 unsigned HOST_WIDE_INT i;
5924 int need_to_clear;
5925 tree domain;
5926 tree elttype = TREE_TYPE (type);
5927 int const_bounds_p;
5928 HOST_WIDE_INT minelt = 0;
5929 HOST_WIDE_INT maxelt = 0;
5930
5931 domain = TYPE_DOMAIN (type);
5932 const_bounds_p = (TYPE_MIN_VALUE (domain)
5933 && TYPE_MAX_VALUE (domain)
5934 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5935 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5936
5937 /* If we have constant bounds for the range of the type, get them. */
5938 if (const_bounds_p)
5939 {
5940 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5941 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5942 }
5943
5944 /* If the constructor has fewer elements than the array, clear
5945 the whole array first. Similarly if this is a static
5946 constructor of a non-BLKmode object. */
5947 if (cleared)
5948 need_to_clear = 0;
5949 else if (REG_P (target) && TREE_STATIC (exp))
5950 need_to_clear = 1;
5951 else
5952 {
5953 unsigned HOST_WIDE_INT idx;
5954 tree index, value;
5955 HOST_WIDE_INT count = 0, zero_count = 0;
5956 need_to_clear = ! const_bounds_p;
5957
5958 /* This loop is a more accurate version of the loop in
5959 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5960 is also needed to check for missing elements. */
5961 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5962 {
5963 HOST_WIDE_INT this_node_count;
5964
5965 if (need_to_clear)
5966 break;
5967
5968 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5969 {
5970 tree lo_index = TREE_OPERAND (index, 0);
5971 tree hi_index = TREE_OPERAND (index, 1);
5972
5973 if (! host_integerp (lo_index, 1)
5974 || ! host_integerp (hi_index, 1))
5975 {
5976 need_to_clear = 1;
5977 break;
5978 }
5979
5980 this_node_count = (tree_low_cst (hi_index, 1)
5981 - tree_low_cst (lo_index, 1) + 1);
5982 }
5983 else
5984 this_node_count = 1;
5985
5986 count += this_node_count;
5987 if (mostly_zeros_p (value))
5988 zero_count += this_node_count;
5989 }
5990
5991 /* Clear the entire array first if there are any missing
5992 elements, or if the incidence of zero elements is >=
5993 75%. */
5994 if (! need_to_clear
5995 && (count < maxelt - minelt + 1
5996 || 4 * zero_count >= 3 * count))
5997 need_to_clear = 1;
5998 }
5999
6000 if (need_to_clear && size > 0)
6001 {
6002 if (REG_P (target))
6003 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6004 else
6005 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6006 cleared = 1;
6007 }
6008
6009 if (!cleared && REG_P (target))
6010 /* Inform later passes that the old value is dead. */
6011 emit_clobber (target);
6012
6013 /* Store each element of the constructor into the
6014 corresponding element of TARGET, determined by counting the
6015 elements. */
6016 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6017 {
6018 enum machine_mode mode;
6019 HOST_WIDE_INT bitsize;
6020 HOST_WIDE_INT bitpos;
6021 rtx xtarget = target;
6022
6023 if (cleared && initializer_zerop (value))
6024 continue;
6025
6026 mode = TYPE_MODE (elttype);
6027 if (mode == BLKmode)
6028 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6029 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6030 : -1);
6031 else
6032 bitsize = GET_MODE_BITSIZE (mode);
6033
6034 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6035 {
6036 tree lo_index = TREE_OPERAND (index, 0);
6037 tree hi_index = TREE_OPERAND (index, 1);
6038 rtx index_r, pos_rtx;
6039 HOST_WIDE_INT lo, hi, count;
6040 tree position;
6041
6042 /* If the range is constant and "small", unroll the loop. */
6043 if (const_bounds_p
6044 && host_integerp (lo_index, 0)
6045 && host_integerp (hi_index, 0)
6046 && (lo = tree_low_cst (lo_index, 0),
6047 hi = tree_low_cst (hi_index, 0),
6048 count = hi - lo + 1,
6049 (!MEM_P (target)
6050 || count <= 2
6051 || (host_integerp (TYPE_SIZE (elttype), 1)
6052 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6053 <= 40 * 8)))))
6054 {
6055 lo -= minelt; hi -= minelt;
6056 for (; lo <= hi; lo++)
6057 {
6058 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6059
6060 if (MEM_P (target)
6061 && !MEM_KEEP_ALIAS_SET_P (target)
6062 && TREE_CODE (type) == ARRAY_TYPE
6063 && TYPE_NONALIASED_COMPONENT (type))
6064 {
6065 target = copy_rtx (target);
6066 MEM_KEEP_ALIAS_SET_P (target) = 1;
6067 }
6068
6069 store_constructor_field
6070 (target, bitsize, bitpos, mode, value, cleared,
6071 get_alias_set (elttype));
6072 }
6073 }
6074 else
6075 {
6076 rtx loop_start = gen_label_rtx ();
6077 rtx loop_end = gen_label_rtx ();
6078 tree exit_cond;
6079
6080 expand_normal (hi_index);
6081
6082 index = build_decl (EXPR_LOCATION (exp),
6083 VAR_DECL, NULL_TREE, domain);
6084 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6085 SET_DECL_RTL (index, index_r);
6086 store_expr (lo_index, index_r, 0, false);
6087
6088 /* Build the head of the loop. */
6089 do_pending_stack_adjust ();
6090 emit_label (loop_start);
6091
6092 /* Assign value to element index. */
6093 position =
6094 fold_convert (ssizetype,
6095 fold_build2 (MINUS_EXPR,
6096 TREE_TYPE (index),
6097 index,
6098 TYPE_MIN_VALUE (domain)));
6099
6100 position =
6101 size_binop (MULT_EXPR, position,
6102 fold_convert (ssizetype,
6103 TYPE_SIZE_UNIT (elttype)));
6104
6105 pos_rtx = expand_normal (position);
6106 xtarget = offset_address (target, pos_rtx,
6107 highest_pow2_factor (position));
6108 xtarget = adjust_address (xtarget, mode, 0);
6109 if (TREE_CODE (value) == CONSTRUCTOR)
6110 store_constructor (value, xtarget, cleared,
6111 bitsize / BITS_PER_UNIT);
6112 else
6113 store_expr (value, xtarget, 0, false);
6114
6115 /* Generate a conditional jump to exit the loop. */
6116 exit_cond = build2 (LT_EXPR, integer_type_node,
6117 index, hi_index);
6118 jumpif (exit_cond, loop_end, -1);
6119
6120 /* Update the loop counter, and jump to the head of
6121 the loop. */
6122 expand_assignment (index,
6123 build2 (PLUS_EXPR, TREE_TYPE (index),
6124 index, integer_one_node),
6125 false);
6126
6127 emit_jump (loop_start);
6128
6129 /* Build the end of the loop. */
6130 emit_label (loop_end);
6131 }
6132 }
6133 else if ((index != 0 && ! host_integerp (index, 0))
6134 || ! host_integerp (TYPE_SIZE (elttype), 1))
6135 {
6136 tree position;
6137
6138 if (index == 0)
6139 index = ssize_int (1);
6140
6141 if (minelt)
6142 index = fold_convert (ssizetype,
6143 fold_build2 (MINUS_EXPR,
6144 TREE_TYPE (index),
6145 index,
6146 TYPE_MIN_VALUE (domain)));
6147
6148 position =
6149 size_binop (MULT_EXPR, index,
6150 fold_convert (ssizetype,
6151 TYPE_SIZE_UNIT (elttype)));
6152 xtarget = offset_address (target,
6153 expand_normal (position),
6154 highest_pow2_factor (position));
6155 xtarget = adjust_address (xtarget, mode, 0);
6156 store_expr (value, xtarget, 0, false);
6157 }
6158 else
6159 {
6160 if (index != 0)
6161 bitpos = ((tree_low_cst (index, 0) - minelt)
6162 * tree_low_cst (TYPE_SIZE (elttype), 1));
6163 else
6164 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6165
6166 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6167 && TREE_CODE (type) == ARRAY_TYPE
6168 && TYPE_NONALIASED_COMPONENT (type))
6169 {
6170 target = copy_rtx (target);
6171 MEM_KEEP_ALIAS_SET_P (target) = 1;
6172 }
6173 store_constructor_field (target, bitsize, bitpos, mode, value,
6174 cleared, get_alias_set (elttype));
6175 }
6176 }
6177 break;
6178 }
6179
6180 case VECTOR_TYPE:
6181 {
6182 unsigned HOST_WIDE_INT idx;
6183 constructor_elt *ce;
6184 int i;
6185 int need_to_clear;
6186 int icode = CODE_FOR_nothing;
6187 tree elttype = TREE_TYPE (type);
6188 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6189 enum machine_mode eltmode = TYPE_MODE (elttype);
6190 HOST_WIDE_INT bitsize;
6191 HOST_WIDE_INT bitpos;
6192 rtvec vector = NULL;
6193 unsigned n_elts;
6194 alias_set_type alias;
6195
6196 gcc_assert (eltmode != BLKmode);
6197
6198 n_elts = TYPE_VECTOR_SUBPARTS (type);
6199 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6200 {
6201 enum machine_mode mode = GET_MODE (target);
6202
6203 icode = (int) optab_handler (vec_init_optab, mode);
6204 if (icode != CODE_FOR_nothing)
6205 {
6206 unsigned int i;
6207
6208 vector = rtvec_alloc (n_elts);
6209 for (i = 0; i < n_elts; i++)
6210 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6211 }
6212 }
6213
6214 /* If the constructor has fewer elements than the vector,
6215 clear the whole vector first. Similarly if this is a static
6216 constructor of a non-BLKmode object. */
6217 if (cleared)
6218 need_to_clear = 0;
6219 else if (REG_P (target) && TREE_STATIC (exp))
6220 need_to_clear = 1;
6221 else
6222 {
6223 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6224 tree value;
6225
6226 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6227 {
6228 int n_elts_here = tree_low_cst
6229 (int_const_binop (TRUNC_DIV_EXPR,
6230 TYPE_SIZE (TREE_TYPE (value)),
6231 TYPE_SIZE (elttype)), 1);
6232
6233 count += n_elts_here;
6234 if (mostly_zeros_p (value))
6235 zero_count += n_elts_here;
6236 }
6237
6238 /* Clear the entire vector first if there are any missing elements,
6239 or if the incidence of zero elements is >= 75%. */
6240 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6241 }
6242
6243 if (need_to_clear && size > 0 && !vector)
6244 {
6245 if (REG_P (target))
6246 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6247 else
6248 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6249 cleared = 1;
6250 }
6251
6252 /* Inform later passes that the old value is dead. */
6253 if (!cleared && !vector && REG_P (target))
6254 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6255
6256 if (MEM_P (target))
6257 alias = MEM_ALIAS_SET (target);
6258 else
6259 alias = get_alias_set (elttype);
6260
6261 /* Store each element of the constructor into the corresponding
6262 element of TARGET, determined by counting the elements. */
6263 for (idx = 0, i = 0;
6264 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6265 idx++, i += bitsize / elt_size)
6266 {
6267 HOST_WIDE_INT eltpos;
6268 tree value = ce->value;
6269
6270 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6271 if (cleared && initializer_zerop (value))
6272 continue;
6273
6274 if (ce->index)
6275 eltpos = tree_low_cst (ce->index, 1);
6276 else
6277 eltpos = i;
6278
6279 if (vector)
6280 {
6281 /* Vector CONSTRUCTORs should only be built from smaller
6282 vectors in the case of BLKmode vectors. */
6283 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6284 RTVEC_ELT (vector, eltpos)
6285 = expand_normal (value);
6286 }
6287 else
6288 {
6289 enum machine_mode value_mode =
6290 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6291 ? TYPE_MODE (TREE_TYPE (value))
6292 : eltmode;
6293 bitpos = eltpos * elt_size;
6294 store_constructor_field (target, bitsize, bitpos, value_mode,
6295 value, cleared, alias);
6296 }
6297 }
6298
6299 if (vector)
6300 emit_insn (GEN_FCN (icode)
6301 (target,
6302 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6303 break;
6304 }
6305
6306 default:
6307 gcc_unreachable ();
6308 }
6309 }
6310
6311 /* Store the value of EXP (an expression tree)
6312 into a subfield of TARGET which has mode MODE and occupies
6313 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6314 If MODE is VOIDmode, it means that we are storing into a bit-field.
6315
6316 BITREGION_START is bitpos of the first bitfield in this region.
6317 BITREGION_END is the bitpos of the ending bitfield in this region.
6318 These two fields are 0, if the C++ memory model does not apply,
6319 or we are not interested in keeping track of bitfield regions.
6320
6321 Always return const0_rtx unless we have something particular to
6322 return.
6323
6324 ALIAS_SET is the alias set for the destination. This value will
6325 (in general) be different from that for TARGET, since TARGET is a
6326 reference to the containing structure.
6327
6328 If NONTEMPORAL is true, try generating a nontemporal store. */
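/* E.g. for a (hypothetical) bit-field store such as

     struct s { unsigned a : 3, b : 5; };
     void f (struct s *p) { p->b = 7; }

   the assignment to B arrives here with MODE == VOIDmode and, on a
   typical target, BITPOS == 3, so it is emitted with store_bit_field
   rather than as an ordinary memory store.  */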
6329
6330 static rtx
6331 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6332 unsigned HOST_WIDE_INT bitregion_start,
6333 unsigned HOST_WIDE_INT bitregion_end,
6334 enum machine_mode mode, tree exp,
6335 alias_set_type alias_set, bool nontemporal)
6336 {
6337 if (TREE_CODE (exp) == ERROR_MARK)
6338 return const0_rtx;
6339
6340 /* If we have nothing to store, do nothing unless the expression has
6341 side-effects. */
6342 if (bitsize == 0)
6343 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6344
6345 if (GET_CODE (target) == CONCAT)
6346 {
6347 /* We're storing into a struct containing a single __complex. */
6348
6349 gcc_assert (!bitpos);
6350 return store_expr (exp, target, 0, nontemporal);
6351 }
6352
6353 /* If the structure is in a register or if the component
6354 is a bit field, we cannot use addressing to access it.
6355 Use bit-field techniques or SUBREG to store in it. */
6356
6357 if (mode == VOIDmode
6358 || (mode != BLKmode && ! direct_store[(int) mode]
6359 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6360 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6361 || REG_P (target)
6362 || GET_CODE (target) == SUBREG
6363 /* If the field isn't aligned enough to store as an ordinary memref,
6364 store it as a bit field. */
6365 || (mode != BLKmode
6366 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6367 || bitpos % GET_MODE_ALIGNMENT (mode))
6368 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6369 || (bitpos % BITS_PER_UNIT != 0)))
6370 || (bitsize >= 0 && mode != BLKmode
6371 && GET_MODE_BITSIZE (mode) > bitsize)
6372 /* If the RHS and field are a constant size and the size of the
6373 RHS isn't the same size as the bitfield, we must use bitfield
6374 operations. */
6375 || (bitsize >= 0
6376 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6377 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6378 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6379 decl we must use bitfield operations. */
6380 || (bitsize >= 0
6381 && TREE_CODE (exp) == MEM_REF
6382 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6383 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6384 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6385 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6386 {
6387 rtx temp;
6388 gimple nop_def;
6389
6390 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6391 implies a mask operation. If the precision is the same size as
6392 the field we're storing into, that mask is redundant. This is
6393 particularly common with bit field assignments generated by the
6394 C front end. */
6395 nop_def = get_def_for_expr (exp, NOP_EXPR);
6396 if (nop_def)
6397 {
6398 tree type = TREE_TYPE (exp);
6399 if (INTEGRAL_TYPE_P (type)
6400 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6401 && bitsize == TYPE_PRECISION (type))
6402 {
6403 tree op = gimple_assign_rhs1 (nop_def);
6404 type = TREE_TYPE (op);
6405 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6406 exp = op;
6407 }
6408 }
6409
6410 temp = expand_normal (exp);
6411
6412 /* If BITSIZE is narrower than the size of the type of EXP
6413 we will be narrowing TEMP. Normally, what's wanted are the
6414 low-order bits. However, if EXP's type is a record and this is
6415 a big-endian machine, we want the upper BITSIZE bits.
6416 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6417 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6418 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6419 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6420 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6421 NULL_RTX, 1);
6422
6423 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6424 if (mode != VOIDmode && mode != BLKmode
6425 && mode != TYPE_MODE (TREE_TYPE (exp)))
6426 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6427
6428 /* If the modes of TEMP and TARGET are both BLKmode, both
6429 must be in memory and BITPOS must be aligned on a byte
6430 boundary. If so, we simply do a block copy. Likewise
6431 for a BLKmode-like TARGET. */
6432 if (GET_MODE (temp) == BLKmode
6433 && (GET_MODE (target) == BLKmode
6434 || (MEM_P (target)
6435 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6436 && (bitpos % BITS_PER_UNIT) == 0
6437 && (bitsize % BITS_PER_UNIT) == 0)))
6438 {
6439 gcc_assert (MEM_P (target) && MEM_P (temp)
6440 && (bitpos % BITS_PER_UNIT) == 0);
6441
6442 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6443 emit_block_move (target, temp,
6444 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6445 / BITS_PER_UNIT),
6446 BLOCK_OP_NORMAL);
6447
6448 return const0_rtx;
6449 }
6450
6451 /* Handle calls that return values in multiple non-contiguous locations.
6452 The Irix 6 ABI has examples of this. */
6453 if (GET_CODE (temp) == PARALLEL)
6454 {
6455 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6456 rtx temp_target;
6457 if (mode == BLKmode)
6458 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6459 temp_target = gen_reg_rtx (mode);
6460 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6461 temp = temp_target;
6462 }
6463 else if (mode == BLKmode)
6464 {
6465 /* Handle calls that return BLKmode values in registers. */
6466 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6467 {
6468 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6469 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6470 temp = temp_target;
6471 }
6472 else
6473 {
6474 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6475 rtx temp_target;
6476 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6477 temp_target = gen_reg_rtx (mode);
6478 temp_target
6479 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6480 temp_target, mode, mode);
6481 temp = temp_target;
6482 }
6483 }
6484
6485 /* Store the value in the bitfield. */
6486 store_bit_field (target, bitsize, bitpos,
6487 bitregion_start, bitregion_end,
6488 mode, temp);
6489
6490 return const0_rtx;
6491 }
6492 else
6493 {
6494 /* Now build a reference to just the desired component. */
6495 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6496
6497 if (to_rtx == target)
6498 to_rtx = copy_rtx (to_rtx);
6499
6500 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6501 set_mem_alias_set (to_rtx, alias_set);
6502
6503 return store_expr (exp, to_rtx, 0, nontemporal);
6504 }
6505 }
6506 \f
6507 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6508 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6509 codes and find the ultimate containing object, which we return.
6510
6511 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6512 bit position, and *PUNSIGNEDP to the signedness of the field.
6513 If the position of the field is variable, we store a tree
6514 giving the variable offset (in units) in *POFFSET.
6515 This offset is in addition to the bit position.
6516 If the position is not variable, we store 0 in *POFFSET.
6517
6518 If any of the extraction expressions is volatile,
6519 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6520
6521 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6522 Otherwise, it is a mode that can be used to access the field.
6523
6524 If the field describes a variable-sized object, *PMODE is set to
6525 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6526 this case, but the address of the object can be found.
6527
6528 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6529 look through nodes that serve as markers of a greater alignment than
6530 the one that can be deduced from the expression. These nodes make it
6531 possible for front-ends to prevent temporaries from being created by
6532 the middle-end on alignment considerations. For that purpose, the
6533 normal operating mode at high-level is to always pass FALSE so that
6534 the ultimate containing object is really returned; moreover, the
6535 associated predicate handled_component_p will always return TRUE
6536 on these nodes, thus indicating that they are essentially handled
6537 by get_inner_reference. TRUE should only be passed when the caller
6538 is scanning the expression in order to build another representation
6539 and specifically knows how to handle these nodes; as such, this is
6540 the normal operating mode in the RTL expanders. */
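/* As a (hypothetical) illustration, for a reference P->A[3] with

     struct s { int pad; int a[10]; } *p;

   this walks the ARRAY_REF and COMPONENT_REF and returns the innermost
   object *P, with *PBITSIZE == 32, *PBITPOS == 128 (byte 16),
   *POFFSET == NULL_TREE and *PMODE the mode of int, assuming a 32-bit
   int target.  */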
6541
6542 tree
6543 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6544 HOST_WIDE_INT *pbitpos, tree *poffset,
6545 enum machine_mode *pmode, int *punsignedp,
6546 int *pvolatilep, bool keep_aligning)
6547 {
6548 tree size_tree = 0;
6549 enum machine_mode mode = VOIDmode;
6550 bool blkmode_bitfield = false;
6551 tree offset = size_zero_node;
6552 double_int bit_offset = double_int_zero;
6553
6554 /* First get the mode, signedness, and size. We do this from just the
6555 outermost expression. */
6556 *pbitsize = -1;
6557 if (TREE_CODE (exp) == COMPONENT_REF)
6558 {
6559 tree field = TREE_OPERAND (exp, 1);
6560 size_tree = DECL_SIZE (field);
6561 if (flag_strict_volatile_bitfields > 0
6562 && TREE_THIS_VOLATILE (exp)
6563 && DECL_BIT_FIELD_TYPE (field)
6564 && DECL_MODE (field) != BLKmode)
6565 /* Volatile bitfields should be accessed in the mode of the
6566 field's type, not the mode computed based on the bit
6567 size. */
6568 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6569 else if (!DECL_BIT_FIELD (field))
6570 mode = DECL_MODE (field);
6571 else if (DECL_MODE (field) == BLKmode)
6572 blkmode_bitfield = true;
6573
6574 *punsignedp = DECL_UNSIGNED (field);
6575 }
6576 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6577 {
6578 size_tree = TREE_OPERAND (exp, 1);
6579 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6580 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6581
6582 /* For vector types, with the correct size of access, use the mode of
6583 inner type. */
6584 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6585 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6586 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6587 mode = TYPE_MODE (TREE_TYPE (exp));
6588 }
6589 else
6590 {
6591 mode = TYPE_MODE (TREE_TYPE (exp));
6592 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6593
6594 if (mode == BLKmode)
6595 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6596 else
6597 *pbitsize = GET_MODE_BITSIZE (mode);
6598 }
6599
6600 if (size_tree != 0)
6601 {
6602 if (! host_integerp (size_tree, 1))
6603 mode = BLKmode, *pbitsize = -1;
6604 else
6605 *pbitsize = tree_low_cst (size_tree, 1);
6606 }
6607
6608 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6609 and find the ultimate containing object. */
6610 while (1)
6611 {
6612 switch (TREE_CODE (exp))
6613 {
6614 case BIT_FIELD_REF:
6615 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6616 break;
6617
6618 case COMPONENT_REF:
6619 {
6620 tree field = TREE_OPERAND (exp, 1);
6621 tree this_offset = component_ref_field_offset (exp);
6622
6623 /* If this field hasn't been filled in yet, don't go past it.
6624 This should only happen when folding expressions made during
6625 type construction. */
6626 if (this_offset == 0)
6627 break;
6628
6629 offset = size_binop (PLUS_EXPR, offset, this_offset);
6630 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6631
6632 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6633 }
6634 break;
6635
6636 case ARRAY_REF:
6637 case ARRAY_RANGE_REF:
6638 {
6639 tree index = TREE_OPERAND (exp, 1);
6640 tree low_bound = array_ref_low_bound (exp);
6641 tree unit_size = array_ref_element_size (exp);
6642
6643 /* We assume all arrays have sizes that are a multiple of a byte.
6644 First subtract the lower bound, if any, in the type of the
6645 index, then convert to sizetype and multiply by the size of
6646 the array element. */
6647 if (! integer_zerop (low_bound))
6648 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6649 index, low_bound);
6650
6651 offset = size_binop (PLUS_EXPR, offset,
6652 size_binop (MULT_EXPR,
6653 fold_convert (sizetype, index),
6654 unit_size));
6655 }
6656 break;
6657
6658 case REALPART_EXPR:
6659 break;
6660
6661 case IMAGPART_EXPR:
6662 bit_offset += double_int::from_uhwi (*pbitsize);
6663 break;
6664
6665 case VIEW_CONVERT_EXPR:
6666 if (keep_aligning && STRICT_ALIGNMENT
6667 && (TYPE_ALIGN (TREE_TYPE (exp))
6668 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6669 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6670 < BIGGEST_ALIGNMENT)
6671 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6672 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6673 goto done;
6674 break;
6675
6676 case MEM_REF:
6677 /* Hand back the decl for MEM[&decl, off]. */
6678 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6679 {
6680 tree off = TREE_OPERAND (exp, 1);
6681 if (!integer_zerop (off))
6682 {
6683 double_int boff, coff = mem_ref_offset (exp);
6684 boff = coff.lshift (BITS_PER_UNIT == 8
6685 ? 3 : exact_log2 (BITS_PER_UNIT));
6686 bit_offset += boff;
6687 }
6688 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6689 }
6690 goto done;
6691
6692 default:
6693 goto done;
6694 }
6695
6696 /* If any reference in the chain is volatile, the effect is volatile. */
6697 if (TREE_THIS_VOLATILE (exp))
6698 *pvolatilep = 1;
6699
6700 exp = TREE_OPERAND (exp, 0);
6701 }
6702 done:
6703
6704 /* If OFFSET is constant, see if we can return the whole thing as a
6705 constant bit position. Make sure to handle overflow during
6706 this conversion. */
6707 if (TREE_CODE (offset) == INTEGER_CST)
6708 {
6709 double_int tem = tree_to_double_int (offset);
6710 tem = tem.sext (TYPE_PRECISION (sizetype));
6711 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6712 tem += bit_offset;
6713 if (tem.fits_shwi ())
6714 {
6715 *pbitpos = tem.to_shwi ();
6716 *poffset = offset = NULL_TREE;
6717 }
6718 }
6719
6720 /* Otherwise, split it up. */
6721 if (offset)
6722 {
6723 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6724 if (bit_offset.is_negative ())
6725 {
6726 double_int mask
6727 = double_int::mask (BITS_PER_UNIT == 8
6728 ? 3 : exact_log2 (BITS_PER_UNIT));
6729 double_int tem = bit_offset.and_not (mask);
6730 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6731 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
6732 bit_offset -= tem;
6733 tem = tem.arshift (BITS_PER_UNIT == 8
6734 ? 3 : exact_log2 (BITS_PER_UNIT),
6735 HOST_BITS_PER_DOUBLE_INT);
6736 offset = size_binop (PLUS_EXPR, offset,
6737 double_int_to_tree (sizetype, tem));
6738 }
6739
6740 *pbitpos = bit_offset.to_shwi ();
6741 *poffset = offset;
6742 }
6743
6744 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6745 if (mode == VOIDmode
6746 && blkmode_bitfield
6747 && (*pbitpos % BITS_PER_UNIT) == 0
6748 && (*pbitsize % BITS_PER_UNIT) == 0)
6749 *pmode = BLKmode;
6750 else
6751 *pmode = mode;
6752
6753 return exp;
6754 }
6755
6756 /* Return a tree of sizetype representing the size, in bytes, of the element
6757 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6758
6759 tree
6760 array_ref_element_size (tree exp)
6761 {
6762 tree aligned_size = TREE_OPERAND (exp, 3);
6763 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6764 location_t loc = EXPR_LOCATION (exp);
6765
6766 /* If a size was specified in the ARRAY_REF, it's the size measured
6767 in alignment units of the element type. So multiply by that value. */
6768 if (aligned_size)
6769 {
6770 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6771 sizetype from another type of the same width and signedness. */
6772 if (TREE_TYPE (aligned_size) != sizetype)
6773 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6774 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6775 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6776 }
6777
6778 /* Otherwise, take the size from that of the element type. Substitute
6779 any PLACEHOLDER_EXPR that we have. */
6780 else
6781 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6782 }
6783
6784 /* Return a tree representing the lower bound of the array mentioned in
6785 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6786
6787 tree
6788 array_ref_low_bound (tree exp)
6789 {
6790 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6791
6792 /* If a lower bound is specified in EXP, use it. */
6793 if (TREE_OPERAND (exp, 2))
6794 return TREE_OPERAND (exp, 2);
6795
6796 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6797 substituting for a PLACEHOLDER_EXPR as needed. */
6798 if (domain_type && TYPE_MIN_VALUE (domain_type))
6799 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6800
6801 /* Otherwise, return a zero of the appropriate type. */
6802 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6803 }
6804
6805 /* Returns true if REF is an array reference to an array at the end of
6806 a structure. If this is the case, the array may be allocated larger
6807 than its upper bound implies. */
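/* E.g. with a (hypothetical) layout such as

     struct msg { int len; char payload[1]; };

   an access P->PAYLOAD[I] through a pointer P is a trailing-array
   reference that may address storage beyond the declared bound, while
   the same access through a declared object of type struct msg is not,
   since that object's size is fixed.  */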
6808
6809 bool
6810 array_at_struct_end_p (tree ref)
6811 {
6812 if (TREE_CODE (ref) != ARRAY_REF
6813 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6814 return false;
6815
6816 while (handled_component_p (ref))
6817 {
6818 /* If the reference chain contains a component reference to a
6819 non-union type and there follows another field the reference
6820 is not at the end of a structure. */
6821 if (TREE_CODE (ref) == COMPONENT_REF
6822 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6823 {
6824 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6825 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6826 nextf = DECL_CHAIN (nextf);
6827 if (nextf)
6828 return false;
6829 }
6830
6831 ref = TREE_OPERAND (ref, 0);
6832 }
6833
6834 /* If the reference is based on a declared entity, the size of the array
6835 is constrained by its given domain. */
6836 if (DECL_P (ref))
6837 return false;
6838
6839 return true;
6840 }
6841
6842 /* Return a tree representing the upper bound of the array mentioned in
6843 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6844
6845 tree
6846 array_ref_up_bound (tree exp)
6847 {
6848 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6849
6850 /* If there is a domain type and it has an upper bound, use it, substituting
6851 for a PLACEHOLDER_EXPR as needed. */
6852 if (domain_type && TYPE_MAX_VALUE (domain_type))
6853 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6854
6855 /* Otherwise fail. */
6856 return NULL_TREE;
6857 }
6858
6859 /* Return a tree representing the offset, in bytes, of the field referenced
6860 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6861
6862 tree
6863 component_ref_field_offset (tree exp)
6864 {
6865 tree aligned_offset = TREE_OPERAND (exp, 2);
6866 tree field = TREE_OPERAND (exp, 1);
6867 location_t loc = EXPR_LOCATION (exp);
6868
6869 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6870 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6871 value. */
6872 if (aligned_offset)
6873 {
6874 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6875 sizetype from another type of the same width and signedness. */
6876 if (TREE_TYPE (aligned_offset) != sizetype)
6877 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6878 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6879 size_int (DECL_OFFSET_ALIGN (field)
6880 / BITS_PER_UNIT));
6881 }
6882
6883 /* Otherwise, take the offset from that of the field. Substitute
6884 any PLACEHOLDER_EXPR that we have. */
6885 else
6886 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6887 }
6888
6889 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6890
6891 static unsigned HOST_WIDE_INT
6892 target_align (const_tree target)
6893 {
6894 /* We might have a chain of nested references with intermediate misaligning
6895 bit-field components, so we need to recurse to find out. */
6896
6897 unsigned HOST_WIDE_INT this_align, outer_align;
6898
6899 switch (TREE_CODE (target))
6900 {
6901 case BIT_FIELD_REF:
6902 return 1;
6903
6904 case COMPONENT_REF:
6905 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6906 outer_align = target_align (TREE_OPERAND (target, 0));
6907 return MIN (this_align, outer_align);
6908
6909 case ARRAY_REF:
6910 case ARRAY_RANGE_REF:
6911 this_align = TYPE_ALIGN (TREE_TYPE (target));
6912 outer_align = target_align (TREE_OPERAND (target, 0));
6913 return MIN (this_align, outer_align);
6914
6915 CASE_CONVERT:
6916 case NON_LVALUE_EXPR:
6917 case VIEW_CONVERT_EXPR:
6918 this_align = TYPE_ALIGN (TREE_TYPE (target));
6919 outer_align = target_align (TREE_OPERAND (target, 0));
6920 return MAX (this_align, outer_align);
6921
6922 default:
6923 return TYPE_ALIGN (TREE_TYPE (target));
6924 }
6925 }
6926
6927 \f
6928 /* Given an rtx VALUE that may contain additions and multiplications, return
6929 an equivalent value that just refers to a register, memory, or constant.
6930 This is done by generating instructions to perform the arithmetic and
6931 returning a pseudo-register containing the value.
6932
6933 The returned value may be a REG, SUBREG, MEM or constant. */
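/* For instance, given a (hypothetical) rtx such as
   (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101)), this
   emits the multiplication and the addition and returns a register
   (typically a new pseudo) holding the result, suitable for use as an
   ordinary operand.  */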
6934
6935 rtx
6936 force_operand (rtx value, rtx target)
6937 {
6938 rtx op1, op2;
6939 /* Use subtarget as the target for operand 0 of a binary operation. */
6940 rtx subtarget = get_subtarget (target);
6941 enum rtx_code code = GET_CODE (value);
6942
6943 /* Check for subreg applied to an expression produced by loop optimizer. */
6944 if (code == SUBREG
6945 && !REG_P (SUBREG_REG (value))
6946 && !MEM_P (SUBREG_REG (value)))
6947 {
6948 value
6949 = simplify_gen_subreg (GET_MODE (value),
6950 force_reg (GET_MODE (SUBREG_REG (value)),
6951 force_operand (SUBREG_REG (value),
6952 NULL_RTX)),
6953 GET_MODE (SUBREG_REG (value)),
6954 SUBREG_BYTE (value));
6955 code = GET_CODE (value);
6956 }
6957
6958 /* Check for a PIC address load. */
6959 if ((code == PLUS || code == MINUS)
6960 && XEXP (value, 0) == pic_offset_table_rtx
6961 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6962 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6963 || GET_CODE (XEXP (value, 1)) == CONST))
6964 {
6965 if (!subtarget)
6966 subtarget = gen_reg_rtx (GET_MODE (value));
6967 emit_move_insn (subtarget, value);
6968 return subtarget;
6969 }
6970
6971 if (ARITHMETIC_P (value))
6972 {
6973 op2 = XEXP (value, 1);
6974 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6975 subtarget = 0;
6976 if (code == MINUS && CONST_INT_P (op2))
6977 {
6978 code = PLUS;
6979 op2 = negate_rtx (GET_MODE (value), op2);
6980 }
6981
6982 /* Check for an addition with OP2 a constant integer and our first
6983 operand a PLUS of a virtual register and something else. In that
6984 case, we want to emit the sum of the virtual register and the
6985 constant first and then add the other value. This allows virtual
6986 register instantiation to simply modify the constant rather than
6987 creating another one around this addition. */
6988 if (code == PLUS && CONST_INT_P (op2)
6989 && GET_CODE (XEXP (value, 0)) == PLUS
6990 && REG_P (XEXP (XEXP (value, 0), 0))
6991 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6992 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6993 {
6994 rtx temp = expand_simple_binop (GET_MODE (value), code,
6995 XEXP (XEXP (value, 0), 0), op2,
6996 subtarget, 0, OPTAB_LIB_WIDEN);
6997 return expand_simple_binop (GET_MODE (value), code, temp,
6998 force_operand (XEXP (XEXP (value,
6999 0), 1), 0),
7000 target, 0, OPTAB_LIB_WIDEN);
7001 }
7002
7003 op1 = force_operand (XEXP (value, 0), subtarget);
7004 op2 = force_operand (op2, NULL_RTX);
7005 switch (code)
7006 {
7007 case MULT:
7008 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7009 case DIV:
7010 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7011 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7012 target, 1, OPTAB_LIB_WIDEN);
7013 else
7014 return expand_divmod (0,
7015 FLOAT_MODE_P (GET_MODE (value))
7016 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7017 GET_MODE (value), op1, op2, target, 0);
7018 case MOD:
7019 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7020 target, 0);
7021 case UDIV:
7022 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7023 target, 1);
7024 case UMOD:
7025 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7026 target, 1);
7027 case ASHIFTRT:
7028 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7029 target, 0, OPTAB_LIB_WIDEN);
7030 default:
7031 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7032 target, 1, OPTAB_LIB_WIDEN);
7033 }
7034 }
7035 if (UNARY_P (value))
7036 {
7037 if (!target)
7038 target = gen_reg_rtx (GET_MODE (value));
7039 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7040 switch (code)
7041 {
7042 case ZERO_EXTEND:
7043 case SIGN_EXTEND:
7044 case TRUNCATE:
7045 case FLOAT_EXTEND:
7046 case FLOAT_TRUNCATE:
7047 convert_move (target, op1, code == ZERO_EXTEND);
7048 return target;
7049
7050 case FIX:
7051 case UNSIGNED_FIX:
7052 expand_fix (target, op1, code == UNSIGNED_FIX);
7053 return target;
7054
7055 case FLOAT:
7056 case UNSIGNED_FLOAT:
7057 expand_float (target, op1, code == UNSIGNED_FLOAT);
7058 return target;
7059
7060 default:
7061 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7062 }
7063 }
7064
7065 #ifdef INSN_SCHEDULING
7066 /* On machines that have insn scheduling, we want all memory references to be
7067 explicit, so we need to deal with such paradoxical SUBREGs. */
7068 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7069 value
7070 = simplify_gen_subreg (GET_MODE (value),
7071 force_reg (GET_MODE (SUBREG_REG (value)),
7072 force_operand (SUBREG_REG (value),
7073 NULL_RTX)),
7074 GET_MODE (SUBREG_REG (value)),
7075 SUBREG_BYTE (value));
7076 #endif
7077
7078 return value;
7079 }
7080 \f
7081 /* Subroutine of expand_expr: return nonzero iff there is no way that
7082 EXP can reference X, which is being modified. TOP_P is nonzero if this
7083 call is going to be used to determine whether we need a temporary
7084 for EXP, as opposed to a recursive call to this function.
7085
7086 It is always safe for this routine to return zero since it merely
7087 searches for optimization opportunities. */
7088
7089 int
7090 safe_from_p (const_rtx x, tree exp, int top_p)
7091 {
7092 rtx exp_rtl = 0;
7093 int i, nops;
7094
7095 if (x == 0
7096 /* If EXP has varying size, we MUST use a target since we currently
7097 have no way of allocating temporaries of variable size
7098 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7099 So we assume here that something at a higher level has prevented a
7100 clash. This is somewhat bogus, but the best we can do. Only
7101 do this when X is BLKmode and when we are at the top level. */
7102 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7103 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7104 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7105 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7106 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7107 != INTEGER_CST)
7108 && GET_MODE (x) == BLKmode)
7109 /* If X is in the outgoing argument area, it is always safe. */
7110 || (MEM_P (x)
7111 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7112 || (GET_CODE (XEXP (x, 0)) == PLUS
7113 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7114 return 1;
7115
7116 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7117 find the underlying pseudo. */
7118 if (GET_CODE (x) == SUBREG)
7119 {
7120 x = SUBREG_REG (x);
7121 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7122 return 0;
7123 }
7124
7125 /* Now look at our tree code and possibly recurse. */
7126 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7127 {
7128 case tcc_declaration:
7129 exp_rtl = DECL_RTL_IF_SET (exp);
7130 break;
7131
7132 case tcc_constant:
7133 return 1;
7134
7135 case tcc_exceptional:
7136 if (TREE_CODE (exp) == TREE_LIST)
7137 {
7138 while (1)
7139 {
7140 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7141 return 0;
7142 exp = TREE_CHAIN (exp);
7143 if (!exp)
7144 return 1;
7145 if (TREE_CODE (exp) != TREE_LIST)
7146 return safe_from_p (x, exp, 0);
7147 }
7148 }
7149 else if (TREE_CODE (exp) == CONSTRUCTOR)
7150 {
7151 constructor_elt *ce;
7152 unsigned HOST_WIDE_INT idx;
7153
7154 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7155 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7156 || !safe_from_p (x, ce->value, 0))
7157 return 0;
7158 return 1;
7159 }
7160 else if (TREE_CODE (exp) == ERROR_MARK)
7161 return 1; /* An already-visited SAVE_EXPR? */
7162 else
7163 return 0;
7164
7165 case tcc_statement:
7166 /* The only case we look at here is the DECL_INITIAL inside a
7167 DECL_EXPR. */
7168 return (TREE_CODE (exp) != DECL_EXPR
7169 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7170 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7171 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7172
7173 case tcc_binary:
7174 case tcc_comparison:
7175 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7176 return 0;
7177 /* Fall through. */
7178
7179 case tcc_unary:
7180 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7181
7182 case tcc_expression:
7183 case tcc_reference:
7184 case tcc_vl_exp:
7185 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7186 the expression. If it is set, we conflict iff we are that rtx or
7187 both are in memory. Otherwise, we check all operands of the
7188 expression recursively. */
7189
7190 switch (TREE_CODE (exp))
7191 {
7192 case ADDR_EXPR:
7193 /* If the operand is static or we are static, we can't conflict.
7194 Likewise if we don't conflict with the operand at all. */
7195 if (staticp (TREE_OPERAND (exp, 0))
7196 || TREE_STATIC (exp)
7197 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7198 return 1;
7199
7200 /* Otherwise, the only way this can conflict is if we are taking
7201 the address of a DECL whose address is part of X, which is
7202 very rare. */
7203 exp = TREE_OPERAND (exp, 0);
7204 if (DECL_P (exp))
7205 {
7206 if (!DECL_RTL_SET_P (exp)
7207 || !MEM_P (DECL_RTL (exp)))
7208 return 0;
7209 else
7210 exp_rtl = XEXP (DECL_RTL (exp), 0);
7211 }
7212 break;
7213
7214 case MEM_REF:
7215 if (MEM_P (x)
7216 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7217 get_alias_set (exp)))
7218 return 0;
7219 break;
7220
7221 case CALL_EXPR:
7222 /* Assume that the call will clobber all hard registers and
7223 all of memory. */
7224 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7225 || MEM_P (x))
7226 return 0;
7227 break;
7228
7229 case WITH_CLEANUP_EXPR:
7230 case CLEANUP_POINT_EXPR:
7231 /* Lowered by gimplify.c. */
7232 gcc_unreachable ();
7233
7234 case SAVE_EXPR:
7235 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7236
7237 default:
7238 break;
7239 }
7240
7241 /* If we have an rtx, we do not need to scan our operands. */
7242 if (exp_rtl)
7243 break;
7244
7245 nops = TREE_OPERAND_LENGTH (exp);
7246 for (i = 0; i < nops; i++)
7247 if (TREE_OPERAND (exp, i) != 0
7248 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7249 return 0;
7250
7251 break;
7252
7253 case tcc_type:
7254 /* Should never get a type here. */
7255 gcc_unreachable ();
7256 }
7257
7258 /* If we have an rtl, find any enclosed object. Then see if we conflict
7259 with it. */
7260 if (exp_rtl)
7261 {
7262 if (GET_CODE (exp_rtl) == SUBREG)
7263 {
7264 exp_rtl = SUBREG_REG (exp_rtl);
7265 if (REG_P (exp_rtl)
7266 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7267 return 0;
7268 }
7269
7270 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7271 are memory and they conflict. */
7272 return ! (rtx_equal_p (x, exp_rtl)
7273 || (MEM_P (x) && MEM_P (exp_rtl)
7274 && true_dependence (exp_rtl, VOIDmode, x)));
7275 }
7276
7277 /* If we reach here, it is safe. */
7278 return 1;
7279 }
7280
7281 \f
7282 /* Return the highest power of two that EXP is known to be a multiple of.
7283 This is used in updating alignment of MEMs in array references. */
7284
7285 unsigned HOST_WIDE_INT
7286 highest_pow2_factor (const_tree exp)
7287 {
7288 unsigned HOST_WIDE_INT ret;
7289 int trailing_zeros = tree_ctz (exp);
7290 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7291 return BIGGEST_ALIGNMENT;
7292 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7293 if (ret > BIGGEST_ALIGNMENT)
7294 return BIGGEST_ALIGNMENT;
7295 return ret;
7296 }
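/* Illustrative sketch (not from GCC): the function above turns the
   number of trailing zero bits known for EXP into a power-of-two
   alignment, clamped to BIGGEST_ALIGNMENT.  The hypothetical helper
   below performs the same arithmetic on a plain 64-bit value, with
   the clamp passed explicitly, to make the mapping concrete.  */

static unsigned long long
example_highest_pow2_factor (unsigned long long value,
                             unsigned long long biggest_alignment)
{
  unsigned int trailing_zeros = 0;

  /* Zero is a multiple of every power of two; like the "all trailing
     bits known zero" case above, just return the clamp.  */
  if (value == 0)
    return biggest_alignment;

  while ((value & 1) == 0)
    {
      value >>= 1;
      trailing_zeros++;
    }

  /* 1 << trailing_zeros is the largest power of two dividing VALUE,
     but never claim more than the clamp.  */
  if ((1ULL << trailing_zeros) > biggest_alignment)
    return biggest_alignment;
  return 1ULL << trailing_zeros;
}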
7297
7298 /* Similar, except that the alignment requirements of TARGET are
7299 taken into account. Assume it is at least as aligned as its
7300 type, unless it is a COMPONENT_REF in which case the layout of
7301 the structure gives the alignment. */
7302
7303 static unsigned HOST_WIDE_INT
7304 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7305 {
7306 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7307 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7308
7309 return MAX (factor, talign);
7310 }
7311 \f
7312 #ifdef HAVE_conditional_move
7313 /* Convert the tree comparison code TCODE to the rtl one where the
7314 signedness is UNSIGNEDP. */
7315
7316 static enum rtx_code
7317 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7318 {
7319 enum rtx_code code;
7320 switch (tcode)
7321 {
7322 case EQ_EXPR:
7323 code = EQ;
7324 break;
7325 case NE_EXPR:
7326 code = NE;
7327 break;
7328 case LT_EXPR:
7329 code = unsignedp ? LTU : LT;
7330 break;
7331 case LE_EXPR:
7332 code = unsignedp ? LEU : LE;
7333 break;
7334 case GT_EXPR:
7335 code = unsignedp ? GTU : GT;
7336 break;
7337 case GE_EXPR:
7338 code = unsignedp ? GEU : GE;
7339 break;
7340 case UNORDERED_EXPR:
7341 code = UNORDERED;
7342 break;
7343 case ORDERED_EXPR:
7344 code = ORDERED;
7345 break;
7346 case UNLT_EXPR:
7347 code = UNLT;
7348 break;
7349 case UNLE_EXPR:
7350 code = UNLE;
7351 break;
7352 case UNGT_EXPR:
7353 code = UNGT;
7354 break;
7355 case UNGE_EXPR:
7356 code = UNGE;
7357 break;
7358 case UNEQ_EXPR:
7359 code = UNEQ;
7360 break;
7361 case LTGT_EXPR:
7362 code = LTGT;
7363 break;
7364
7365 default:
7366 gcc_unreachable ();
7367 }
7368 return code;
7369 }
7370 #endif
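/* Illustrative sketch (not from GCC): why the mapping above must pick
   LTU/GTU rather than LT/GT when UNSIGNEDP is set.  The same bit
   pattern orders differently under signed and unsigned comparison, as
   the hypothetical helper below shows on a two's-complement target
   with 32-bit int.  */

static int
example_signedness_changes_order (void)
{
  unsigned int a = 1u;
  unsigned int b = 0xffffffffu;		/* Reads back as -1 when viewed signed.  */

  int unsigned_lt = a < b;		/* LTU order: 1 < 0xffffffff holds.  */
  int signed_lt = (int) a < (int) b;	/* LT order: 1 < -1 does not.  */

  return unsigned_lt != signed_lt;	/* 1: the two orders disagree.  */
}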
7371
7372 /* Subroutine of expand_expr. Expand the two operands of a binary
7373 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7374 The value may be stored in TARGET if TARGET is nonzero. The
7375 MODIFIER argument is as documented by expand_expr. */
7376
7377 static void
7378 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7379 enum expand_modifier modifier)
7380 {
7381 if (! safe_from_p (target, exp1, 1))
7382 target = 0;
7383 if (operand_equal_p (exp0, exp1, 0))
7384 {
7385 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7386 *op1 = copy_rtx (*op0);
7387 }
7388 else
7389 {
7390 /* If we need to preserve evaluation order, copy exp0 into its own
7391 temporary variable so that it can't be clobbered by exp1. */
7392 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7393 exp0 = save_expr (exp0);
7394 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7395 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7396 }
7397 }
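/* Illustrative sketch (not from GCC): the save_expr call above forces
   EXP0 into its own temporary when EXP1 has side effects, so that
   expanding EXP1 cannot change the value already computed for EXP0.
   The hypothetical helpers below show the same idea at the C level,
   where "example_read () + example_bump ()" would otherwise leave the
   evaluation order unspecified.  */

static int example_counter;

static int
example_read (void)
{
  return example_counter;
}

static int
example_bump (void)
{
  return ++example_counter;
}

static int
example_left_to_right_sum (void)
{
  /* Evaluate the first operand into a temporary before touching the
     second, mirroring what save_expr guarantees for EXP0.  */
  int first = example_read ();
  int second = example_bump ();
  return first + second;
}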
7398
7399 \f
7400 /* Return a MEM that contains constant EXP. DEFER is as for
7401 output_constant_def and MODIFIER is as for expand_expr. */
7402
7403 static rtx
7404 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7405 {
7406 rtx mem;
7407
7408 mem = output_constant_def (exp, defer);
7409 if (modifier != EXPAND_INITIALIZER)
7410 mem = use_anchored_address (mem);
7411 return mem;
7412 }
7413
7414 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7415 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7416
7417 static rtx
7418 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7419 enum expand_modifier modifier, addr_space_t as)
7420 {
7421 rtx result, subtarget;
7422 tree inner, offset;
7423 HOST_WIDE_INT bitsize, bitpos;
7424 int volatilep, unsignedp;
7425 enum machine_mode mode1;
7426
7427 /* If we are taking the address of a constant and are at the top level,
7428 we have to use output_constant_def since we can't call force_const_mem
7429 at top level. */
7430 /* ??? This should be considered a front-end bug. We should not be
7431 generating ADDR_EXPR of something that isn't an LVALUE. The only
7432 exception here is STRING_CST. */
7433 if (CONSTANT_CLASS_P (exp))
7434 {
7435 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7436 if (modifier < EXPAND_SUM)
7437 result = force_operand (result, target);
7438 return result;
7439 }
7440
7441 /* Everything must be something allowed by is_gimple_addressable. */
7442 switch (TREE_CODE (exp))
7443 {
7444 case INDIRECT_REF:
7445 /* This case will happen via recursion for &a->b. */
7446 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7447
7448 case MEM_REF:
7449 {
7450 tree tem = TREE_OPERAND (exp, 0);
7451 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7452 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7453 return expand_expr (tem, target, tmode, modifier);
7454 }
7455
7456 case CONST_DECL:
7457 /* Expand the initializer like constants above. */
7458 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7459 0, modifier), 0);
7460 if (modifier < EXPAND_SUM)
7461 result = force_operand (result, target);
7462 return result;
7463
7464 case REALPART_EXPR:
7465 /* The real part of the complex number is always first, therefore
7466 the address is the same as the address of the parent object. */
7467 offset = 0;
7468 bitpos = 0;
7469 inner = TREE_OPERAND (exp, 0);
7470 break;
7471
7472 case IMAGPART_EXPR:
7473 /* The imaginary part of the complex number is always second.
7474 The expression is therefore always offset by the size of the
7475 scalar type. */
7476 offset = 0;
7477 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7478 inner = TREE_OPERAND (exp, 0);
7479 break;
7480
7481 case COMPOUND_LITERAL_EXPR:
7482 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7483 rtl_for_decl_init is called on DECL_INITIAL with
7484 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7485 if (modifier == EXPAND_INITIALIZER
7486 && COMPOUND_LITERAL_EXPR_DECL (exp))
7487 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7488 target, tmode, modifier, as);
7489 /* FALLTHRU */
7490 default:
7491 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7492 expand_expr, as that can have various side effects; LABEL_DECLs for
7493 example, may not have their DECL_RTL set yet. Expand the rtl of
7494 CONSTRUCTORs too, which should yield a memory reference for the
7495 constructor's contents. Assume language specific tree nodes can
7496 be expanded in some interesting way. */
7497 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7498 if (DECL_P (exp)
7499 || TREE_CODE (exp) == CONSTRUCTOR
7500 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7501 {
7502 result = expand_expr (exp, target, tmode,
7503 modifier == EXPAND_INITIALIZER
7504 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7505
7506 /* If the DECL isn't in memory, then the DECL wasn't properly
7507 marked TREE_ADDRESSABLE, which will be either a front-end
7508 or a tree optimizer bug. */
7509
7510 if (TREE_ADDRESSABLE (exp)
7511 && ! MEM_P (result)
7512 && ! targetm.calls.allocate_stack_slots_for_args ())
7513 {
7514 error ("local frame unavailable (naked function?)");
7515 return result;
7516 }
7517 else
7518 gcc_assert (MEM_P (result));
7519 result = XEXP (result, 0);
7520
7521 /* ??? Is this needed anymore? */
7522 if (DECL_P (exp))
7523 TREE_USED (exp) = 1;
7524
7525 if (modifier != EXPAND_INITIALIZER
7526 && modifier != EXPAND_CONST_ADDRESS
7527 && modifier != EXPAND_SUM)
7528 result = force_operand (result, target);
7529 return result;
7530 }
7531
7532 /* Pass FALSE as the last argument to get_inner_reference although
7533 we are expanding to RTL. The rationale is that we know how to
7534 handle "aligning nodes" here: we can just bypass them because
7535 they won't change the final object whose address will be returned
7536 (they actually exist only for that purpose). */
7537 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7538 &mode1, &unsignedp, &volatilep, false);
7539 break;
7540 }
7541
7542 /* We must have made progress. */
7543 gcc_assert (inner != exp);
7544
7545 subtarget = offset || bitpos ? NULL_RTX : target;
7546 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7547 inner alignment, force the inner to be sufficiently aligned. */
7548 if (CONSTANT_CLASS_P (inner)
7549 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7550 {
7551 inner = copy_node (inner);
7552 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7553 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7554 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7555 }
7556 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7557
7558 if (offset)
7559 {
7560 rtx tmp;
7561
7562 if (modifier != EXPAND_NORMAL)
7563 result = force_operand (result, NULL);
7564 tmp = expand_expr (offset, NULL_RTX, tmode,
7565 modifier == EXPAND_INITIALIZER
7566 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7567
7568 result = convert_memory_address_addr_space (tmode, result, as);
7569 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7570
7571 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7572 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7573 else
7574 {
7575 subtarget = bitpos ? NULL_RTX : target;
7576 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7577 1, OPTAB_LIB_WIDEN);
7578 }
7579 }
7580
7581 if (bitpos)
7582 {
7583 /* Someone beforehand should have rejected taking the address
7584 of such an object. */
7585 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7586
7587 result = convert_memory_address_addr_space (tmode, result, as);
7588 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7589 if (modifier < EXPAND_SUM)
7590 result = force_operand (result, target);
7591 }
7592
7593 return result;
7594 }
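/* Illustrative sketch (not from GCC): the REALPART_EXPR and
   IMAGPART_EXPR cases above take the address of a complex component
   as the parent object's address plus a bit offset -- zero for the
   real part, the scalar size for the imaginary part.  The
   hypothetical struct below stands in for a complex double on a
   typical ABI with no padding between the parts.  */

struct example_complex_double
{
  double re;	/* Corresponds to bitpos == 0.  */
  double im;	/* Corresponds to bitpos == GET_MODE_BITSIZE (DFmode).  */
};

static int
example_component_offsets_ok (const struct example_complex_double *z)
{
  long re_off = (const char *) &z->re - (const char *) z;
  long im_off = (const char *) &z->im - (const char *) z;

  /* These are the byte offsets the code above derives from BITPOS via
     bitpos / BITS_PER_UNIT.  */
  return re_off == 0 && im_off == (long) sizeof (double);
}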
7595
7596 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7597 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7598
7599 static rtx
7600 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7601 enum expand_modifier modifier)
7602 {
7603 addr_space_t as = ADDR_SPACE_GENERIC;
7604 enum machine_mode address_mode = Pmode;
7605 enum machine_mode pointer_mode = ptr_mode;
7606 enum machine_mode rmode;
7607 rtx result;
7608
7609 /* Target mode of VOIDmode says "whatever's natural". */
7610 if (tmode == VOIDmode)
7611 tmode = TYPE_MODE (TREE_TYPE (exp));
7612
7613 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7614 {
7615 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7616 address_mode = targetm.addr_space.address_mode (as);
7617 pointer_mode = targetm.addr_space.pointer_mode (as);
7618 }
7619
7620 /* We can get called with some Weird Things if the user does silliness
7621 like "(short) &a". In that case, convert_memory_address won't do
7622 the right thing, so ignore the given target mode. */
7623 if (tmode != address_mode && tmode != pointer_mode)
7624 tmode = address_mode;
7625
7626 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7627 tmode, modifier, as);
7628
7629 /* Despite expand_expr's claims concerning ignoring TMODE when not
7630 strictly convenient, stuff breaks if we don't honor it. Note
7631 that combined with the above, we only do this for pointer modes. */
7632 rmode = GET_MODE (result);
7633 if (rmode == VOIDmode)
7634 rmode = tmode;
7635 if (rmode != tmode)
7636 result = convert_memory_address_addr_space (tmode, result, as);
7637
7638 return result;
7639 }
7640
7641 /* Generate code for computing CONSTRUCTOR EXP.
7642 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7643 is TRUE, instead of creating a temporary variable in memory
7644 NULL is returned and the caller needs to handle it differently. */
7645
7646 static rtx
7647 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7648 bool avoid_temp_mem)
7649 {
7650 tree type = TREE_TYPE (exp);
7651 enum machine_mode mode = TYPE_MODE (type);
7652
7653 /* Try to avoid creating a temporary at all. This is possible
7654 if all of the initializer is zero.
7655 FIXME: try to handle all [0..255] initializers we can handle
7656 with memset. */
7657 if (TREE_STATIC (exp)
7658 && !TREE_ADDRESSABLE (exp)
7659 && target != 0 && mode == BLKmode
7660 && all_zeros_p (exp))
7661 {
7662 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7663 return target;
7664 }
7665
7666 /* All elts simple constants => refer to a constant in memory. But
7667 if this is a non-BLKmode mode, let it store a field at a time
7668 since that should make a CONST_INT or CONST_DOUBLE when we
7669 fold. Likewise, if we have a target we can use, it is best to
7670 store directly into the target unless the type is large enough
7671 that memcpy will be used. If we are making an initializer and
7672 all operands are constant, put it in memory as well.
7673
7674 FIXME: Avoid trying to fill vector constructors piece-meal.
7675 Output them with output_constant_def below unless we're sure
7676 they're zeros. This should go away when vector initializers
7677 are treated like VECTOR_CST instead of arrays. */
7678 if ((TREE_STATIC (exp)
7679 && ((mode == BLKmode
7680 && ! (target != 0 && safe_from_p (target, exp, 1)))
7681 || TREE_ADDRESSABLE (exp)
7682 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7683 && (! MOVE_BY_PIECES_P
7684 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7685 TYPE_ALIGN (type)))
7686 && ! mostly_zeros_p (exp))))
7687 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7688 && TREE_CONSTANT (exp)))
7689 {
7690 rtx constructor;
7691
7692 if (avoid_temp_mem)
7693 return NULL_RTX;
7694
7695 constructor = expand_expr_constant (exp, 1, modifier);
7696
7697 if (modifier != EXPAND_CONST_ADDRESS
7698 && modifier != EXPAND_INITIALIZER
7699 && modifier != EXPAND_SUM)
7700 constructor = validize_mem (constructor);
7701
7702 return constructor;
7703 }
7704
7705 /* Handle calls that pass values in multiple non-contiguous
7706 locations. The Irix 6 ABI has examples of this. */
7707 if (target == 0 || ! safe_from_p (target, exp, 1)
7708 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7709 {
7710 if (avoid_temp_mem)
7711 return NULL_RTX;
7712
7713 target
7714 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7715 | (TREE_READONLY (exp)
7716 * TYPE_QUAL_CONST))),
7717 TREE_ADDRESSABLE (exp), 1);
7718 }
7719
7720 store_constructor (exp, target, 0, int_expr_size (exp));
7721 return target;
7722 }
7723
7724
7725 /* expand_expr: generate code for computing expression EXP.
7726 An rtx for the computed value is returned. The value is never null.
7727 In the case of a void EXP, const0_rtx is returned.
7728
7729 The value may be stored in TARGET if TARGET is nonzero.
7730 TARGET is just a suggestion; callers must assume that
7731 the rtx returned may not be the same as TARGET.
7732
7733 If TARGET is CONST0_RTX, it means that the value will be ignored.
7734
7735 If TMODE is not VOIDmode, it suggests generating the
7736 result in mode TMODE. But this is done only when convenient.
7737 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7738 TMODE is just a suggestion; callers must assume that
7739 the rtx returned may not have mode TMODE.
7740
7741 Note that TARGET may have neither TMODE nor MODE. In that case, it
7742 probably will not be used.
7743
7744 If MODIFIER is EXPAND_SUM then when EXP is an addition
7745 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7746 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7747 products as above, or REG or MEM, or constant.
7748 Ordinarily in such cases we would output mul or add instructions
7749 and then return a pseudo reg containing the sum.
7750
7751 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7752 it also marks a label as absolutely required (it can't be dead).
7753 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7754 This is used for outputting expressions used in initializers.
7755
7756 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7757 with a constant address even if that address is not normally legitimate.
7758 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7759
7760 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7761 a call parameter. Such targets require special care as we haven't yet
7762 marked TARGET so that it's safe from being trashed by libcalls. We
7763 don't want to use TARGET for anything but the final result;
7764 Intermediate values must go elsewhere. Additionally, calls to
7765 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7766
7767 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7768 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7769 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7770 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7771 recursively. */
7772
7773 rtx
7774 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7775 enum expand_modifier modifier, rtx *alt_rtl)
7776 {
7777 rtx ret;
7778
7779 /* Handle ERROR_MARK before anybody tries to access its type. */
7780 if (TREE_CODE (exp) == ERROR_MARK
7781 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7782 {
7783 ret = CONST0_RTX (tmode);
7784 return ret ? ret : const0_rtx;
7785 }
7786
7787 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7788 return ret;
7789 }
7790
7791 /* Try to expand the conditional expression which is represented by
7792 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7793 return the RTL register which represents the result. Otherwise return
7794 NULL_RTX. */
7795
7796 static rtx
7797 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7798 tree treeop1 ATTRIBUTE_UNUSED,
7799 tree treeop2 ATTRIBUTE_UNUSED)
7800 {
7801 #ifdef HAVE_conditional_move
7802 rtx insn;
7803 rtx op00, op01, op1, op2;
7804 enum rtx_code comparison_code;
7805 enum machine_mode comparison_mode;
7806 gimple srcstmt;
7807 rtx temp;
7808 tree type = TREE_TYPE (treeop1);
7809 int unsignedp = TYPE_UNSIGNED (type);
7810 enum machine_mode mode = TYPE_MODE (type);
7811 enum machine_mode orig_mode = mode;
7812
7813 /* If we cannot do a conditional move on the mode, try doing it
7814 with the promoted mode. */
7815 if (!can_conditionally_move_p (mode))
7816 {
7817 mode = promote_mode (type, mode, &unsignedp);
7818 if (!can_conditionally_move_p (mode))
7819 return NULL_RTX;
7820 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7821 }
7822 else
7823 temp = assign_temp (type, 0, 1);
7824
7825 start_sequence ();
7826 expand_operands (treeop1, treeop2,
7827 temp, &op1, &op2, EXPAND_NORMAL);
7828
7829 if (TREE_CODE (treeop0) == SSA_NAME
7830 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7831 {
7832 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7833 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7834 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7835 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7836 comparison_mode = TYPE_MODE (type);
7837 unsignedp = TYPE_UNSIGNED (type);
7838 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7839 }
7840 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7841 {
7842 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7843 enum tree_code cmpcode = TREE_CODE (treeop0);
7844 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7845 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7846 unsignedp = TYPE_UNSIGNED (type);
7847 comparison_mode = TYPE_MODE (type);
7848 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7849 }
7850 else
7851 {
7852 op00 = expand_normal (treeop0);
7853 op01 = const0_rtx;
7854 comparison_code = NE;
7855 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7856 }
7857
7858 if (GET_MODE (op1) != mode)
7859 op1 = gen_lowpart (mode, op1);
7860
7861 if (GET_MODE (op2) != mode)
7862 op2 = gen_lowpart (mode, op2);
7863
7864 /* Try to emit the conditional move. */
7865 insn = emit_conditional_move (temp, comparison_code,
7866 op00, op01, comparison_mode,
7867 op1, op2, mode,
7868 unsignedp);
7869
7870 /* If we could do the conditional move, emit the sequence,
7871 and return. */
7872 if (insn)
7873 {
7874 rtx seq = get_insns ();
7875 end_sequence ();
7876 emit_insn (seq);
7877 return convert_modes (orig_mode, mode, temp, 0);
7878 }
7879
7880 /* Otherwise discard the sequence and fall back to code with
7881 branches. */
7882 end_sequence ();
7883 #endif
7884 return NULL_RTX;
7885 }
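/* Illustrative sketch (not from GCC): the function above aims to emit
   a branch-free "TREEOP0 ? TREEOP1 : TREEOP2" using the target's
   conditional-move instruction.  The portable C below is only an
   analogy for what branch-free selection means -- a mask built from
   the condition selects between the two values -- and is not what GCC
   actually emits.  */

static int
example_branchless_select (int cond, int a, int b)
{
  /* MASK is all ones when COND is nonzero, all zeros otherwise
     (assuming the usual two's-complement representation).  */
  int mask = -(cond != 0);
  return (a & mask) | (b & ~mask);
}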
7886
7887 rtx
7888 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7889 enum expand_modifier modifier)
7890 {
7891 rtx op0, op1, op2, temp;
7892 tree type;
7893 int unsignedp;
7894 enum machine_mode mode;
7895 enum tree_code code = ops->code;
7896 optab this_optab;
7897 rtx subtarget, original_target;
7898 int ignore;
7899 bool reduce_bit_field;
7900 location_t loc = ops->location;
7901 tree treeop0, treeop1, treeop2;
7902 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7903 ? reduce_to_bit_field_precision ((expr), \
7904 target, \
7905 type) \
7906 : (expr))
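  /* REDUCE_BIT_FIELD narrows an expanded result back down to
     TYPE_PRECISION (type) whenever reduce_bit_field is set below,
     i.e. when TYPE is an integral type whose precision is smaller
     than that of its mode: reduce_to_bit_field_precision masks the
     value for unsigned types and sign-extends it from the type's
     precision for signed ones, so e.g. a 5-bit unsigned result
     computed in SImode is ANDed with 0x1f.  */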
7907
7908 type = ops->type;
7909 mode = TYPE_MODE (type);
7910 unsignedp = TYPE_UNSIGNED (type);
7911
7912 treeop0 = ops->op0;
7913 treeop1 = ops->op1;
7914 treeop2 = ops->op2;
7915
7916 /* We should be called only on simple (binary or unary) expressions,
7917 exactly those that are valid in gimple expressions that aren't
7918 GIMPLE_SINGLE_RHS (or invalid). */
7919 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7920 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7921 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7922
7923 ignore = (target == const0_rtx
7924 || ((CONVERT_EXPR_CODE_P (code)
7925 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7926 && TREE_CODE (type) == VOID_TYPE));
7927
7928 /* We should be called only if we need the result. */
7929 gcc_assert (!ignore);
7930
7931 /* An operation in what may be a bit-field type needs the
7932 result to be reduced to the precision of the bit-field type,
7933 which is narrower than that of the type's mode. */
7934 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7935 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7936
7937 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7938 target = 0;
7939
7940 /* Use subtarget as the target for operand 0 of a binary operation. */
7941 subtarget = get_subtarget (target);
7942 original_target = target;
7943
7944 switch (code)
7945 {
7946 case NON_LVALUE_EXPR:
7947 case PAREN_EXPR:
7948 CASE_CONVERT:
7949 if (treeop0 == error_mark_node)
7950 return const0_rtx;
7951
7952 if (TREE_CODE (type) == UNION_TYPE)
7953 {
7954 tree valtype = TREE_TYPE (treeop0);
7955
7956 /* If both input and output are BLKmode, this conversion isn't doing
7957 anything except possibly changing memory attributes. */
7958 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7959 {
7960 rtx result = expand_expr (treeop0, target, tmode,
7961 modifier);
7962
7963 result = copy_rtx (result);
7964 set_mem_attributes (result, type, 0);
7965 return result;
7966 }
7967
7968 if (target == 0)
7969 {
7970 if (TYPE_MODE (type) != BLKmode)
7971 target = gen_reg_rtx (TYPE_MODE (type));
7972 else
7973 target = assign_temp (type, 1, 1);
7974 }
7975
7976 if (MEM_P (target))
7977 /* Store data into beginning of memory target. */
7978 store_expr (treeop0,
7979 adjust_address (target, TYPE_MODE (valtype), 0),
7980 modifier == EXPAND_STACK_PARM,
7981 false);
7982
7983 else
7984 {
7985 gcc_assert (REG_P (target));
7986
7987 /* Store this field into a union of the proper type. */
7988 store_field (target,
7989 MIN ((int_size_in_bytes (TREE_TYPE
7990 (treeop0))
7991 * BITS_PER_UNIT),
7992 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7993 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
7994 }
7995
7996 /* Return the entire union. */
7997 return target;
7998 }
7999
8000 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8001 {
8002 op0 = expand_expr (treeop0, target, VOIDmode,
8003 modifier);
8004
8005 /* If the signedness of the conversion differs and OP0 is
8006 a promoted SUBREG, clear that indication since we now
8007 have to do the proper extension. */
8008 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8009 && GET_CODE (op0) == SUBREG)
8010 SUBREG_PROMOTED_VAR_P (op0) = 0;
8011
8012 return REDUCE_BIT_FIELD (op0);
8013 }
8014
8015 op0 = expand_expr (treeop0, NULL_RTX, mode,
8016 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8017 if (GET_MODE (op0) == mode)
8018 ;
8019
8020 /* If OP0 is a constant, just convert it into the proper mode. */
8021 else if (CONSTANT_P (op0))
8022 {
8023 tree inner_type = TREE_TYPE (treeop0);
8024 enum machine_mode inner_mode = GET_MODE (op0);
8025
8026 if (inner_mode == VOIDmode)
8027 inner_mode = TYPE_MODE (inner_type);
8028
8029 if (modifier == EXPAND_INITIALIZER)
8030 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8031 subreg_lowpart_offset (mode,
8032 inner_mode));
8033 else
8034 op0 = convert_modes (mode, inner_mode, op0,
8035 TYPE_UNSIGNED (inner_type));
8036 }
8037
8038 else if (modifier == EXPAND_INITIALIZER)
8039 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8040
8041 else if (target == 0)
8042 op0 = convert_to_mode (mode, op0,
8043 TYPE_UNSIGNED (TREE_TYPE
8044 (treeop0)));
8045 else
8046 {
8047 convert_move (target, op0,
8048 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8049 op0 = target;
8050 }
8051
8052 return REDUCE_BIT_FIELD (op0);
8053
8054 case ADDR_SPACE_CONVERT_EXPR:
8055 {
8056 tree treeop0_type = TREE_TYPE (treeop0);
8057 addr_space_t as_to;
8058 addr_space_t as_from;
8059
8060 gcc_assert (POINTER_TYPE_P (type));
8061 gcc_assert (POINTER_TYPE_P (treeop0_type));
8062
8063 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8064 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8065
8066 /* Conversions between pointers to the same address space should
8067 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8068 gcc_assert (as_to != as_from);
8069
8070 /* Ask target code to handle conversion between pointers
8071 to overlapping address spaces. */
8072 if (targetm.addr_space.subset_p (as_to, as_from)
8073 || targetm.addr_space.subset_p (as_from, as_to))
8074 {
8075 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8076 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8077 gcc_assert (op0);
8078 return op0;
8079 }
8080
8081 /* For disjoint address spaces, converting anything but
8082 a null pointer invokes undefined behaviour. We simply
8083 always return a null pointer here. */
8084 return CONST0_RTX (mode);
8085 }
8086
8087 case POINTER_PLUS_EXPR:
8088 /* Even though the sizetype mode and the pointer's mode can be different,
8089 expand is able to handle this correctly and get the correct result out
8090 of the PLUS_EXPR code. */
8091 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8092 if sizetype precision is smaller than pointer precision. */
8093 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8094 treeop1 = fold_convert_loc (loc, type,
8095 fold_convert_loc (loc, ssizetype,
8096 treeop1));
8097 /* If sizetype precision is larger than pointer precision, truncate the
8098 offset to have matching modes. */
8099 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8100 treeop1 = fold_convert_loc (loc, type, treeop1);
8101
8102 case PLUS_EXPR:
8103 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8104 something else, make sure we add the register to the constant and
8105 then to the other thing. This case can occur during strength
8106 reduction and doing it this way will produce better code if the
8107 frame pointer or argument pointer is eliminated.
8108
8109 fold-const.c will ensure that the constant is always in the inner
8110 PLUS_EXPR, so the only case we need to do anything about is if
8111 sp, ap, or fp is our second argument, in which case we must swap
8112 the innermost first argument and our second argument. */
8113
8114 if (TREE_CODE (treeop0) == PLUS_EXPR
8115 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8116 && TREE_CODE (treeop1) == VAR_DECL
8117 && (DECL_RTL (treeop1) == frame_pointer_rtx
8118 || DECL_RTL (treeop1) == stack_pointer_rtx
8119 || DECL_RTL (treeop1) == arg_pointer_rtx))
8120 {
8121 gcc_unreachable ();
8122 }
8123
8124 /* If the result is to be ptr_mode and we are adding an integer to
8125 something, we might be forming a constant. So try to use
8126 plus_constant. If it produces a sum and we can't accept it,
8127 use force_operand. This allows P = &ARR[const] to generate
8128 efficient code on machines where a SYMBOL_REF is not a valid
8129 address.
8130
8131 If this is an EXPAND_SUM call, always return the sum. */
8132 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8133 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8134 {
8135 if (modifier == EXPAND_STACK_PARM)
8136 target = 0;
8137 if (TREE_CODE (treeop0) == INTEGER_CST
8138 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8139 && TREE_CONSTANT (treeop1))
8140 {
8141 rtx constant_part;
8142
8143 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8144 EXPAND_SUM);
8145 /* Use immed_double_const to ensure that the constant is
8146 truncated according to the mode of OP1, then sign extended
8147 to a HOST_WIDE_INT. Using the constant directly can result
8148 in non-canonical RTL in a 64x32 cross compile. */
8149 constant_part
8150 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8151 (HOST_WIDE_INT) 0,
8152 TYPE_MODE (TREE_TYPE (treeop1)));
8153 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8154 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8155 op1 = force_operand (op1, target);
8156 return REDUCE_BIT_FIELD (op1);
8157 }
8158
8159 else if (TREE_CODE (treeop1) == INTEGER_CST
8160 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8161 && TREE_CONSTANT (treeop0))
8162 {
8163 rtx constant_part;
8164
8165 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8166 (modifier == EXPAND_INITIALIZER
8167 ? EXPAND_INITIALIZER : EXPAND_SUM));
8168 if (! CONSTANT_P (op0))
8169 {
8170 op1 = expand_expr (treeop1, NULL_RTX,
8171 VOIDmode, modifier);
8172 /* Return a PLUS if modifier says it's OK. */
8173 if (modifier == EXPAND_SUM
8174 || modifier == EXPAND_INITIALIZER)
8175 return simplify_gen_binary (PLUS, mode, op0, op1);
8176 goto binop2;
8177 }
8178 /* Use immed_double_const to ensure that the constant is
8179 truncated according to the mode of OP0, then sign extended
8180 to a HOST_WIDE_INT. Using the constant directly can result
8181 in non-canonical RTL in a 64x32 cross compile. */
8182 constant_part
8183 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8184 (HOST_WIDE_INT) 0,
8185 TYPE_MODE (TREE_TYPE (treeop0)));
8186 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8187 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8188 op0 = force_operand (op0, target);
8189 return REDUCE_BIT_FIELD (op0);
8190 }
8191 }
8192
8193 /* Use TER to expand pointer addition of a negated value
8194 as pointer subtraction. */
8195 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8196 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8197 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8198 && TREE_CODE (treeop1) == SSA_NAME
8199 && TYPE_MODE (TREE_TYPE (treeop0))
8200 == TYPE_MODE (TREE_TYPE (treeop1)))
8201 {
8202 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8203 if (def)
8204 {
8205 treeop1 = gimple_assign_rhs1 (def);
8206 code = MINUS_EXPR;
8207 goto do_minus;
8208 }
8209 }
8210
8211 /* No sense saving up arithmetic to be done
8212 if it's all in the wrong mode to form part of an address.
8213 And force_operand won't know whether to sign-extend or
8214 zero-extend. */
8215 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8216 || mode != ptr_mode)
8217 {
8218 expand_operands (treeop0, treeop1,
8219 subtarget, &op0, &op1, EXPAND_NORMAL);
8220 if (op0 == const0_rtx)
8221 return op1;
8222 if (op1 == const0_rtx)
8223 return op0;
8224 goto binop2;
8225 }
8226
8227 expand_operands (treeop0, treeop1,
8228 subtarget, &op0, &op1, modifier);
8229 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8230
8231 case MINUS_EXPR:
8232 do_minus:
8233 /* For initializers, we are allowed to return a MINUS of two
8234 symbolic constants. Here we handle all cases when both operands
8235 are constant. */
8236 /* Handle difference of two symbolic constants,
8237 for the sake of an initializer. */
8238 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8239 && really_constant_p (treeop0)
8240 && really_constant_p (treeop1))
8241 {
8242 expand_operands (treeop0, treeop1,
8243 NULL_RTX, &op0, &op1, modifier);
8244
8245 /* If the last operand is a CONST_INT, use plus_constant of
8246 the negated constant. Else make the MINUS. */
8247 if (CONST_INT_P (op1))
8248 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8249 -INTVAL (op1)));
8250 else
8251 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8252 }
8253
8254 /* No sense saving up arithmetic to be done
8255 if it's all in the wrong mode to form part of an address.
8256 And force_operand won't know whether to sign-extend or
8257 zero-extend. */
8258 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8259 || mode != ptr_mode)
8260 goto binop;
8261
8262 expand_operands (treeop0, treeop1,
8263 subtarget, &op0, &op1, modifier);
8264
8265 /* Convert A - const to A + (-const). */
8266 if (CONST_INT_P (op1))
8267 {
8268 op1 = negate_rtx (mode, op1);
8269 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8270 }
8271
8272 goto binop2;
8273
8274 case WIDEN_MULT_PLUS_EXPR:
8275 case WIDEN_MULT_MINUS_EXPR:
8276 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8277 op2 = expand_normal (treeop2);
8278 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8279 target, unsignedp);
8280 return target;
8281
8282 case WIDEN_MULT_EXPR:
8283 /* If first operand is constant, swap them.
8284 Thus the following special case checks need only
8285 check the second operand. */
8286 if (TREE_CODE (treeop0) == INTEGER_CST)
8287 {
8288 tree t1 = treeop0;
8289 treeop0 = treeop1;
8290 treeop1 = t1;
8291 }
8292
8293 /* First, check if we have a multiplication of one signed and one
8294 unsigned operand. */
8295 if (TREE_CODE (treeop1) != INTEGER_CST
8296 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8297 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8298 {
8299 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8300 this_optab = usmul_widen_optab;
8301 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8302 != CODE_FOR_nothing)
8303 {
8304 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8305 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8306 EXPAND_NORMAL);
8307 else
8308 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8309 EXPAND_NORMAL);
8310 /* op0 and op1 might still be constant, despite the above
8311 != INTEGER_CST check. Handle it. */
8312 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8313 {
8314 op0 = convert_modes (innermode, mode, op0, true);
8315 op1 = convert_modes (innermode, mode, op1, false);
8316 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8317 target, unsignedp));
8318 }
8319 goto binop3;
8320 }
8321 }
8322 /* Check for a multiplication with matching signedness. */
8323 else if ((TREE_CODE (treeop1) == INTEGER_CST
8324 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8325 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8326 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8327 {
8328 tree op0type = TREE_TYPE (treeop0);
8329 enum machine_mode innermode = TYPE_MODE (op0type);
8330 bool zextend_p = TYPE_UNSIGNED (op0type);
8331 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8332 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8333
8334 if (TREE_CODE (treeop0) != INTEGER_CST)
8335 {
8336 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8337 != CODE_FOR_nothing)
8338 {
8339 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8340 EXPAND_NORMAL);
8341 /* op0 and op1 might still be constant, despite the above
8342 != INTEGER_CST check. Handle it. */
8343 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8344 {
8345 widen_mult_const:
8346 op0 = convert_modes (innermode, mode, op0, zextend_p);
8347 op1
8348 = convert_modes (innermode, mode, op1,
8349 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8350 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8351 target,
8352 unsignedp));
8353 }
8354 temp = expand_widening_mult (mode, op0, op1, target,
8355 unsignedp, this_optab);
8356 return REDUCE_BIT_FIELD (temp);
8357 }
8358 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8359 != CODE_FOR_nothing
8360 && innermode == word_mode)
8361 {
8362 rtx htem, hipart;
8363 op0 = expand_normal (treeop0);
8364 if (TREE_CODE (treeop1) == INTEGER_CST)
8365 op1 = convert_modes (innermode, mode,
8366 expand_normal (treeop1),
8367 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8368 else
8369 op1 = expand_normal (treeop1);
8370 /* op0 and op1 might still be constant, despite the above
8371 != INTEGER_CST check. Handle it. */
8372 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8373 goto widen_mult_const;
8374 temp = expand_binop (mode, other_optab, op0, op1, target,
8375 unsignedp, OPTAB_LIB_WIDEN);
8376 hipart = gen_highpart (innermode, temp);
8377 htem = expand_mult_highpart_adjust (innermode, hipart,
8378 op0, op1, hipart,
8379 zextend_p);
8380 if (htem != hipart)
8381 emit_move_insn (hipart, htem);
8382 return REDUCE_BIT_FIELD (temp);
8383 }
8384 }
8385 }
8386 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8387 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8388 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8389 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8390
8391 case FMA_EXPR:
8392 {
8393 optab opt = fma_optab;
8394 gimple def0, def2;
8395
8396 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8397 call. */
8398 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8399 {
8400 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8401 tree call_expr;
8402
8403 gcc_assert (fn != NULL_TREE);
8404 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8405 return expand_builtin (call_expr, target, subtarget, mode, false);
8406 }
8407
8408 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8409 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8410
8411 op0 = op2 = NULL;
8412
8413 if (def0 && def2
8414 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8415 {
8416 opt = fnms_optab;
8417 op0 = expand_normal (gimple_assign_rhs1 (def0));
8418 op2 = expand_normal (gimple_assign_rhs1 (def2));
8419 }
8420 else if (def0
8421 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8422 {
8423 opt = fnma_optab;
8424 op0 = expand_normal (gimple_assign_rhs1 (def0));
8425 }
8426 else if (def2
8427 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8428 {
8429 opt = fms_optab;
8430 op2 = expand_normal (gimple_assign_rhs1 (def2));
8431 }
8432
8433 if (op0 == NULL)
8434 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8435 if (op2 == NULL)
8436 op2 = expand_normal (treeop2);
8437 op1 = expand_normal (treeop1);
8438
8439 return expand_ternary_op (TYPE_MODE (type), opt,
8440 op0, op1, op2, target, 0);
8441 }
8442
8443 case MULT_EXPR:
8444 /* If this is a fixed-point operation, then we cannot use the code
8445 below because "expand_mult" doesn't support sat/no-sat fixed-point
8446 multiplications. */
8447 if (ALL_FIXED_POINT_MODE_P (mode))
8448 goto binop;
8449
8450 /* If first operand is constant, swap them.
8451 Thus the following special case checks need only
8452 check the second operand. */
8453 if (TREE_CODE (treeop0) == INTEGER_CST)
8454 {
8455 tree t1 = treeop0;
8456 treeop0 = treeop1;
8457 treeop1 = t1;
8458 }
8459
8460 /* Attempt to return something suitable for generating an
8461 indexed address, for machines that support that. */
8462
8463 if (modifier == EXPAND_SUM && mode == ptr_mode
8464 && host_integerp (treeop1, 0))
8465 {
8466 tree exp1 = treeop1;
8467
8468 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8469 EXPAND_SUM);
8470
8471 if (!REG_P (op0))
8472 op0 = force_operand (op0, NULL_RTX);
8473 if (!REG_P (op0))
8474 op0 = copy_to_mode_reg (mode, op0);
8475
8476 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8477 gen_int_mode (tree_low_cst (exp1, 0),
8478 TYPE_MODE (TREE_TYPE (exp1)))));
8479 }
8480
8481 if (modifier == EXPAND_STACK_PARM)
8482 target = 0;
8483
8484 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8485 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8486
8487 case TRUNC_DIV_EXPR:
8488 case FLOOR_DIV_EXPR:
8489 case CEIL_DIV_EXPR:
8490 case ROUND_DIV_EXPR:
8491 case EXACT_DIV_EXPR:
8492 /* If this is a fixed-point operation, then we cannot use the code
8493 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8494 divisions. */
8495 if (ALL_FIXED_POINT_MODE_P (mode))
8496 goto binop;
8497
8498 if (modifier == EXPAND_STACK_PARM)
8499 target = 0;
8500 /* Possible optimization: compute the dividend with EXPAND_SUM
8501 then, if the divisor is constant, optimize the case
8502 where some terms of the dividend have coefficients divisible by it. */
8503 expand_operands (treeop0, treeop1,
8504 subtarget, &op0, &op1, EXPAND_NORMAL);
8505 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8506
8507 case RDIV_EXPR:
8508 goto binop;
8509
8510 case MULT_HIGHPART_EXPR:
8511 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8512 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8513 gcc_assert (temp);
8514 return temp;
8515
8516 case TRUNC_MOD_EXPR:
8517 case FLOOR_MOD_EXPR:
8518 case CEIL_MOD_EXPR:
8519 case ROUND_MOD_EXPR:
8520 if (modifier == EXPAND_STACK_PARM)
8521 target = 0;
8522 expand_operands (treeop0, treeop1,
8523 subtarget, &op0, &op1, EXPAND_NORMAL);
8524 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8525
8526 case FIXED_CONVERT_EXPR:
8527 op0 = expand_normal (treeop0);
8528 if (target == 0 || modifier == EXPAND_STACK_PARM)
8529 target = gen_reg_rtx (mode);
8530
8531 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8532 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8533 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8534 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8535 else
8536 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8537 return target;
8538
8539 case FIX_TRUNC_EXPR:
8540 op0 = expand_normal (treeop0);
8541 if (target == 0 || modifier == EXPAND_STACK_PARM)
8542 target = gen_reg_rtx (mode);
8543 expand_fix (target, op0, unsignedp);
8544 return target;
8545
8546 case FLOAT_EXPR:
8547 op0 = expand_normal (treeop0);
8548 if (target == 0 || modifier == EXPAND_STACK_PARM)
8549 target = gen_reg_rtx (mode);
8550 /* expand_float can't figure out what to do if FROM has VOIDmode.
8551 So give it the correct mode. With -O, cse will optimize this. */
8552 if (GET_MODE (op0) == VOIDmode)
8553 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8554 op0);
8555 expand_float (target, op0,
8556 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8557 return target;
8558
8559 case NEGATE_EXPR:
8560 op0 = expand_expr (treeop0, subtarget,
8561 VOIDmode, EXPAND_NORMAL);
8562 if (modifier == EXPAND_STACK_PARM)
8563 target = 0;
8564 temp = expand_unop (mode,
8565 optab_for_tree_code (NEGATE_EXPR, type,
8566 optab_default),
8567 op0, target, 0);
8568 gcc_assert (temp);
8569 return REDUCE_BIT_FIELD (temp);
8570
8571 case ABS_EXPR:
8572 op0 = expand_expr (treeop0, subtarget,
8573 VOIDmode, EXPAND_NORMAL);
8574 if (modifier == EXPAND_STACK_PARM)
8575 target = 0;
8576
8577 /* ABS_EXPR is not valid for complex arguments. */
8578 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8579 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8580
8581 /* Unsigned abs is simply the operand. Testing here means we don't
8582 risk generating incorrect code below. */
8583 if (TYPE_UNSIGNED (type))
8584 return op0;
8585
8586 return expand_abs (mode, op0, target, unsignedp,
8587 safe_from_p (target, treeop0, 1));
8588
8589 case MAX_EXPR:
8590 case MIN_EXPR:
8591 target = original_target;
8592 if (target == 0
8593 || modifier == EXPAND_STACK_PARM
8594 || (MEM_P (target) && MEM_VOLATILE_P (target))
8595 || GET_MODE (target) != mode
8596 || (REG_P (target)
8597 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8598 target = gen_reg_rtx (mode);
8599 expand_operands (treeop0, treeop1,
8600 target, &op0, &op1, EXPAND_NORMAL);
8601
8602 /* First try to do it with a special MIN or MAX instruction.
8603 If that does not win, use a conditional jump to select the proper
8604 value. */
8605 this_optab = optab_for_tree_code (code, type, optab_default);
8606 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8607 OPTAB_WIDEN);
8608 if (temp != 0)
8609 return temp;
8610
8611 /* At this point, a MEM target is no longer useful; we will get better
8612 code without it. */
8613
8614 if (! REG_P (target))
8615 target = gen_reg_rtx (mode);
8616
8617 /* If op1 was placed in target, swap op0 and op1. */
8618 if (target != op0 && target == op1)
8619 {
8620 temp = op0;
8621 op0 = op1;
8622 op1 = temp;
8623 }
8624
8625 /* We generate better code and avoid problems with op1 mentioning
8626 target by forcing op1 into a pseudo if it isn't a constant. */
8627 if (! CONSTANT_P (op1))
8628 op1 = force_reg (mode, op1);
8629
8630 {
8631 enum rtx_code comparison_code;
8632 rtx cmpop1 = op1;
8633
8634 if (code == MAX_EXPR)
8635 comparison_code = unsignedp ? GEU : GE;
8636 else
8637 comparison_code = unsignedp ? LEU : LE;
8638
8639 /* Canonicalize to comparisons against 0. */
8640 if (op1 == const1_rtx)
8641 {
8642 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8643 or (a != 0 ? a : 1) for unsigned.
8644 For MIN we are safe converting (a <= 1 ? a : 1)
8645 into (a <= 0 ? a : 1) */
8646 cmpop1 = const0_rtx;
8647 if (code == MAX_EXPR)
8648 comparison_code = unsignedp ? NE : GT;
8649 }
8650 if (op1 == constm1_rtx && !unsignedp)
8651 {
8652 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8653 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8654 cmpop1 = const0_rtx;
8655 if (code == MIN_EXPR)
8656 comparison_code = LT;
8657 }
8658 #ifdef HAVE_conditional_move
8659 /* Use a conditional move if possible. */
8660 if (can_conditionally_move_p (mode))
8661 {
8662 rtx insn;
8663
8664 /* ??? Same problem as in expmed.c: emit_conditional_move
8665 forces a stack adjustment via compare_from_rtx, and we
8666 lose the stack adjustment if the sequence we are about
8667 to create is discarded. */
8668 do_pending_stack_adjust ();
8669
8670 start_sequence ();
8671
8672 /* Try to emit the conditional move. */
8673 insn = emit_conditional_move (target, comparison_code,
8674 op0, cmpop1, mode,
8675 op0, op1, mode,
8676 unsignedp);
8677
8678 /* If we could do the conditional move, emit the sequence,
8679 and return. */
8680 if (insn)
8681 {
8682 rtx seq = get_insns ();
8683 end_sequence ();
8684 emit_insn (seq);
8685 return target;
8686 }
8687
8688 /* Otherwise discard the sequence and fall back to code with
8689 branches. */
8690 end_sequence ();
8691 }
8692 #endif
8693 if (target != op0)
8694 emit_move_insn (target, op0);
8695
8696 temp = gen_label_rtx ();
8697 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8698 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8699 -1);
8700 }
8701 emit_move_insn (target, op1);
8702 emit_label (temp);
8703 return target;
8704
8705 case BIT_NOT_EXPR:
8706 op0 = expand_expr (treeop0, subtarget,
8707 VOIDmode, EXPAND_NORMAL);
8708 if (modifier == EXPAND_STACK_PARM)
8709 target = 0;
8710 /* In case we have to reduce the result to bitfield precision
8711 for unsigned bitfield expand this as XOR with a proper constant
8712 instead. */
8713 if (reduce_bit_field && TYPE_UNSIGNED (type))
8714 temp = expand_binop (mode, xor_optab, op0,
8715 immed_double_int_const
8716 (double_int::mask (TYPE_PRECISION (type)), mode),
8717 target, 1, OPTAB_LIB_WIDEN);
8718 else
8719 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8720 gcc_assert (temp);
8721 return temp;
8722
8723 /* ??? Can optimize bitwise operations with one arg constant.
8724 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8725 and (a bitwise1 b) bitwise2 b (etc)
8726 but that is probably not worth while. */
8727
8728 case BIT_AND_EXPR:
8729 case BIT_IOR_EXPR:
8730 case BIT_XOR_EXPR:
8731 goto binop;
8732
8733 case LROTATE_EXPR:
8734 case RROTATE_EXPR:
8735 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8736 || (GET_MODE_PRECISION (TYPE_MODE (type))
8737 == TYPE_PRECISION (type)));
8738 /* fall through */
8739
8740 case LSHIFT_EXPR:
8741 case RSHIFT_EXPR:
8742 /* If this is a fixed-point operation, then we cannot use the code
8743 below because "expand_shift" doesn't support sat/no-sat fixed-point
8744 shifts. */
8745 if (ALL_FIXED_POINT_MODE_P (mode))
8746 goto binop;
8747
8748 if (! safe_from_p (subtarget, treeop1, 1))
8749 subtarget = 0;
8750 if (modifier == EXPAND_STACK_PARM)
8751 target = 0;
8752 op0 = expand_expr (treeop0, subtarget,
8753 VOIDmode, EXPAND_NORMAL);
8754 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8755 unsignedp);
8756 if (code == LSHIFT_EXPR)
8757 temp = REDUCE_BIT_FIELD (temp);
8758 return temp;
8759
8760 /* Could determine the answer when only additive constants differ. Also,
8761 the addition of one can be handled by changing the condition. */
8762 case LT_EXPR:
8763 case LE_EXPR:
8764 case GT_EXPR:
8765 case GE_EXPR:
8766 case EQ_EXPR:
8767 case NE_EXPR:
8768 case UNORDERED_EXPR:
8769 case ORDERED_EXPR:
8770 case UNLT_EXPR:
8771 case UNLE_EXPR:
8772 case UNGT_EXPR:
8773 case UNGE_EXPR:
8774 case UNEQ_EXPR:
8775 case LTGT_EXPR:
8776 temp = do_store_flag (ops,
8777 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8778 tmode != VOIDmode ? tmode : mode);
8779 if (temp)
8780 return temp;
8781
8782 /* Use a compare and a jump for BLKmode comparisons, or for function
8783 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
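/* The fallback emitted below is:
     target = 0;
     if (!(treeop0 <code> treeop1)) goto op1;
     target = 1;   (or -1 for a signed one-bit type)
   op1:  */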
8784
8785 if ((target == 0
8786 || modifier == EXPAND_STACK_PARM
8787 || ! safe_from_p (target, treeop0, 1)
8788 || ! safe_from_p (target, treeop1, 1)
8789 /* Make sure we don't have a hard reg (such as function's return
8790 value) live across basic blocks, if not optimizing. */
8791 || (!optimize && REG_P (target)
8792 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8793 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8794
8795 emit_move_insn (target, const0_rtx);
8796
8797 op1 = gen_label_rtx ();
8798 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8799
8800 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8801 emit_move_insn (target, constm1_rtx);
8802 else
8803 emit_move_insn (target, const1_rtx);
8804
8805 emit_label (op1);
8806 return target;
8807
8808 case COMPLEX_EXPR:
8809 /* Get the rtx code of the operands. */
8810 op0 = expand_normal (treeop0);
8811 op1 = expand_normal (treeop1);
8812
8813 if (!target)
8814 target = gen_reg_rtx (TYPE_MODE (type));
8815 else
8816 /* If target overlaps with op1, then either we need to force
8817 op1 into a pseudo (if target also overlaps with op0),
8818 or write the complex parts in reverse order. */
8819 switch (GET_CODE (target))
8820 {
8821 case CONCAT:
8822 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8823 {
8824 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8825 {
8826 complex_expr_force_op1:
8827 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8828 emit_move_insn (temp, op1);
8829 op1 = temp;
8830 break;
8831 }
8832 complex_expr_swap_order:
8833 /* Move the imaginary (op1) and real (op0) parts to their
8834 location. */
8835 write_complex_part (target, op1, true);
8836 write_complex_part (target, op0, false);
8837
8838 return target;
8839 }
8840 break;
8841 case MEM:
8842 temp = adjust_address_nv (target,
8843 GET_MODE_INNER (GET_MODE (target)), 0);
8844 if (reg_overlap_mentioned_p (temp, op1))
8845 {
8846 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8847 temp = adjust_address_nv (target, imode,
8848 GET_MODE_SIZE (imode));
8849 if (reg_overlap_mentioned_p (temp, op0))
8850 goto complex_expr_force_op1;
8851 goto complex_expr_swap_order;
8852 }
8853 break;
8854 default:
8855 if (reg_overlap_mentioned_p (target, op1))
8856 {
8857 if (reg_overlap_mentioned_p (target, op0))
8858 goto complex_expr_force_op1;
8859 goto complex_expr_swap_order;
8860 }
8861 break;
8862 }
8863
8864 /* Move the real (op0) and imaginary (op1) parts to their location. */
8865 write_complex_part (target, op0, false);
8866 write_complex_part (target, op1, true);
8867
8868 return target;
8869
8870 case WIDEN_SUM_EXPR:
8871 {
8872 tree oprnd0 = treeop0;
8873 tree oprnd1 = treeop1;
8874
8875 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8876 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8877 target, unsignedp);
8878 return target;
8879 }
8880
8881 case REDUC_MAX_EXPR:
8882 case REDUC_MIN_EXPR:
8883 case REDUC_PLUS_EXPR:
8884 {
8885 op0 = expand_normal (treeop0);
8886 this_optab = optab_for_tree_code (code, type, optab_default);
8887 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8888 gcc_assert (temp);
8889 return temp;
8890 }
8891
8892 case VEC_LSHIFT_EXPR:
8893 case VEC_RSHIFT_EXPR:
8894 {
8895 target = expand_vec_shift_expr (ops, target);
8896 return target;
8897 }
8898
8899 case VEC_UNPACK_HI_EXPR:
8900 case VEC_UNPACK_LO_EXPR:
8901 {
8902 op0 = expand_normal (treeop0);
8903 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8904 target, unsignedp);
8905 gcc_assert (temp);
8906 return temp;
8907 }
8908
8909 case VEC_UNPACK_FLOAT_HI_EXPR:
8910 case VEC_UNPACK_FLOAT_LO_EXPR:
8911 {
8912 op0 = expand_normal (treeop0);
8913 /* The signedness is determined from the input operand. */
8914 temp = expand_widen_pattern_expr
8915 (ops, op0, NULL_RTX, NULL_RTX,
8916 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8917
8918 gcc_assert (temp);
8919 return temp;
8920 }
8921
8922 case VEC_WIDEN_MULT_HI_EXPR:
8923 case VEC_WIDEN_MULT_LO_EXPR:
8924 case VEC_WIDEN_MULT_EVEN_EXPR:
8925 case VEC_WIDEN_MULT_ODD_EXPR:
8926 case VEC_WIDEN_LSHIFT_HI_EXPR:
8927 case VEC_WIDEN_LSHIFT_LO_EXPR:
8928 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8929 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8930 target, unsignedp);
8931 gcc_assert (target);
8932 return target;
8933
8934 case VEC_PACK_TRUNC_EXPR:
8935 case VEC_PACK_SAT_EXPR:
8936 case VEC_PACK_FIX_TRUNC_EXPR:
8937 mode = TYPE_MODE (TREE_TYPE (treeop0));
8938 goto binop;
8939
8940 case VEC_PERM_EXPR:
8941 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8942 op2 = expand_normal (treeop2);
8943
8944 /* Careful here: if the target doesn't support integral vector modes,
8945 a constant selection vector could wind up smooshed into a normal
8946 integral constant. */
8947 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8948 {
8949 tree sel_type = TREE_TYPE (treeop2);
8950 enum machine_mode vmode
8951 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8952 TYPE_VECTOR_SUBPARTS (sel_type));
8953 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8954 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8955 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8956 }
8957 else
8958 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8959
8960 temp = expand_vec_perm (mode, op0, op1, op2, target);
8961 gcc_assert (temp);
8962 return temp;
8963
8964 case DOT_PROD_EXPR:
8965 {
8966 tree oprnd0 = treeop0;
8967 tree oprnd1 = treeop1;
8968 tree oprnd2 = treeop2;
8969 rtx op2;
8970
8971 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8972 op2 = expand_normal (oprnd2);
8973 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8974 target, unsignedp);
8975 return target;
8976 }
8977
8978 case REALIGN_LOAD_EXPR:
8979 {
8980 tree oprnd0 = treeop0;
8981 tree oprnd1 = treeop1;
8982 tree oprnd2 = treeop2;
8983 rtx op2;
8984
8985 this_optab = optab_for_tree_code (code, type, optab_default);
8986 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8987 op2 = expand_normal (oprnd2);
8988 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8989 target, unsignedp);
8990 gcc_assert (temp);
8991 return temp;
8992 }
8993
8994 case COND_EXPR:
8995 /* A COND_EXPR with its type being VOID_TYPE represents a
8996 conditional jump and is handled in
8997 expand_gimple_cond_expr. */
8998 gcc_assert (!VOID_TYPE_P (type));
8999
9000 /* Note that COND_EXPRs whose type is a structure or union
9001 are required to be constructed to contain assignments of
9002 a temporary variable, so that we can evaluate them here
9003 for side effect only. If type is void, we must do likewise. */
9004
9005 gcc_assert (!TREE_ADDRESSABLE (type)
9006 && !ignore
9007 && TREE_TYPE (treeop1) != void_type_node
9008 && TREE_TYPE (treeop2) != void_type_node);
9009
9010 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9011 if (temp)
9012 return temp;
9013
9014 /* If we are not to produce a result, we have no target. Otherwise,
9015 if a target was specified use it; it will not be used as an
9016 intermediate target unless it is safe. If no target, use a
9017 temporary. */
9018
9019 if (modifier != EXPAND_STACK_PARM
9020 && original_target
9021 && safe_from_p (original_target, treeop0, 1)
9022 && GET_MODE (original_target) == mode
9023 && !MEM_P (original_target))
9024 temp = original_target;
9025 else
9026 temp = assign_temp (type, 0, 1);
9027
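/* Expand the conditional as a diamond:
     if (!treeop0) goto op0;
     temp = treeop1;  goto op1;
   op0:
     temp = treeop2;
   op1:  */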
9028 do_pending_stack_adjust ();
9029 NO_DEFER_POP;
9030 op0 = gen_label_rtx ();
9031 op1 = gen_label_rtx ();
9032 jumpifnot (treeop0, op0, -1);
9033 store_expr (treeop1, temp,
9034 modifier == EXPAND_STACK_PARM,
9035 false);
9036
9037 emit_jump_insn (gen_jump (op1));
9038 emit_barrier ();
9039 emit_label (op0);
9040 store_expr (treeop2, temp,
9041 modifier == EXPAND_STACK_PARM,
9042 false);
9043
9044 emit_label (op1);
9045 OK_DEFER_POP;
9046 return temp;
9047
9048 case VEC_COND_EXPR:
9049 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9050 return target;
9051
9052 default:
9053 gcc_unreachable ();
9054 }
9055
9056 /* Here to do an ordinary binary operator. */
9057 binop:
9058 expand_operands (treeop0, treeop1,
9059 subtarget, &op0, &op1, EXPAND_NORMAL);
9060 binop2:
9061 this_optab = optab_for_tree_code (code, type, optab_default);
9062 binop3:
9063 if (modifier == EXPAND_STACK_PARM)
9064 target = 0;
9065 temp = expand_binop (mode, this_optab, op0, op1, target,
9066 unsignedp, OPTAB_LIB_WIDEN);
9067 gcc_assert (temp);
9068 /* Bitwise operations do not need bitfield reduction as we expect their
9069 operands to be properly truncated. */
9070 if (code == BIT_XOR_EXPR
9071 || code == BIT_AND_EXPR
9072 || code == BIT_IOR_EXPR)
9073 return temp;
9074 return REDUCE_BIT_FIELD (temp);
9075 }
9076 #undef REDUCE_BIT_FIELD
9077
9078
9079 /* Return TRUE if expression STMT is suitable for replacement.
9080 Never consider memory loads as replaceable, because those don't ever lead
9081 into constant expressions. */
9082
9083 static bool
9084 stmt_is_replaceable_p (gimple stmt)
9085 {
9086 if (ssa_is_replaceable_p (stmt))
9087 {
9088 /* Don't move around loads. */
9089 if (!gimple_assign_single_p (stmt)
9090 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9091 return true;
9092 }
9093 return false;
9094 }
9095
9096 rtx
9097 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9098 enum expand_modifier modifier, rtx *alt_rtl)
9099 {
9100 rtx op0, op1, temp, decl_rtl;
9101 tree type;
9102 int unsignedp;
9103 enum machine_mode mode;
9104 enum tree_code code = TREE_CODE (exp);
9105 rtx subtarget, original_target;
9106 int ignore;
9107 tree context;
9108 bool reduce_bit_field;
9109 location_t loc = EXPR_LOCATION (exp);
9110 struct separate_ops ops;
9111 tree treeop0, treeop1, treeop2;
9112 tree ssa_name = NULL_TREE;
9113 gimple g;
9114
9115 type = TREE_TYPE (exp);
9116 mode = TYPE_MODE (type);
9117 unsignedp = TYPE_UNSIGNED (type);
9118
9119 treeop0 = treeop1 = treeop2 = NULL_TREE;
9120 if (!VL_EXP_CLASS_P (exp))
9121 switch (TREE_CODE_LENGTH (code))
9122 {
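/* The cases below deliberately fall through so that every operand
   present for this code is fetched.  */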
9123 default:
9124 case 3: treeop2 = TREE_OPERAND (exp, 2);
9125 case 2: treeop1 = TREE_OPERAND (exp, 1);
9126 case 1: treeop0 = TREE_OPERAND (exp, 0);
9127 case 0: break;
9128 }
9129 ops.code = code;
9130 ops.type = type;
9131 ops.op0 = treeop0;
9132 ops.op1 = treeop1;
9133 ops.op2 = treeop2;
9134 ops.location = loc;
9135
9136 ignore = (target == const0_rtx
9137 || ((CONVERT_EXPR_CODE_P (code)
9138 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9139 && TREE_CODE (type) == VOID_TYPE));
9140
9141 /* An operation in what may be a bit-field type needs the
9142 result to be reduced to the precision of the bit-field type,
9143 which is narrower than that of the type's mode. */
9144 reduce_bit_field = (!ignore
9145 && INTEGRAL_TYPE_P (type)
9146 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9147
9148 /* If we are going to ignore this result, we need only do something
9149 if there is a side-effect somewhere in the expression. If there
9150 is, short-circuit the most common cases here. Note that we must
9151 not call expand_expr with anything but const0_rtx in case this
9152 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9153
9154 if (ignore)
9155 {
9156 if (! TREE_SIDE_EFFECTS (exp))
9157 return const0_rtx;
9158
9159 /* Ensure we reference a volatile object even if value is ignored, but
9160 don't do this if all we are doing is taking its address. */
9161 if (TREE_THIS_VOLATILE (exp)
9162 && TREE_CODE (exp) != FUNCTION_DECL
9163 && mode != VOIDmode && mode != BLKmode
9164 && modifier != EXPAND_CONST_ADDRESS)
9165 {
9166 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9167 if (MEM_P (temp))
9168 copy_to_reg (temp);
9169 return const0_rtx;
9170 }
9171
9172 if (TREE_CODE_CLASS (code) == tcc_unary
9173 || code == BIT_FIELD_REF
9174 || code == COMPONENT_REF
9175 || code == INDIRECT_REF)
9176 return expand_expr (treeop0, const0_rtx, VOIDmode,
9177 modifier);
9178
9179 else if (TREE_CODE_CLASS (code) == tcc_binary
9180 || TREE_CODE_CLASS (code) == tcc_comparison
9181 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9182 {
9183 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9184 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9185 return const0_rtx;
9186 }
9187
9188 target = 0;
9189 }
9190
9191 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9192 target = 0;
9193
9194 /* Use subtarget as the target for operand 0 of a binary operation. */
9195 subtarget = get_subtarget (target);
9196 original_target = target;
9197
9198 switch (code)
9199 {
9200 case LABEL_DECL:
9201 {
9202 tree function = decl_function_context (exp);
9203
9204 temp = label_rtx (exp);
9205 temp = gen_rtx_LABEL_REF (Pmode, temp);
9206
9207 if (function != current_function_decl
9208 && function != 0)
9209 LABEL_REF_NONLOCAL_P (temp) = 1;
9210
9211 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9212 return temp;
9213 }
9214
9215 case SSA_NAME:
9216 /* ??? ivopts calls the expander without any preparation from
9217 out-of-ssa. So fake instructions as if this were an access to the
9218 base variable. This unnecessarily allocates a pseudo; see how we can
9219 reuse it if partition base vars have it set already. */
9220 if (!currently_expanding_to_rtl)
9221 {
9222 tree var = SSA_NAME_VAR (exp);
9223 if (var && DECL_RTL_SET_P (var))
9224 return DECL_RTL (var);
9225 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9226 LAST_VIRTUAL_REGISTER + 1);
9227 }
9228
9229 g = get_gimple_for_ssa_name (exp);
9230 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9231 if (g == NULL
9232 && modifier == EXPAND_INITIALIZER
9233 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9234 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9235 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9236 g = SSA_NAME_DEF_STMT (exp);
9237 if (g)
9238 {
9239 rtx r;
9240 location_t saved_loc = curr_insn_location ();
9241
9242 set_curr_insn_location (gimple_location (g));
9243 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9244 tmode, modifier, NULL);
9245 set_curr_insn_location (saved_loc);
9246 if (REG_P (r) && !REG_EXPR (r))
9247 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9248 return r;
9249 }
9250
9251 ssa_name = exp;
9252 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9253 exp = SSA_NAME_VAR (ssa_name);
9254 goto expand_decl_rtl;
9255
9256 case PARM_DECL:
9257 case VAR_DECL:
9258 /* If a static var's type was incomplete when the decl was written,
9259 but the type is complete now, lay out the decl now. */
9260 if (DECL_SIZE (exp) == 0
9261 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9262 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9263 layout_decl (exp, 0);
9264
9265 /* ... fall through ... */
9266
9267 case FUNCTION_DECL:
9268 case RESULT_DECL:
9269 decl_rtl = DECL_RTL (exp);
9270 expand_decl_rtl:
9271 gcc_assert (decl_rtl);
9272 decl_rtl = copy_rtx (decl_rtl);
9273 /* Record writes to register variables. */
9274 if (modifier == EXPAND_WRITE
9275 && REG_P (decl_rtl)
9276 && HARD_REGISTER_P (decl_rtl))
9277 add_to_hard_reg_set (&crtl->asm_clobbers,
9278 GET_MODE (decl_rtl), REGNO (decl_rtl));
9279
9280 /* Ensure the variable is marked as used even if it doesn't go through
9281 a parser. If it hasn't been used yet, write out an external
9282 definition. */
9283 TREE_USED (exp) = 1;
9284
9285 /* Show we haven't gotten RTL for this yet. */
9286 temp = 0;
9287
9288 /* Variables inherited from containing functions should have
9289 been lowered by this point. */
9290 context = decl_function_context (exp);
9291 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9292 || context == current_function_decl
9293 || TREE_STATIC (exp)
9294 || DECL_EXTERNAL (exp)
9295 /* ??? C++ creates functions that are not TREE_STATIC. */
9296 || TREE_CODE (exp) == FUNCTION_DECL);
9297
9298 /* This is the case of an array whose size is to be determined
9299 from its initializer, while the initializer is still being parsed.
9300 ??? We aren't parsing while expanding anymore. */
9301
9302 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9303 temp = validize_mem (decl_rtl);
9304
9305 /* If DECL_RTL is memory, we are in the normal case and the
9306 address is not valid, get the address into a register. */
9307
9308 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9309 {
9310 if (alt_rtl)
9311 *alt_rtl = decl_rtl;
9312 decl_rtl = use_anchored_address (decl_rtl);
9313 if (modifier != EXPAND_CONST_ADDRESS
9314 && modifier != EXPAND_SUM
9315 && !memory_address_addr_space_p (DECL_MODE (exp),
9316 XEXP (decl_rtl, 0),
9317 MEM_ADDR_SPACE (decl_rtl)))
9318 temp = replace_equiv_address (decl_rtl,
9319 copy_rtx (XEXP (decl_rtl, 0)));
9320 }
9321
9322 /* If we got something, return it. But first, set the alignment
9323 if the address is a register. */
9324 if (temp != 0)
9325 {
9326 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9327 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9328
9329 return temp;
9330 }
9331
9332 /* If the mode of DECL_RTL does not match that of the decl,
9333 there are two cases: we are dealing with a BLKmode value
9334 that is returned in a register, or we are dealing with
9335 a promoted value. In the latter case, return a SUBREG
9336 of the wanted mode, but mark it so that we know that it
9337 was already extended. */
9338 if (REG_P (decl_rtl)
9339 && DECL_MODE (exp) != BLKmode
9340 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9341 {
9342 enum machine_mode pmode;
9343
9344 /* Get the signedness to be used for this variable. Ensure we get
9345 the same mode we got when the variable was declared. */
9346 if (code == SSA_NAME
9347 && (g = SSA_NAME_DEF_STMT (ssa_name))
9348 && gimple_code (g) == GIMPLE_CALL)
9349 {
9350 gcc_assert (!gimple_call_internal_p (g));
9351 pmode = promote_function_mode (type, mode, &unsignedp,
9352 gimple_call_fntype (g),
9353 2);
9354 }
9355 else
9356 pmode = promote_decl_mode (exp, &unsignedp);
9357 gcc_assert (GET_MODE (decl_rtl) == pmode);
9358
9359 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9360 SUBREG_PROMOTED_VAR_P (temp) = 1;
9361 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9362 return temp;
9363 }
9364
9365 return decl_rtl;
9366
9367 case INTEGER_CST:
9368 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9369 TREE_INT_CST_HIGH (exp), mode);
9370
9371 return temp;
9372
9373 case VECTOR_CST:
9374 {
9375 tree tmp = NULL_TREE;
9376 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9377 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9378 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9379 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9380 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9381 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9382 return const_vector_from_tree (exp);
9383 if (GET_MODE_CLASS (mode) == MODE_INT)
9384 {
9385 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9386 if (type_for_mode)
9387 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9388 }
9389 if (!tmp)
9390 {
9391 vec<constructor_elt, va_gc> *v;
9392 unsigned i;
9393 vec_alloc (v, VECTOR_CST_NELTS (exp));
9394 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9395 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9396 tmp = build_constructor (type, v);
9397 }
9398 return expand_expr (tmp, ignore ? const0_rtx : target,
9399 tmode, modifier);
9400 }
9401
9402 case CONST_DECL:
9403 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9404
9405 case REAL_CST:
9406 /* If optimized, generate immediate CONST_DOUBLE
9407 which will be turned into memory by reload if necessary.
9408
9409 We used to force a register so that loop.c could see it. But
9410 this does not allow gen_* patterns to perform optimizations with
9411 the constants. It also produces two insns in cases like "x = 1.0;".
9412 On most machines, floating-point constants are not permitted in
9413 many insns, so we'd end up copying it to a register in any case.
9414
9415 Now, we do the copying in expand_binop, if appropriate. */
9416 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9417 TYPE_MODE (TREE_TYPE (exp)));
9418
9419 case FIXED_CST:
9420 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9421 TYPE_MODE (TREE_TYPE (exp)));
9422
9423 case COMPLEX_CST:
9424 /* Handle evaluating a complex constant in a CONCAT target. */
9425 if (original_target && GET_CODE (original_target) == CONCAT)
9426 {
9427 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9428 rtx rtarg, itarg;
9429
9430 rtarg = XEXP (original_target, 0);
9431 itarg = XEXP (original_target, 1);
9432
9433 /* Move the real and imaginary parts separately. */
9434 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9435 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9436
9437 if (op0 != rtarg)
9438 emit_move_insn (rtarg, op0);
9439 if (op1 != itarg)
9440 emit_move_insn (itarg, op1);
9441
9442 return original_target;
9443 }
9444
9445 /* ... fall through ... */
9446
9447 case STRING_CST:
9448 temp = expand_expr_constant (exp, 1, modifier);
9449
9450 /* temp contains a constant address.
9451 On RISC machines where a constant address isn't valid,
9452 make some insns to get that address into a register. */
9453 if (modifier != EXPAND_CONST_ADDRESS
9454 && modifier != EXPAND_INITIALIZER
9455 && modifier != EXPAND_SUM
9456 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9457 MEM_ADDR_SPACE (temp)))
9458 return replace_equiv_address (temp,
9459 copy_rtx (XEXP (temp, 0)));
9460 return temp;
9461
9462 case SAVE_EXPR:
9463 {
9464 tree val = treeop0;
9465 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9466
9467 if (!SAVE_EXPR_RESOLVED_P (exp))
9468 {
9469 /* We can indeed still hit this case, typically via builtin
9470 expanders calling save_expr immediately before expanding
9471 something. Assume this means that we only have to deal
9472 with non-BLKmode values. */
9473 gcc_assert (GET_MODE (ret) != BLKmode);
9474
9475 val = build_decl (curr_insn_location (),
9476 VAR_DECL, NULL, TREE_TYPE (exp));
9477 DECL_ARTIFICIAL (val) = 1;
9478 DECL_IGNORED_P (val) = 1;
9479 treeop0 = val;
9480 TREE_OPERAND (exp, 0) = treeop0;
9481 SAVE_EXPR_RESOLVED_P (exp) = 1;
9482
9483 if (!CONSTANT_P (ret))
9484 ret = copy_to_reg (ret);
9485 SET_DECL_RTL (val, ret);
9486 }
9487
9488 return ret;
9489 }
9490
9491
9492 case CONSTRUCTOR:
9493 /* If we don't need the result, just ensure we evaluate any
9494 subexpressions. */
9495 if (ignore)
9496 {
9497 unsigned HOST_WIDE_INT idx;
9498 tree value;
9499
9500 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9501 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9502
9503 return const0_rtx;
9504 }
9505
9506 return expand_constructor (exp, target, modifier, false);
9507
9508 case TARGET_MEM_REF:
9509 {
9510 addr_space_t as
9511 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9512 enum insn_code icode;
9513 unsigned int align;
9514
9515 op0 = addr_for_mem_ref (exp, as, true);
9516 op0 = memory_address_addr_space (mode, op0, as);
9517 temp = gen_rtx_MEM (mode, op0);
9518 set_mem_attributes (temp, exp, 0);
9519 set_mem_addr_space (temp, as);
9520 align = get_object_alignment (exp);
9521 if (modifier != EXPAND_WRITE
9522 && modifier != EXPAND_MEMORY
9523 && mode != BLKmode
9524 && align < GET_MODE_ALIGNMENT (mode)
9525 /* If the target does not have special handling for unaligned
9526 loads of mode then it can use regular moves for them. */
9527 && ((icode = optab_handler (movmisalign_optab, mode))
9528 != CODE_FOR_nothing))
9529 {
9530 struct expand_operand ops[2];
9531
9532 /* We've already validated the memory, and we're creating a
9533 new pseudo destination. The predicates really can't fail,
9534 nor can the generator. */
9535 create_output_operand (&ops[0], NULL_RTX, mode);
9536 create_fixed_operand (&ops[1], temp);
9537 expand_insn (icode, 2, ops);
9538 temp = ops[0].value;
9539 }
9540 return temp;
9541 }
9542
9543 case MEM_REF:
9544 {
9545 addr_space_t as
9546 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9547 enum machine_mode address_mode;
9548 tree base = TREE_OPERAND (exp, 0);
9549 gimple def_stmt;
9550 enum insn_code icode;
9551 unsigned align;
9552 /* Handle expansion of non-aliased memory with non-BLKmode. That
9553 might end up in a register. */
9554 if (mem_ref_refers_to_non_mem_p (exp))
9555 {
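/* Three sub-cases follow: a base that exactly covers the access is
   re-expanded as a VIEW_CONVERT_EXPR; a BLKmode access spills the
   base to a stack temporary; anything else becomes a BIT_FIELD_REF.  */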
9556 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9557 base = TREE_OPERAND (base, 0);
9558 if (offset == 0
9559 && host_integerp (TYPE_SIZE (type), 1)
9560 && (GET_MODE_BITSIZE (DECL_MODE (base))
9561 == TREE_INT_CST_LOW (TYPE_SIZE (type))))
9562 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9563 target, tmode, modifier);
9564 if (TYPE_MODE (type) == BLKmode)
9565 {
9566 temp = assign_stack_temp (DECL_MODE (base),
9567 GET_MODE_SIZE (DECL_MODE (base)));
9568 store_expr (base, temp, 0, false);
9569 temp = adjust_address (temp, BLKmode, offset);
9570 set_mem_size (temp, int_size_in_bytes (type));
9571 return temp;
9572 }
9573 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9574 bitsize_int (offset * BITS_PER_UNIT));
9575 return expand_expr (exp, target, tmode, modifier);
9576 }
9577 address_mode = targetm.addr_space.address_mode (as);
9578 base = TREE_OPERAND (exp, 0);
9579 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9580 {
9581 tree mask = gimple_assign_rhs2 (def_stmt);
9582 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9583 gimple_assign_rhs1 (def_stmt), mask);
9584 TREE_OPERAND (exp, 0) = base;
9585 }
9586 align = get_object_alignment (exp);
9587 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9588 op0 = memory_address_addr_space (mode, op0, as);
9589 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9590 {
9591 rtx off
9592 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9593 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9594 op0 = memory_address_addr_space (mode, op0, as);
9595 }
9596 temp = gen_rtx_MEM (mode, op0);
9597 set_mem_attributes (temp, exp, 0);
9598 set_mem_addr_space (temp, as);
9599 if (TREE_THIS_VOLATILE (exp))
9600 MEM_VOLATILE_P (temp) = 1;
9601 if (modifier != EXPAND_WRITE
9602 && modifier != EXPAND_MEMORY
9603 && mode != BLKmode
9604 && align < GET_MODE_ALIGNMENT (mode))
9605 {
9606 if ((icode = optab_handler (movmisalign_optab, mode))
9607 != CODE_FOR_nothing)
9608 {
9609 struct expand_operand ops[2];
9610
9611 /* We've already validated the memory, and we're creating a
9612 new pseudo destination. The predicates really can't fail,
9613 nor can the generator. */
9614 create_output_operand (&ops[0], NULL_RTX, mode);
9615 create_fixed_operand (&ops[1], temp);
9616 expand_insn (icode, 2, ops);
9617 temp = ops[0].value;
9618 }
9619 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9620 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9621 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9622 (modifier == EXPAND_STACK_PARM
9623 ? NULL_RTX : target),
9624 mode, mode);
9625 }
9626 return temp;
9627 }
9628
9629 case ARRAY_REF:
9630
9631 {
9632 tree array = treeop0;
9633 tree index = treeop1;
9634 tree init;
9635
9636 /* Fold an expression like: "foo"[2].
9637 This is not done in fold so it won't happen inside &.
9638 Don't fold if this is for wide characters since it's too
9639 difficult to do correctly and this is a very rare case. */
9640
9641 if (modifier != EXPAND_CONST_ADDRESS
9642 && modifier != EXPAND_INITIALIZER
9643 && modifier != EXPAND_MEMORY)
9644 {
9645 tree t = fold_read_from_constant_string (exp);
9646
9647 if (t)
9648 return expand_expr (t, target, tmode, modifier);
9649 }
9650
9651 /* If this is a constant index into a constant array,
9652 just get the value from the array. Handle both the cases when
9653 we have an explicit constructor and when our operand is a variable
9654 that was declared const. */
9655
9656 if (modifier != EXPAND_CONST_ADDRESS
9657 && modifier != EXPAND_INITIALIZER
9658 && modifier != EXPAND_MEMORY
9659 && TREE_CODE (array) == CONSTRUCTOR
9660 && ! TREE_SIDE_EFFECTS (array)
9661 && TREE_CODE (index) == INTEGER_CST)
9662 {
9663 unsigned HOST_WIDE_INT ix;
9664 tree field, value;
9665
9666 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9667 field, value)
9668 if (tree_int_cst_equal (field, index))
9669 {
9670 if (!TREE_SIDE_EFFECTS (value))
9671 return expand_expr (fold (value), target, tmode, modifier);
9672 break;
9673 }
9674 }
9675
9676 else if (optimize >= 1
9677 && modifier != EXPAND_CONST_ADDRESS
9678 && modifier != EXPAND_INITIALIZER
9679 && modifier != EXPAND_MEMORY
9680 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9681 && TREE_CODE (index) == INTEGER_CST
9682 && (TREE_CODE (array) == VAR_DECL
9683 || TREE_CODE (array) == CONST_DECL)
9684 && (init = ctor_for_folding (array)) != error_mark_node)
9685 {
9686 if (TREE_CODE (init) == CONSTRUCTOR)
9687 {
9688 unsigned HOST_WIDE_INT ix;
9689 tree field, value;
9690
9691 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9692 field, value)
9693 if (tree_int_cst_equal (field, index))
9694 {
9695 if (TREE_SIDE_EFFECTS (value))
9696 break;
9697
9698 if (TREE_CODE (value) == CONSTRUCTOR)
9699 {
9700 /* If VALUE is a CONSTRUCTOR, this
9701 optimization is only useful if
9702 this doesn't store the CONSTRUCTOR
9703 into memory. If it does, it is more
9704 efficient to just load the data from
9705 the array directly. */
9706 rtx ret = expand_constructor (value, target,
9707 modifier, true);
9708 if (ret == NULL_RTX)
9709 break;
9710 }
9711
9712 return
9713 expand_expr (fold (value), target, tmode, modifier);
9714 }
9715 }
9716 else if (TREE_CODE (init) == STRING_CST)
9717 {
9718 tree low_bound = array_ref_low_bound (exp);
9719 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9720
9721 /* Optimize the special case of a zero lower bound.
9722
9723 We convert the lower bound to sizetype to avoid problems
9724 with constant folding. E.g. suppose the lower bound is
9725 1 and its mode is QI. Without the conversion
9726 (ARRAY + (INDEX - (unsigned char)1))
9727 becomes
9728 (ARRAY + (-(unsigned char)1) + INDEX)
9729 which becomes
9730 (ARRAY + 255 + INDEX). Oops! */
9731 if (!integer_zerop (low_bound))
9732 index1 = size_diffop_loc (loc, index1,
9733 fold_convert_loc (loc, sizetype,
9734 low_bound));
9735
9736 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9737 {
9738 tree type = TREE_TYPE (TREE_TYPE (init));
9739 enum machine_mode mode = TYPE_MODE (type);
9740
9741 if (GET_MODE_CLASS (mode) == MODE_INT
9742 && GET_MODE_SIZE (mode) == 1)
9743 return gen_int_mode (TREE_STRING_POINTER (init)
9744 [TREE_INT_CST_LOW (index1)],
9745 mode);
9746 }
9747 }
9748 }
9749 }
9750 goto normal_inner_ref;
9751
9752 case COMPONENT_REF:
9753 /* If the operand is a CONSTRUCTOR, we can just extract the
9754 appropriate field if it is present. */
9755 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9756 {
9757 unsigned HOST_WIDE_INT idx;
9758 tree field, value;
9759
9760 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9761 idx, field, value)
9762 if (field == treeop1
9763 /* We can normally use the value of the field in the
9764 CONSTRUCTOR. However, if this is a bitfield in
9765 an integral mode that we can fit in a HOST_WIDE_INT,
9766 we must mask only the number of bits in the bitfield,
9767 since this is done implicitly by the constructor. If
9768 the bitfield does not meet either of those conditions,
9769 we can't do this optimization. */
9770 && (! DECL_BIT_FIELD (field)
9771 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9772 && (GET_MODE_PRECISION (DECL_MODE (field))
9773 <= HOST_BITS_PER_WIDE_INT))))
9774 {
9775 if (DECL_BIT_FIELD (field)
9776 && modifier == EXPAND_STACK_PARM)
9777 target = 0;
9778 op0 = expand_expr (value, target, tmode, modifier);
9779 if (DECL_BIT_FIELD (field))
9780 {
9781 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9782 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9783
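/* Reduce the constructor value to BITSIZE bits: mask an unsigned
   field, or shift up and arithmetically back down to sign-extend
   a signed one.  */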
9784 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9785 {
9786 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9787 imode);
9788 op0 = expand_and (imode, op0, op1, target);
9789 }
9790 else
9791 {
9792 int count = GET_MODE_PRECISION (imode) - bitsize;
9793
9794 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9795 target, 0);
9796 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9797 target, 0);
9798 }
9799 }
9800
9801 return op0;
9802 }
9803 }
9804 goto normal_inner_ref;
9805
9806 case BIT_FIELD_REF:
9807 case ARRAY_RANGE_REF:
9808 normal_inner_ref:
9809 {
9810 enum machine_mode mode1, mode2;
9811 HOST_WIDE_INT bitsize, bitpos;
9812 tree offset;
9813 int volatilep = 0, must_force_mem;
9814 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9815 &mode1, &unsignedp, &volatilep, true);
9816 rtx orig_op0, memloc;
9817 bool mem_attrs_from_type = false;
9818
9819 /* If we got back the original object, something is wrong. Perhaps
9820 we are evaluating an expression too early. In any event, don't
9821 infinitely recurse. */
9822 gcc_assert (tem != exp);
9823
9824 /* If TEM's type is a union of variable size, pass TARGET to the inner
9825 computation, since it will need a temporary and TARGET is known
9826 to suffice. This occurs in unchecked conversion in Ada. */
9827 orig_op0 = op0
9828 = expand_expr (tem,
9829 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9830 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9831 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9832 != INTEGER_CST)
9833 && modifier != EXPAND_STACK_PARM
9834 ? target : NULL_RTX),
9835 VOIDmode,
9836 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
9837
9838 /* If the bitfield is volatile, we want to access it in the
9839 field's mode, not the computed mode.
9840 If a MEM has VOIDmode (external with incomplete type),
9841 use BLKmode for it instead. */
9842 if (MEM_P (op0))
9843 {
9844 if (volatilep && flag_strict_volatile_bitfields > 0)
9845 op0 = adjust_address (op0, mode1, 0);
9846 else if (GET_MODE (op0) == VOIDmode)
9847 op0 = adjust_address (op0, BLKmode, 0);
9848 }
9849
9850 mode2
9851 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9852
9853 /* If we have either an offset, a BLKmode result, or a reference
9854 outside the underlying object, we must force it to memory.
9855 Such a case can occur in Ada if we have unchecked conversion
9856 of an expression from a scalar type to an aggregate type or
9857 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9858 passed a partially uninitialized object or a view-conversion
9859 to a larger size. */
9860 must_force_mem = (offset
9861 || mode1 == BLKmode
9862 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9863
9864 /* Handle CONCAT first. */
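/* A reference that covers the whole CONCAT, or exactly its first or
   second half, can be satisfied by returning that part directly;
   any other slice forces the CONCAT into memory.  */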
9865 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9866 {
9867 if (bitpos == 0
9868 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9869 return op0;
9870 if (bitpos == 0
9871 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9872 && bitsize)
9873 {
9874 op0 = XEXP (op0, 0);
9875 mode2 = GET_MODE (op0);
9876 }
9877 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9878 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9879 && bitpos
9880 && bitsize)
9881 {
9882 op0 = XEXP (op0, 1);
9883 bitpos = 0;
9884 mode2 = GET_MODE (op0);
9885 }
9886 else
9887 /* Otherwise force into memory. */
9888 must_force_mem = 1;
9889 }
9890
9891 /* If this is a constant, put it in a register if it is a legitimate
9892 constant and we don't need a memory reference. */
9893 if (CONSTANT_P (op0)
9894 && mode2 != BLKmode
9895 && targetm.legitimate_constant_p (mode2, op0)
9896 && !must_force_mem)
9897 op0 = force_reg (mode2, op0);
9898
9899 /* Otherwise, if this is a constant, try to force it to the constant
9900 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9901 is a legitimate constant. */
9902 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9903 op0 = validize_mem (memloc);
9904
9905 /* Otherwise, if this is a constant or the object is not in memory
9906 and needs to be, put it there. */
9907 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9908 {
9909 tree nt = build_qualified_type (TREE_TYPE (tem),
9910 (TYPE_QUALS (TREE_TYPE (tem))
9911 | TYPE_QUAL_CONST));
9912 memloc = assign_temp (nt, 1, 1);
9913 emit_move_insn (memloc, op0);
9914 op0 = memloc;
9915 mem_attrs_from_type = true;
9916 }
9917
9918 if (offset)
9919 {
9920 enum machine_mode address_mode;
9921 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9922 EXPAND_SUM);
9923
9924 gcc_assert (MEM_P (op0));
9925
9926 address_mode = get_address_mode (op0);
9927 if (GET_MODE (offset_rtx) != address_mode)
9928 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9929
9930 if (GET_MODE (op0) == BLKmode
9931 /* A constant address in OP0 can have VOIDmode, we must
9932 not try to call force_reg in that case. */
9933 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9934 && bitsize != 0
9935 && (bitpos % bitsize) == 0
9936 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9937 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9938 {
9939 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9940 bitpos = 0;
9941 }
9942
9943 op0 = offset_address (op0, offset_rtx,
9944 highest_pow2_factor (offset));
9945 }
9946
9947 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9948 record its alignment as BIGGEST_ALIGNMENT. */
9949 if (MEM_P (op0) && bitpos == 0 && offset != 0
9950 && is_aligning_offset (offset, tem))
9951 set_mem_align (op0, BIGGEST_ALIGNMENT);
9952
9953 /* Don't forget about volatility even if this is a bitfield. */
9954 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9955 {
9956 if (op0 == orig_op0)
9957 op0 = copy_rtx (op0);
9958
9959 MEM_VOLATILE_P (op0) = 1;
9960 }
9961
9962 /* In cases where an aligned union has an unaligned object
9963 as a field, we might be extracting a BLKmode value from
9964 an integer-mode (e.g., SImode) object. Handle this case
9965 by doing the extract into an object as wide as the field
9966 (which we know to be the width of a basic mode), then
9967 storing into memory, and changing the mode to BLKmode. */
9968 if (mode1 == VOIDmode
9969 || REG_P (op0) || GET_CODE (op0) == SUBREG
9970 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9971 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9972 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9973 && modifier != EXPAND_CONST_ADDRESS
9974 && modifier != EXPAND_INITIALIZER
9975 && modifier != EXPAND_MEMORY)
9976 /* If the field is volatile, we always want an aligned
9977 access. Do this in the following two situations:
9978 1. the access is not already naturally
9979 aligned, otherwise "normal" (non-bitfield) volatile fields
9980 become non-addressable.
9981 2. the bitsize is narrower than the access size. Need
9982 to extract bitfields from the access. */
9983 || (volatilep && flag_strict_volatile_bitfields > 0
9984 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9985 || (mode1 != BLKmode
9986 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9987 /* If the field isn't aligned enough to fetch as a memref,
9988 fetch it as a bit field. */
9989 || (mode1 != BLKmode
9990 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9991 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9992 || (MEM_P (op0)
9993 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9994 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9995 && modifier != EXPAND_MEMORY
9996 && ((modifier == EXPAND_CONST_ADDRESS
9997 || modifier == EXPAND_INITIALIZER)
9998 ? STRICT_ALIGNMENT
9999 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10000 || (bitpos % BITS_PER_UNIT != 0)))
10001 /* If the type and the field are a constant size and the
10002 size of the type isn't the same size as the bitfield,
10003 we must use bitfield operations. */
10004 || (bitsize >= 0
10005 && TYPE_SIZE (TREE_TYPE (exp))
10006 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10007 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10008 bitsize)))
10009 {
10010 enum machine_mode ext_mode = mode;
10011
10012 if (ext_mode == BLKmode
10013 && ! (target != 0 && MEM_P (op0)
10014 && MEM_P (target)
10015 && bitpos % BITS_PER_UNIT == 0))
10016 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10017
10018 if (ext_mode == BLKmode)
10019 {
10020 if (target == 0)
10021 target = assign_temp (type, 1, 1);
10022
10023 if (bitsize == 0)
10024 return target;
10025
10026 /* In this case, BITPOS must start at a byte boundary and
10027 TARGET, if specified, must be a MEM. */
10028 gcc_assert (MEM_P (op0)
10029 && (!target || MEM_P (target))
10030 && !(bitpos % BITS_PER_UNIT));
10031
10032 emit_block_move (target,
10033 adjust_address (op0, VOIDmode,
10034 bitpos / BITS_PER_UNIT),
10035 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10036 / BITS_PER_UNIT),
10037 (modifier == EXPAND_STACK_PARM
10038 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10039
10040 return target;
10041 }
10042
10043 op0 = validize_mem (op0);
10044
10045 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10046 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10047
10048 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10049 (modifier == EXPAND_STACK_PARM
10050 ? NULL_RTX : target),
10051 ext_mode, ext_mode);
10052
10053 /* If the result is a record type and BITSIZE is narrower than
10054 the mode of OP0, an integral mode, and this is a big endian
10055 machine, we must put the field into the high-order bits. */
10056 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10057 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10058 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10059 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10060 GET_MODE_BITSIZE (GET_MODE (op0))
10061 - bitsize, op0, 1);
10062
10063 /* If the result type is BLKmode, store the data into a temporary
10064 of the appropriate type, but with the mode corresponding to the
10065 mode for the data we have (op0's mode). It's tempting to make
10066 this a constant type, since we know it's only being stored once,
10067 but that can cause problems if we are taking the address of this
10068 COMPONENT_REF because the MEM of any reference via that address
10069 will have flags corresponding to the type, which will not
10070 necessarily be constant. */
10071 if (mode == BLKmode)
10072 {
10073 rtx new_rtx;
10074
10075 new_rtx = assign_stack_temp_for_type (ext_mode,
10076 GET_MODE_BITSIZE (ext_mode),
10077 type);
10078 emit_move_insn (new_rtx, op0);
10079 op0 = copy_rtx (new_rtx);
10080 PUT_MODE (op0, BLKmode);
10081 }
10082
10083 return op0;
10084 }
10085
10086 /* If the result is BLKmode, use that to access the object
10087 now as well. */
10088 if (mode == BLKmode)
10089 mode1 = BLKmode;
10090
10091 /* Get a reference to just this component. */
10092 if (modifier == EXPAND_CONST_ADDRESS
10093 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10094 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10095 else
10096 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10097
10098 if (op0 == orig_op0)
10099 op0 = copy_rtx (op0);
10100
10101 /* If op0 is a temporary because of forcing to memory, pass only the
10102 type to set_mem_attributes so that the original expression is never
10103 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10104 if (mem_attrs_from_type)
10105 set_mem_attributes (op0, type, 0);
10106 else
10107 set_mem_attributes (op0, exp, 0);
10108
10109 if (REG_P (XEXP (op0, 0)))
10110 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10111
10112 MEM_VOLATILE_P (op0) |= volatilep;
10113 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10114 || modifier == EXPAND_CONST_ADDRESS
10115 || modifier == EXPAND_INITIALIZER)
10116 return op0;
10117
10118 if (target == 0)
10119 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10120
10121 convert_move (target, op0, unsignedp);
10122 return target;
10123 }
10124
10125 case OBJ_TYPE_REF:
10126 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10127
10128 case CALL_EXPR:
10129 /* All valid uses of __builtin_va_arg_pack () are removed during
10130 inlining. */
10131 if (CALL_EXPR_VA_ARG_PACK (exp))
10132 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10133 {
10134 tree fndecl = get_callee_fndecl (exp), attr;
10135
10136 if (fndecl
10137 && (attr = lookup_attribute ("error",
10138 DECL_ATTRIBUTES (fndecl))) != NULL)
10139 error ("%Kcall to %qs declared with attribute error: %s",
10140 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10141 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10142 if (fndecl
10143 && (attr = lookup_attribute ("warning",
10144 DECL_ATTRIBUTES (fndecl))) != NULL)
10145 warning_at (tree_nonartificial_location (exp),
10146 0, "%Kcall to %qs declared with attribute warning: %s",
10147 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10148 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10149
10150 /* Check for a built-in function. */
10151 if (fndecl && DECL_BUILT_IN (fndecl))
10152 {
10153 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10154 return expand_builtin (exp, target, subtarget, tmode, ignore);
10155 }
10156 }
10157 return expand_call (exp, target, ignore);
10158
10159 case VIEW_CONVERT_EXPR:
10160 op0 = NULL_RTX;
10161
10162 /* If we are converting to BLKmode, try to avoid an intermediate
10163 temporary by fetching an inner memory reference. */
10164 if (mode == BLKmode
10165 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10166 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10167 && handled_component_p (treeop0))
10168 {
10169 enum machine_mode mode1;
10170 HOST_WIDE_INT bitsize, bitpos;
10171 tree offset;
10172 int unsignedp;
10173 int volatilep = 0;
10174 tree tem
10175 = get_inner_reference (treeop0, &bitsize, &bitpos,
10176 &offset, &mode1, &unsignedp, &volatilep,
10177 true);
10178 rtx orig_op0;
10179
10180 /* ??? We should work harder and deal with non-zero offsets. */
10181 if (!offset
10182 && (bitpos % BITS_PER_UNIT) == 0
10183 && bitsize >= 0
10184 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10185 {
10186 /* See the normal_inner_ref case for the rationale. */
10187 orig_op0
10188 = expand_expr (tem,
10189 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10190 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10191 != INTEGER_CST)
10192 && modifier != EXPAND_STACK_PARM
10193 ? target : NULL_RTX),
10194 VOIDmode,
10195 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
10196
10197 if (MEM_P (orig_op0))
10198 {
10199 op0 = orig_op0;
10200
10201 /* Get a reference to just this component. */
10202 if (modifier == EXPAND_CONST_ADDRESS
10203 || modifier == EXPAND_SUM
10204 || modifier == EXPAND_INITIALIZER)
10205 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10206 else
10207 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10208
10209 if (op0 == orig_op0)
10210 op0 = copy_rtx (op0);
10211
10212 set_mem_attributes (op0, treeop0, 0);
10213 if (REG_P (XEXP (op0, 0)))
10214 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10215
10216 MEM_VOLATILE_P (op0) |= volatilep;
10217 }
10218 }
10219 }
10220
10221 if (!op0)
10222 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
10223
10224 /* If the input and output modes are both the same, we are done. */
10225 if (mode == GET_MODE (op0))
10226 ;
10227 /* If neither mode is BLKmode, and both modes are the same size
10228 then we can use gen_lowpart. */
10229 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10230 && (GET_MODE_PRECISION (mode)
10231 == GET_MODE_PRECISION (GET_MODE (op0)))
10232 && !COMPLEX_MODE_P (GET_MODE (op0)))
10233 {
10234 if (GET_CODE (op0) == SUBREG)
10235 op0 = force_reg (GET_MODE (op0), op0);
10236 temp = gen_lowpart_common (mode, op0);
10237 if (temp)
10238 op0 = temp;
10239 else
10240 {
10241 if (!REG_P (op0) && !MEM_P (op0))
10242 op0 = force_reg (GET_MODE (op0), op0);
10243 op0 = gen_lowpart (mode, op0);
10244 }
10245 }
10246 /* If both types are integral, convert from one mode to the other. */
10247 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10248 op0 = convert_modes (mode, GET_MODE (op0), op0,
10249 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10250 /* As a last resort, spill op0 to memory, and reload it in a
10251 different mode. */
10252 else if (!MEM_P (op0))
10253 {
10254 /* If the operand is not a MEM, force it into memory. Since we
10255 are going to be changing the mode of the MEM, don't call
10256 force_const_mem for constants because we don't allow pool
10257 constants to change mode. */
10258 tree inner_type = TREE_TYPE (treeop0);
10259
10260 gcc_assert (!TREE_ADDRESSABLE (exp));
10261
10262 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10263 target
10264 = assign_stack_temp_for_type
10265 (TYPE_MODE (inner_type),
10266 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10267
10268 emit_move_insn (target, op0);
10269 op0 = target;
10270 }
10271
10272 /* At this point, OP0 is in the correct mode. If the output type is
10273 such that the operand is known to be aligned, indicate that it is.
10274 Otherwise, we need only be concerned about alignment for non-BLKmode
10275 results. */
10276 if (MEM_P (op0))
10277 {
10278 enum insn_code icode;
10279
10280 if (TYPE_ALIGN_OK (type))
10281 {
10282 /* ??? Copying the MEM without substantially changing it might
10283 run afoul of the code handling volatile memory references in
10284 store_expr, which assumes that TARGET is returned unmodified
10285 if it has been used. */
10286 op0 = copy_rtx (op0);
10287 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10288 }
10289 else if (mode != BLKmode
10290 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10291 /* If the target does have special handling for unaligned
10292 loads of mode then use them. */
10293 && ((icode = optab_handler (movmisalign_optab, mode))
10294 != CODE_FOR_nothing))
10295 {
10296 rtx reg, insn;
10297
10298 op0 = adjust_address (op0, mode, 0);
10299 /* We've already validated the memory, and we're creating a
10300 new pseudo destination. The predicates really can't
10301 fail. */
10302 reg = gen_reg_rtx (mode);
10303
10304 /* Nor can the insn generator. */
10305 insn = GEN_FCN (icode) (reg, op0);
10306 emit_insn (insn);
10307 return reg;
10308 }
10309 else if (STRICT_ALIGNMENT
10310 && mode != BLKmode
10311 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10312 {
10313 tree inner_type = TREE_TYPE (treeop0);
10314 HOST_WIDE_INT temp_size
10315 = MAX (int_size_in_bytes (inner_type),
10316 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10317 rtx new_rtx
10318 = assign_stack_temp_for_type (mode, temp_size, type);
10319 rtx new_with_op0_mode
10320 = adjust_address (new_rtx, GET_MODE (op0), 0);
10321
10322 gcc_assert (!TREE_ADDRESSABLE (exp));
10323
10324 if (GET_MODE (op0) == BLKmode)
10325 emit_block_move (new_with_op0_mode, op0,
10326 GEN_INT (GET_MODE_SIZE (mode)),
10327 (modifier == EXPAND_STACK_PARM
10328 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10329 else
10330 emit_move_insn (new_with_op0_mode, op0);
10331
10332 op0 = new_rtx;
10333 }
10334
10335 op0 = adjust_address (op0, mode, 0);
10336 }
10337
10338 return op0;
10339
10340 case MODIFY_EXPR:
10341 {
10342 tree lhs = treeop0;
10343 tree rhs = treeop1;
10344 gcc_assert (ignore);
10345
10346 /* Check for |= or &= of a bitfield of size one into another bitfield
10347 of size 1. In this case, (unless we need the result of the
10348 assignment) we can do this more efficiently with a
10349 test followed by an assignment, if necessary.
10350
10351 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10352 things change so we do, this code should be enhanced to
10353 support it. */
10354 if (TREE_CODE (lhs) == COMPONENT_REF
10355 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10356 || TREE_CODE (rhs) == BIT_AND_EXPR)
10357 && TREE_OPERAND (rhs, 0) == lhs
10358 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10359 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10360 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10361 {
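/* For lhs |= bit we store 1 only when the source bit is set, and for
   lhs &= bit we store 0 only when it is clear; in all other cases lhs
   already has the right value and no store is emitted.  */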
10362 rtx label = gen_label_rtx ();
10363 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10364 do_jump (TREE_OPERAND (rhs, 1),
10365 value ? label : 0,
10366 value ? 0 : label, -1);
10367 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10368 false);
10369 do_pending_stack_adjust ();
10370 emit_label (label);
10371 return const0_rtx;
10372 }
10373
10374 expand_assignment (lhs, rhs, false);
10375 return const0_rtx;
10376 }
10377
10378 case ADDR_EXPR:
10379 return expand_expr_addr_expr (exp, target, tmode, modifier);
10380
10381 case REALPART_EXPR:
10382 op0 = expand_normal (treeop0);
10383 return read_complex_part (op0, false);
10384
10385 case IMAGPART_EXPR:
10386 op0 = expand_normal (treeop0);
10387 return read_complex_part (op0, true);
10388
10389 case RETURN_EXPR:
10390 case LABEL_EXPR:
10391 case GOTO_EXPR:
10392 case SWITCH_EXPR:
10393 case ASM_EXPR:
10394 /* Expanded in cfgexpand.c. */
10395 gcc_unreachable ();
10396
10397 case TRY_CATCH_EXPR:
10398 case CATCH_EXPR:
10399 case EH_FILTER_EXPR:
10400 case TRY_FINALLY_EXPR:
10401 /* Lowered by tree-eh.c. */
10402 gcc_unreachable ();
10403
10404 case WITH_CLEANUP_EXPR:
10405 case CLEANUP_POINT_EXPR:
10406 case TARGET_EXPR:
10407 case CASE_LABEL_EXPR:
10408 case VA_ARG_EXPR:
10409 case BIND_EXPR:
10410 case INIT_EXPR:
10411 case CONJ_EXPR:
10412 case COMPOUND_EXPR:
10413 case PREINCREMENT_EXPR:
10414 case PREDECREMENT_EXPR:
10415 case POSTINCREMENT_EXPR:
10416 case POSTDECREMENT_EXPR:
10417 case LOOP_EXPR:
10418 case EXIT_EXPR:
10419 case COMPOUND_LITERAL_EXPR:
10420 /* Lowered by gimplify.c. */
10421 gcc_unreachable ();
10422
10423 case FDESC_EXPR:
10424 /* Function descriptors are not valid except as
10425 initialization constants, and should not be expanded. */
10426 gcc_unreachable ();
10427
10428 case WITH_SIZE_EXPR:
10429 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10430 have pulled out the size to use in whatever context it needed. */
10431 return expand_expr_real (treeop0, original_target, tmode,
10432 modifier, alt_rtl);
10433
10434 default:
10435 return expand_expr_real_2 (&ops, target, tmode, modifier);
10436 }
10437 }
10438 \f
10439 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10440 signedness of TYPE), possibly returning the result in TARGET. */
10441 static rtx
10442 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10443 {
10444 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10445 if (target && GET_MODE (target) != GET_MODE (exp))
10446 target = 0;
10447 /* For constant values, reduce using build_int_cst_type. */
10448 if (CONST_INT_P (exp))
10449 {
10450 HOST_WIDE_INT value = INTVAL (exp);
10451 tree t = build_int_cst_type (type, value);
10452 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10453 }
10454 else if (TYPE_UNSIGNED (type))
10455 {
10456 rtx mask = immed_double_int_const (double_int::mask (prec),
10457 GET_MODE (exp));
10458 return expand_and (GET_MODE (exp), exp, mask, target);
10459 }
10460 else
10461 {
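/* Sign-extend from bit PREC - 1: shift the value left so the field's
   sign bit lands in the mode's sign bit, then shift it back down
   arithmetically (e.g. by 27 bits each way for a 5-bit field in
   32-bit SImode).  */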
10462 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10463 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10464 exp, count, target, 0);
10465 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10466 exp, count, target, 0);
10467 }
10468 }
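/* A standalone sketch of what reduce_to_bit_field_precision arranges at run
   time, assuming a 32-bit container mode; reduce_unsigned and reduce_signed
   are illustrative names, not GCC interfaces.  */

static unsigned int
reduce_unsigned (unsigned int x, int prec)
{
  /* Unsigned types: AND with a mask of the PREC low-order bits, as the
     expand_and path above does.  */
  unsigned int mask = prec >= 32 ? ~0u : (1u << prec) - 1;
  return x & mask;
}

static int
reduce_signed (int x, int prec)
{
  /* Signed types: shift the field up to the top of the word and
     arithmetic-shift it back down, duplicating its sign bit, as the two
     expand_shift calls above do.  The left shift is done in unsigned
     arithmetic to avoid signed-overflow undefined behaviour; the right
     shift of a negative value is implementation-defined in ISO C but is
     arithmetic on the targets this code expects.  */
  int count = 32 - prec;
  return (int) ((unsigned int) x << count) >> count;
}

/* For example, reduce_unsigned (0xff, 3) == 7, and reduce_signed (5, 3)
   == -3 (the 3-bit pattern 101 read as a signed field).  */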
10469 \f
10470 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
10471 when applied to the address of EXP, produces an address known to be
10472 aligned to more than BIGGEST_ALIGNMENT. */
10473
10474 static int
10475 is_aligning_offset (const_tree offset, const_tree exp)
10476 {
10477 /* Strip off any conversions. */
10478 while (CONVERT_EXPR_P (offset))
10479 offset = TREE_OPERAND (offset, 0);
10480
10481 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10482 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10483 if (TREE_CODE (offset) != BIT_AND_EXPR
10484 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10485 || compare_tree_int (TREE_OPERAND (offset, 1),
10486 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10487 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10488 return 0;
10489
10490 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10491 It must be NEGATE_EXPR. Then strip any more conversions. */
10492 offset = TREE_OPERAND (offset, 0);
10493 while (CONVERT_EXPR_P (offset))
10494 offset = TREE_OPERAND (offset, 0);
10495
10496 if (TREE_CODE (offset) != NEGATE_EXPR)
10497 return 0;
10498
10499 offset = TREE_OPERAND (offset, 0);
10500 while (CONVERT_EXPR_P (offset))
10501 offset = TREE_OPERAND (offset, 0);
10502
10503 /* This must now be the address of EXP. */
10504 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10505 }
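/* A standalone sketch of the pattern is_aligning_offset recognizes, namely
   offset = (-addr) & (align - 1): adding that offset rounds ADDR up to the
   next multiple of ALIGN.  is_low_bit_mask and align_up are illustrative
   helpers, not GCC functions; ALIGN is assumed to be a power of 2.  */

static int
is_low_bit_mask (unsigned long long c)
{
  /* C is one less than a power of 2 exactly when C + 1 has a single bit
     set, which is what the exact_log2 (... + 1) < 0 test above rejects.  */
  return c != ~0ULL && ((c + 1) & c) == 0;
}

static unsigned long long
align_up (unsigned long long addr, unsigned long long align)
{
  /* The BIT_AND_EXPR of the negated address with the mask ALIGN - 1 is
     exactly the adjustment that makes the result ALIGN-aligned, e.g.
     align_up (5, 4) == 8 and align_up (8, 4) == 8.  */
  return addr + ((0 - addr) & (align - 1));
}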
10506 \f
10507 /* Return the tree node if ARG corresponds to a string constant, or zero
10508 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10509 in bytes within the string that ARG is accessing. The type of the
10510 offset will be `sizetype'. */
10511
10512 tree
10513 string_constant (tree arg, tree *ptr_offset)
10514 {
10515 tree array, offset, lower_bound;
10516 STRIP_NOPS (arg);
10517
10518 if (TREE_CODE (arg) == ADDR_EXPR)
10519 {
10520 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10521 {
10522 *ptr_offset = size_zero_node;
10523 return TREE_OPERAND (arg, 0);
10524 }
10525 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10526 {
10527 array = TREE_OPERAND (arg, 0);
10528 offset = size_zero_node;
10529 }
10530 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10531 {
10532 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10533 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10534 if (TREE_CODE (array) != STRING_CST
10535 && TREE_CODE (array) != VAR_DECL)
10536 return 0;
10537
10538 /* Check if the array has a nonzero lower bound. */
10539 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10540 if (!integer_zerop (lower_bound))
10541 {
10542 /* If the offset and base aren't both constants, return 0. */
10543 if (TREE_CODE (lower_bound) != INTEGER_CST)
10544 return 0;
10545 if (TREE_CODE (offset) != INTEGER_CST)
10546 return 0;
10547 /* Adjust offset by the lower bound. */
10548 offset = size_diffop (fold_convert (sizetype, offset),
10549 fold_convert (sizetype, lower_bound));
10550 }
10551 }
10552 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10553 {
10554 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10555 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10556 if (TREE_CODE (array) != ADDR_EXPR)
10557 return 0;
10558 array = TREE_OPERAND (array, 0);
10559 if (TREE_CODE (array) != STRING_CST
10560 && TREE_CODE (array) != VAR_DECL)
10561 return 0;
10562 }
10563 else
10564 return 0;
10565 }
10566 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10567 {
10568 tree arg0 = TREE_OPERAND (arg, 0);
10569 tree arg1 = TREE_OPERAND (arg, 1);
10570
10571 STRIP_NOPS (arg0);
10572 STRIP_NOPS (arg1);
10573
10574 if (TREE_CODE (arg0) == ADDR_EXPR
10575 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10576 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10577 {
10578 array = TREE_OPERAND (arg0, 0);
10579 offset = arg1;
10580 }
10581 else if (TREE_CODE (arg1) == ADDR_EXPR
10582 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10583 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10584 {
10585 array = TREE_OPERAND (arg1, 0);
10586 offset = arg0;
10587 }
10588 else
10589 return 0;
10590 }
10591 else
10592 return 0;
10593
10594 if (TREE_CODE (array) == STRING_CST)
10595 {
10596 *ptr_offset = fold_convert (sizetype, offset);
10597 return array;
10598 }
10599 else if (TREE_CODE (array) == VAR_DECL
10600 || TREE_CODE (array) == CONST_DECL)
10601 {
10602 int length;
10603 tree init = ctor_for_folding (array);
10604
10605 /* Variables initialized to string literals can be handled too. */
10606 if (init == error_mark_node
10607 || !init
10608 || TREE_CODE (init) != STRING_CST)
10609 return 0;
10610
10611 /* Avoid const char foo[4] = "abcde"; */
10612 if (DECL_SIZE_UNIT (array) == NULL_TREE
10613 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10614 || (length = TREE_STRING_LENGTH (init)) <= 0
10615 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10616 return 0;
10617
10618 /* If the variable is bigger than the string literal, OFFSET must be
10619 constant and inside the bounds of the string literal. */
10620 offset = fold_convert (sizetype, offset);
10621 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10622 && (! host_integerp (offset, 1)
10623 || compare_tree_int (offset, length) >= 0))
10624 return 0;
10625
10626 *ptr_offset = offset;
10627 return init;
10628 }
10629
10630 return 0;
10631 }
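/* A standalone sketch of the checks string_constant applies to a VAR_DECL
   initialized by a string literal.  decl_size, init_len and offset stand in
   for DECL_SIZE_UNIT (array), TREE_STRING_LENGTH (init) and the folded
   offset; string_init_usable_p is an illustrative name, not a GCC
   function.  */

static int
string_init_usable_p (long long decl_size, long long init_len,
                      long long offset, int offset_is_constant)
{
  /* Reject "const char foo[4] = "abcde";": the object must be at least
     as large as its string initializer.  */
  if (init_len <= 0 || decl_size < init_len)
    return 0;
  /* If the object is larger than the literal, only a constant offset
     that stays inside the literal can be used.  */
  if (decl_size > init_len
      && (!offset_is_constant || offset >= init_len))
    return 0;
  return 1;
}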
10632 \f
10633 /* Generate code to calculate the expression described by OPS, an
10634 exploded comparison expression, using a store-flag instruction,
10635 and return an rtx for the result.
10636
10637 If TARGET is nonzero, store the result there if convenient.
10638
10639 Return zero if there is no suitable set-flag instruction
10640 available on this machine.
10641
10642 Once expand_expr has been called on the arguments of the comparison,
10643 we are committed to doing the store flag, since it is not safe to
10644 re-evaluate the expression. We emit the store-flag insn by calling
10645 emit_store_flag, but only expand the arguments if we have a reason
10646 to believe that emit_store_flag will be successful. If we think that
10647 it will, but it isn't, we have to simulate the store-flag with a
10648 set/jump/set sequence. */
10649
10650 static rtx
10651 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10652 {
10653 enum rtx_code code;
10654 tree arg0, arg1, type;
10655 tree tem;
10656 enum machine_mode operand_mode;
10657 int unsignedp;
10658 rtx op0, op1;
10659 rtx subtarget = target;
10660 location_t loc = ops->location;
10661
10662 arg0 = ops->op0;
10663 arg1 = ops->op1;
10664
10665 /* Don't crash if the comparison was erroneous. */
10666 if (arg0 == error_mark_node || arg1 == error_mark_node)
10667 return const0_rtx;
10668
10669 type = TREE_TYPE (arg0);
10670 operand_mode = TYPE_MODE (type);
10671 unsignedp = TYPE_UNSIGNED (type);
10672
10673 /* We won't bother with BLKmode store-flag operations because it would mean
10674 passing a lot of information to emit_store_flag. */
10675 if (operand_mode == BLKmode)
10676 return 0;
10677
10678 /* We won't bother with store-flag operations involving function pointers
10679 when function pointers must be canonicalized before comparisons. */
10680 #ifdef HAVE_canonicalize_funcptr_for_compare
10681 if (HAVE_canonicalize_funcptr_for_compare
10682 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10683 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10684 == FUNCTION_TYPE))
10685 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10686 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10687 == FUNCTION_TYPE))))
10688 return 0;
10689 #endif
10690
10691 STRIP_NOPS (arg0);
10692 STRIP_NOPS (arg1);
10693
10694 /* For vector typed comparisons emit code to generate the desired
10695 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10696 expander for this. */
10697 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10698 {
10699 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10700 tree if_true = constant_boolean_node (true, ops->type);
10701 tree if_false = constant_boolean_node (false, ops->type);
10702 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10703 }
10704
10705 /* Get the rtx comparison code to use. We know that OPS is a comparison
10706 operation of some type. Some comparisons against 1 and -1 can be
10707 converted to comparisons with zero. Do so here so that the tests
10708 below will be aware that we have a comparison with zero. These
10709 tests will not catch constants in the first operand, but constants
10710 are rarely passed as the first operand. */
10711
10712 switch (ops->code)
10713 {
10714 case EQ_EXPR:
10715 code = EQ;
10716 break;
10717 case NE_EXPR:
10718 code = NE;
10719 break;
10720 case LT_EXPR:
10721 if (integer_onep (arg1))
10722 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10723 else
10724 code = unsignedp ? LTU : LT;
10725 break;
10726 case LE_EXPR:
10727 if (! unsignedp && integer_all_onesp (arg1))
10728 arg1 = integer_zero_node, code = LT;
10729 else
10730 code = unsignedp ? LEU : LE;
10731 break;
10732 case GT_EXPR:
10733 if (! unsignedp && integer_all_onesp (arg1))
10734 arg1 = integer_zero_node, code = GE;
10735 else
10736 code = unsignedp ? GTU : GT;
10737 break;
10738 case GE_EXPR:
10739 if (integer_onep (arg1))
10740 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10741 else
10742 code = unsignedp ? GEU : GE;
10743 break;
10744
10745 case UNORDERED_EXPR:
10746 code = UNORDERED;
10747 break;
10748 case ORDERED_EXPR:
10749 code = ORDERED;
10750 break;
10751 case UNLT_EXPR:
10752 code = UNLT;
10753 break;
10754 case UNLE_EXPR:
10755 code = UNLE;
10756 break;
10757 case UNGT_EXPR:
10758 code = UNGT;
10759 break;
10760 case UNGE_EXPR:
10761 code = UNGE;
10762 break;
10763 case UNEQ_EXPR:
10764 code = UNEQ;
10765 break;
10766 case LTGT_EXPR:
10767 code = LTGT;
10768 break;
10769
10770 default:
10771 gcc_unreachable ();
10772 }
10773
10774 /* Put a constant second. */
10775 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10776 || TREE_CODE (arg0) == FIXED_CST)
10777 {
10778 tem = arg0; arg0 = arg1; arg1 = tem;
10779 code = swap_condition (code);
10780 }
10781
10782 /* If this is an equality or inequality test of a single bit, we can
10783 do this by shifting the bit being tested to the low-order bit and
10784 masking the result with the constant 1. If the condition was EQ,
10785 we xor it with 1. This does not require an scc insn and is faster
10786 than an scc insn even if we have it.
10787
10788 The code to make this transformation was moved into fold_single_bit_test,
10789 so we just call into the folder and expand its result. */
10790
10791 if ((code == NE || code == EQ)
10792 && integer_zerop (arg1)
10793 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10794 {
10795 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10796 if (srcstmt
10797 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10798 {
10799 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10800 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10801 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10802 gimple_assign_rhs1 (srcstmt),
10803 gimple_assign_rhs2 (srcstmt));
10804 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10805 if (temp)
10806 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10807 }
10808 }
10809
10810 if (! get_subtarget (target)
10811 || GET_MODE (subtarget) != operand_mode)
10812 subtarget = 0;
10813
10814 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10815
10816 if (target == 0)
10817 target = gen_reg_rtx (mode);
10818
10819 /* Try a cstore if possible. */
10820 return emit_store_flag_force (target, code, op0, op1,
10821 operand_mode, unsignedp,
10822 (TYPE_PRECISION (ops->type) == 1
10823 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10824 }
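/* A standalone sketch of the single-bit transformation that do_store_flag
   hands off to fold_single_bit_test: a test of one bit needs only a shift
   and a mask (plus an XOR for the EQ form), no store-flag instruction.
   single_bit_ne and single_bit_eq are illustrative names.  */

static unsigned int
single_bit_ne (unsigned int x, int n)
{
  /* (x & (1u << n)) != 0  */
  return (x >> n) & 1u;
}

static unsigned int
single_bit_eq (unsigned int x, int n)
{
  /* (x & (1u << n)) == 0  is the same value xor'ed with 1.  */
  return ((x >> n) & 1u) ^ 1u;
}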
10825 \f
10826
10827 /* Stubs in case we haven't got a casesi insn. */
10828 #ifndef HAVE_casesi
10829 # define HAVE_casesi 0
10830 # define gen_casesi(a, b, c, d, e) (0)
10831 # define CODE_FOR_casesi CODE_FOR_nothing
10832 #endif
10833
10834 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10835 0 otherwise (i.e. if there is no casesi instruction).
10836
10837 DEFAULT_PROBABILITY is the probability of jumping to the default
10838 label. */
10839 int
10840 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10841 rtx table_label, rtx default_label, rtx fallback_label,
10842 int default_probability)
10843 {
10844 struct expand_operand ops[5];
10845 enum machine_mode index_mode = SImode;
10846 rtx op1, op2, index;
10847
10848 if (! HAVE_casesi)
10849 return 0;
10850
10851 /* Convert the index to SImode. */
10852 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10853 {
10854 enum machine_mode omode = TYPE_MODE (index_type);
10855 rtx rangertx = expand_normal (range);
10856
10857 /* We must handle the endpoints in the original mode. */
10858 index_expr = build2 (MINUS_EXPR, index_type,
10859 index_expr, minval);
10860 minval = integer_zero_node;
10861 index = expand_normal (index_expr);
10862 if (default_label)
10863 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10864 omode, 1, default_label,
10865 default_probability);
10866 /* Now we can safely truncate. */
10867 index = convert_to_mode (index_mode, index, 0);
10868 }
10869 else
10870 {
10871 if (TYPE_MODE (index_type) != index_mode)
10872 {
10873 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10874 index_expr = fold_convert (index_type, index_expr);
10875 }
10876
10877 index = expand_normal (index_expr);
10878 }
10879
10880 do_pending_stack_adjust ();
10881
10882 op1 = expand_normal (minval);
10883 op2 = expand_normal (range);
10884
10885 create_input_operand (&ops[0], index, index_mode);
10886 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10887 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10888 create_fixed_operand (&ops[3], table_label);
10889 create_fixed_operand (&ops[4], (default_label
10890 ? default_label
10891 : fallback_label));
10892 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10893 return 1;
10894 }
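/* A standalone sketch of the wide-index path in try_casesi, assuming a
   64-bit index type and a 32-bit casesi operand; dispatch_low and
   default_case are illustrative stand-ins for the jump table and the
   default label.  */

static int
casesi_wide_index (unsigned long long index, unsigned long long minval,
                   unsigned long long range,
                   int (*dispatch_low) (unsigned int),
                   int (*default_case) (void))
{
  /* Subtract the lower bound while still in the original (wide) mode.  */
  unsigned long long rel = index - minval;
  /* Bounds-check in the wide mode as well (the LTU comparison above):
     if RANGE < REL the value is outside the table.  */
  if (range < rel)
    return default_case ();
  /* Only now is it safe to truncate the index for the casesi pattern.  */
  return dispatch_low ((unsigned int) rel);
}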
10895
10896 /* Attempt to generate a tablejump instruction; same concept. */
10897 #ifndef HAVE_tablejump
10898 #define HAVE_tablejump 0
10899 #define gen_tablejump(x, y) (0)
10900 #endif
10901
10902 /* Subroutine of the next function.
10903
10904 INDEX is the value being switched on, with the lowest value
10905 in the table already subtracted.
10906 MODE is its expected mode (needed if INDEX is constant).
10907 RANGE is the length of the jump table.
10908 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10909
10910 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10911 index value is out of range.
10912 DEFAULT_PROBABILITY is the probability of jumping to
10913 the default label. */
10914
10915 static void
10916 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10917 rtx default_label, int default_probability)
10918 {
10919 rtx temp, vector;
10920
10921 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10922 cfun->cfg->max_jumptable_ents = INTVAL (range);
10923
10924 /* Do an unsigned comparison (in the proper mode) between the index
10925 expression and the value which represents the length of the range.
10926 Since we just finished subtracting the lower bound of the range
10927 from the index expression, this comparison allows us to simultaneously
10928 check that the original index expression value is both greater than
10929 or equal to the minimum value of the range and less than or equal to
10930 the maximum value of the range. */
10931
10932 if (default_label)
10933 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10934 default_label, default_probability);
10935
10936
10937 /* If index is in range, it must fit in Pmode.
10938 Convert to Pmode so we can index with it. */
10939 if (mode != Pmode)
10940 index = convert_to_mode (Pmode, index, 1);
10941
10942 /* Don't let a MEM slip through, because then INDEX that comes
10943 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10944 and break_out_memory_refs will go to work on it and mess it up. */
10945 #ifdef PIC_CASE_VECTOR_ADDRESS
10946 if (flag_pic && !REG_P (index))
10947 index = copy_to_mode_reg (Pmode, index);
10948 #endif
10949
10950 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10951 GET_MODE_SIZE, because this indicates how large insns are. The other
10952 uses should all be Pmode, because they are addresses. This code
10953 could fail if addresses and insns are not the same size. */
10954 index = gen_rtx_PLUS
10955 (Pmode,
10956 gen_rtx_MULT (Pmode, index,
10957 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
10958 gen_rtx_LABEL_REF (Pmode, table_label));
10959 #ifdef PIC_CASE_VECTOR_ADDRESS
10960 if (flag_pic)
10961 index = PIC_CASE_VECTOR_ADDRESS (index);
10962 else
10963 #endif
10964 index = memory_address (CASE_VECTOR_MODE, index);
10965 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10966 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10967 convert_move (temp, vector, 0);
10968
10969 emit_jump_insn (gen_tablejump (temp, table_label));
10970
10971 /* If we are generating PIC code or if the table is PC-relative, the
10972 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10973 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10974 emit_barrier ();
10975 }
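/* A standalone sketch of the bounds check do_tablejump emits: once the
   lowest case value has been subtracted, a single unsigned comparison (the
   GTU branch above) covers both ends of the range.  in_case_range is an
   illustrative helper; INDEX, LOW and HIGH stand for the original case
   value and the table bounds.  */

static int
in_case_range (long long index, long long low, long long high)
{
  /* Do the subtractions with modular (unsigned) arithmetic, as RTL does.  */
  unsigned long long rel = (unsigned long long) index - (unsigned long long) low;
  unsigned long long range = (unsigned long long) high - (unsigned long long) low;
  /* If INDEX < LOW, REL wraps around to a huge value, so a single
     REL > RANGE test catches values below the table as well as above it.  */
  return rel <= range;
}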
10976
10977 int
10978 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10979 rtx table_label, rtx default_label, int default_probability)
10980 {
10981 rtx index;
10982
10983 if (! HAVE_tablejump)
10984 return 0;
10985
10986 index_expr = fold_build2 (MINUS_EXPR, index_type,
10987 fold_convert (index_type, index_expr),
10988 fold_convert (index_type, minval));
10989 index = expand_normal (index_expr);
10990 do_pending_stack_adjust ();
10991
10992 do_tablejump (index, TYPE_MODE (index_type),
10993 convert_modes (TYPE_MODE (index_type),
10994 TYPE_MODE (TREE_TYPE (range)),
10995 expand_normal (range),
10996 TYPE_UNSIGNED (TREE_TYPE (range))),
10997 table_label, default_label, default_probability);
10998 return 1;
10999 }
11000
11001 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11002 static rtx
11003 const_vector_from_tree (tree exp)
11004 {
11005 rtvec v;
11006 unsigned i;
11007 int units;
11008 tree elt;
11009 enum machine_mode inner, mode;
11010
11011 mode = TYPE_MODE (TREE_TYPE (exp));
11012
11013 if (initializer_zerop (exp))
11014 return CONST0_RTX (mode);
11015
11016 units = GET_MODE_NUNITS (mode);
11017 inner = GET_MODE_INNER (mode);
11018
11019 v = rtvec_alloc (units);
11020
11021 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11022 {
11023 elt = VECTOR_CST_ELT (exp, i);
11024
11025 if (TREE_CODE (elt) == REAL_CST)
11026 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11027 inner);
11028 else if (TREE_CODE (elt) == FIXED_CST)
11029 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11030 inner);
11031 else
11032 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11033 inner);
11034 }
11035
11036 return gen_rtx_CONST_VECTOR (mode, v);
11037 }
11038
11039 /* Build a decl for a personality function given a language prefix. */
11040
11041 tree
11042 build_personality_function (const char *lang)
11043 {
11044 const char *unwind_and_version;
11045 tree decl, type;
11046 char *name;
11047
11048 switch (targetm_common.except_unwind_info (&global_options))
11049 {
11050 case UI_NONE:
11051 return NULL;
11052 case UI_SJLJ:
11053 unwind_and_version = "_sj0";
11054 break;
11055 case UI_DWARF2:
11056 case UI_TARGET:
11057 unwind_and_version = "_v0";
11058 break;
11059 case UI_SEH:
11060 unwind_and_version = "_seh0";
11061 break;
11062 default:
11063 gcc_unreachable ();
11064 }
11065
11066 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11067
11068 type = build_function_type_list (integer_type_node, integer_type_node,
11069 long_long_unsigned_type_node,
11070 ptr_type_node, ptr_type_node, NULL_TREE);
11071 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11072 get_identifier (name), type);
11073 DECL_ARTIFICIAL (decl) = 1;
11074 DECL_EXTERNAL (decl) = 1;
11075 TREE_PUBLIC (decl) = 1;
11076
11077 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11078 are the flags assigned by targetm.encode_section_info. */
11079 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11080
11081 return decl;
11082 }
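/* A sketch of the names the ACONCAT above produces, here assuming the usual
   C++ language prefix "gxx"; personality_name_for_gxx is an illustrative
   helper, not part of expr.c.  */

static const char *
personality_name_for_gxx (void)
{
  /* "__" + lang + "_personality" + unwind_and_version: with DWARF-2 or
     target unwinding this yields the familiar __gxx_personality_v0;
     SJLJ gives __gxx_personality_sj0 and SEH __gxx_personality_seh0.  */
  return "__" "gxx" "_personality" "_v0";
}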
11083
11084 /* Extracts the personality function of DECL and returns the corresponding
11085 libfunc. */
11086
11087 rtx
11088 get_personality_function (tree decl)
11089 {
11090 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11091 enum eh_personality_kind pk;
11092
11093 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11094 if (pk == eh_personality_none)
11095 return NULL;
11096
11097 if (!personality
11098 && pk == eh_personality_any)
11099 personality = lang_hooks.eh_personality ();
11100
11101 if (pk == eh_personality_lang)
11102 gcc_assert (personality != NULL_TREE);
11103
11104 return XEXP (DECL_RTL (personality), 0);
11105 }
11106
11107 #include "gt-expr.h"