Remove parameter keep_aligning from get_inner_reference.
[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70
71 /* Decide whether a function's arguments should be processed
72 from first to last or from last to first.
73
74 They should if the stack and args grow in opposite directions, but
75 only if we have push insns. */
76
77 #ifdef PUSH_ROUNDING
78
79 #ifndef PUSH_ARGS_REVERSED
80 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
81 #define PUSH_ARGS_REVERSED /* If it's last to first. */
82 #endif
83 #endif
84
85 #endif
86
87 #ifndef STACK_PUSH_CODE
88 #ifdef STACK_GROWS_DOWNWARD
89 #define STACK_PUSH_CODE PRE_DEC
90 #else
91 #define STACK_PUSH_CODE PRE_INC
92 #endif
93 #endif
94
95
96 /* If this is nonzero, we do not bother generating VOLATILE
97 around volatile memory references, and we are willing to
98 output indirect addresses. If cse is to follow, we reject
99 indirect addresses so a useful potential cse is generated;
100 if it is used only once, instruction combination will produce
101 the same indirect address eventually. */
102 int cse_not_expected;
103
104 /* This structure is used by move_by_pieces to describe the move to
105 be performed. */
106 struct move_by_pieces_d
107 {
108 rtx to;
109 rtx to_addr;
110 int autinc_to;
111 int explicit_inc_to;
112 rtx from;
113 rtx from_addr;
114 int autinc_from;
115 int explicit_inc_from;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 int reverse;
119 };
120
121 /* This structure is used by store_by_pieces to describe the clear to
122 be performed. */
123
124 struct store_by_pieces_d
125 {
126 rtx to;
127 rtx to_addr;
128 int autinc_to;
129 int explicit_inc_to;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
133 void *constfundata;
134 int reverse;
135 };
136
137 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
138 struct move_by_pieces_d *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
141 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
142 unsigned HOST_WIDE_INT);
143 static tree emit_block_move_libcall_fn (int);
144 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
145 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
146 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
147 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
148 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
149 struct store_by_pieces_d *);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
154 HOST_WIDE_INT, enum machine_mode,
155 tree, int, alias_set_type);
156 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
157 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
158 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
159 enum machine_mode, tree, alias_set_type, bool);
160
161 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
162
163 static int is_aligning_offset (const_tree, const_tree);
164 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
165 enum expand_modifier);
166 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
167 static rtx do_store_flag (sepops, rtx, enum machine_mode);
168 #ifdef PUSH_ROUNDING
169 static void emit_single_push_insn (enum machine_mode, rtx, tree);
170 #endif
171 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
172 static rtx const_vector_from_tree (tree);
173 static void write_complex_part (rtx, rtx, bool);
174
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
180 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
181 #endif
182
183 /* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
189 #endif
190
191 /* This macro is used to determine whether store_by_pieces should be
192 called to "memset" storage with byte values other than zero. */
193 #ifndef SET_BY_PIECES_P
194 #define SET_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
197 #endif
198
199 /* This macro is used to determine whether store_by_pieces should be
200 called to "memcpy" storage when the source is a constant string. */
201 #ifndef STORE_BY_PIECES_P
202 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
204 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
205 #endif
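/* A worked example of these heuristics (the MOVE_MAX_PIECES and MOVE_RATIO
   values here are hypothetical; both are target-defined): with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO (speed) == 4, an aligned 16-byte copy
   needs two DImode moves, so MOVE_BY_PIECES_P (16, 64) is true and the copy
   is expanded inline; a 64-byte copy would need eight moves and therefore
   falls through to a movmem pattern or a memcpy call instead.  */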
206 \f
207 /* This is run to set up which modes can be used
208 directly in memory and to initialize the block move optab. It is run
209 at the beginning of compilation and when the target is reinitialized. */
210
211 void
212 init_expr_target (void)
213 {
214 rtx insn, pat;
215 enum machine_mode mode;
216 int num_clobbers;
217 rtx mem, mem1;
218 rtx reg;
219
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
225
226 /* A scratch register we can modify in-place below to avoid
227 useless RTL allocations. */
228 reg = gen_rtx_REG (VOIDmode, -1);
229
230 insn = rtx_alloc (INSN);
231 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
232 PATTERN (insn) = pat;
233
234 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235 mode = (enum machine_mode) ((int) mode + 1))
236 {
237 int regno;
238
239 direct_load[(int) mode] = direct_store[(int) mode] = 0;
240 PUT_MODE (mem, mode);
241 PUT_MODE (mem1, mode);
242 PUT_MODE (reg, mode);
243
244 /* See if there is some register that can be used in this mode and
245 directly loaded or stored from memory. */
246
247 if (mode != VOIDmode && mode != BLKmode)
248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 regno++)
251 {
252 if (! HARD_REGNO_MODE_OK (regno, mode))
253 continue;
254
255 SET_REGNO (reg, regno);
256
257 SET_SRC (pat) = mem;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
261
262 SET_SRC (pat) = mem1;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
266
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
271
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem1;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
276 }
277 }
278
279 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
280
281 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
282 mode = GET_MODE_WIDER_MODE (mode))
283 {
284 enum machine_mode srcmode;
285 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
286 srcmode = GET_MODE_WIDER_MODE (srcmode))
287 {
288 enum insn_code ic;
289
290 ic = can_extend_p (mode, srcmode, 0);
291 if (ic == CODE_FOR_nothing)
292 continue;
293
294 PUT_MODE (mem, srcmode);
295
296 if (insn_operand_matches (ic, 1, mem))
297 float_extend_from_mem[mode][srcmode] = true;
298 }
299 }
300 }
301
302 /* This is run at the start of compiling a function. */
303
304 void
305 init_expr (void)
306 {
307 memset (&crtl->expr, 0, sizeof (crtl->expr));
308 }
309 \f
310 /* Copy data from FROM to TO, where the machine modes are not the same.
311 Both modes may be integer, or both may be floating, or both may be
312 fixed-point.
313 UNSIGNEDP should be nonzero if FROM is an unsigned type.
314 This causes zero-extension instead of sign-extension. */
315
316 void
317 convert_move (rtx to, rtx from, int unsignedp)
318 {
319 enum machine_mode to_mode = GET_MODE (to);
320 enum machine_mode from_mode = GET_MODE (from);
321 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
322 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
323 enum insn_code code;
324 rtx libcall;
325
326 /* rtx code for making an equivalent value. */
327 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
328 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
329
330
331 gcc_assert (to_real == from_real);
332 gcc_assert (to_mode != BLKmode);
333 gcc_assert (from_mode != BLKmode);
334
335 /* If the source and destination are already the same, then there's
336 nothing to do. */
337 if (to == from)
338 return;
339
340 /* If FROM is a SUBREG that indicates that we have already done at least
341 the required extension, strip it. We don't handle such SUBREGs as
342 TO here. */
343
344 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
345 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
346 >= GET_MODE_PRECISION (to_mode))
347 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
348 from = gen_lowpart (to_mode, from), from_mode = to_mode;
349
350 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
351
352 if (to_mode == from_mode
353 || (from_mode == VOIDmode && CONSTANT_P (from)))
354 {
355 emit_move_insn (to, from);
356 return;
357 }
358
359 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
360 {
361 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
362
363 if (VECTOR_MODE_P (to_mode))
364 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
365 else
366 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
367
368 emit_move_insn (to, from);
369 return;
370 }
371
372 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
373 {
374 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
375 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
376 return;
377 }
378
379 if (to_real)
380 {
381 rtx value, insns;
382 convert_optab tab;
383
384 gcc_assert ((GET_MODE_PRECISION (from_mode)
385 != GET_MODE_PRECISION (to_mode))
386 || (DECIMAL_FLOAT_MODE_P (from_mode)
387 != DECIMAL_FLOAT_MODE_P (to_mode)));
388
389 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
390 /* Conversion between decimal float and binary float, same size. */
391 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
392 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else
395 tab = trunc_optab;
396
397 /* Try converting directly if the insn is supported. */
398
399 code = convert_optab_handler (tab, to_mode, from_mode);
400 if (code != CODE_FOR_nothing)
401 {
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 return;
405 }
406
407 /* Otherwise use a libcall. */
408 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
409
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
412
413 start_sequence ();
414 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 1, from, from_mode);
416 insns = get_insns ();
417 end_sequence ();
418 emit_libcall_block (insns, to, value,
419 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 from)
421 : gen_rtx_FLOAT_EXTEND (to_mode, from));
422 return;
423 }
424
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
429 {
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
432
433 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
434 != CODE_FOR_nothing);
435
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
439 to, from, UNKNOWN);
440 return;
441 }
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
443 {
444 rtx new_from;
445 enum machine_mode full_mode
446 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
448 enum insn_code icode;
449
450 icode = convert_optab_handler (ctab, full_mode, from_mode);
451 gcc_assert (icode != CODE_FOR_nothing);
452
453 if (to_mode == full_mode)
454 {
455 emit_unop_insn (icode, to, from, UNKNOWN);
456 return;
457 }
458
459 new_from = gen_reg_rtx (full_mode);
460 emit_unop_insn (icode, new_from, from, UNKNOWN);
461
462 /* else proceed to integer conversions below. */
463 from_mode = full_mode;
464 from = new_from;
465 }
466
467 /* Make sure both are fixed-point modes or both are not. */
468 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
469 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
470 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
471 {
472 /* If we widen from_mode to to_mode and they are in the same class,
473 we won't saturate the result.
474 Otherwise, always saturate the result to play it safe. */
475 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
476 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
477 expand_fixed_convert (to, from, 0, 0);
478 else
479 expand_fixed_convert (to, from, 0, 1);
480 return;
481 }
482
483 /* Now both modes are integers. */
484
485 /* Handle expanding beyond a word. */
486 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
487 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
488 {
489 rtx insns;
490 rtx lowpart;
491 rtx fill_value;
492 rtx lowfrom;
493 int i;
494 enum machine_mode lowpart_mode;
495 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
496
497 /* Try converting directly if the insn is supported. */
498 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
499 != CODE_FOR_nothing)
500 {
501 /* If FROM is a SUBREG, put it into a register. Do this
502 so that we always generate the same set of insns for
503 better cse'ing; if an intermediate assignment occurred,
504 we won't be doing the operation directly on the SUBREG. */
505 if (optimize > 0 && GET_CODE (from) == SUBREG)
506 from = force_reg (from_mode, from);
507 emit_unop_insn (code, to, from, equiv_code);
508 return;
509 }
510 /* Next, try converting via full word. */
511 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
512 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
513 != CODE_FOR_nothing))
514 {
515 rtx word_to = gen_reg_rtx (word_mode);
516 if (REG_P (to))
517 {
518 if (reg_overlap_mentioned_p (to, from))
519 from = force_reg (from_mode, from);
520 emit_clobber (to);
521 }
522 convert_move (word_to, from, unsignedp);
523 emit_unop_insn (code, to, word_to, equiv_code);
524 return;
525 }
526
527 /* No special multiword conversion insn; do it by hand. */
528 start_sequence ();
529
530 /* Since we will turn this into a no conflict block, we must ensure that
531 the source does not overlap the target, so force it into an isolated
532 register if it might. Likewise for any MEM input, since the
533 conversion sequence might require several references to it and we
534 must ensure we're getting the same value every time. */
535
536 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
537 from = force_reg (from_mode, from);
538
539 /* Get a copy of FROM widened to a word, if necessary. */
540 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
541 lowpart_mode = word_mode;
542 else
543 lowpart_mode = from_mode;
544
545 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
546
547 lowpart = gen_lowpart (lowpart_mode, to);
548 emit_move_insn (lowpart, lowfrom);
549
550 /* Compute the value to put in each remaining word. */
551 if (unsignedp)
552 fill_value = const0_rtx;
553 else
554 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
555 LT, lowfrom, const0_rtx,
556 VOIDmode, 0, -1);
557
558 /* Fill the remaining words. */
559 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
560 {
561 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
562 rtx subword = operand_subword (to, index, 1, to_mode);
563
564 gcc_assert (subword);
565
566 if (fill_value != subword)
567 emit_move_insn (subword, fill_value);
568 }
569
570 insns = get_insns ();
571 end_sequence ();
572
573 emit_insn (insns);
574 return;
575 }
576
577 /* Truncating multi-word to a word or less. */
578 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
579 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
580 {
581 if (!((MEM_P (from)
582 && ! MEM_VOLATILE_P (from)
583 && direct_load[(int) to_mode]
584 && ! mode_dependent_address_p (XEXP (from, 0),
585 MEM_ADDR_SPACE (from)))
586 || REG_P (from)
587 || GET_CODE (from) == SUBREG))
588 from = force_reg (from_mode, from);
589 convert_move (to, gen_lowpart (word_mode, from), 0);
590 return;
591 }
592
593 /* Now follow all the conversions between integers
594 no more than a word long. */
595
596 /* For truncation, usually we can just refer to FROM in a narrower mode. */
597 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
598 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
599 {
600 if (!((MEM_P (from)
601 && ! MEM_VOLATILE_P (from)
602 && direct_load[(int) to_mode]
603 && ! mode_dependent_address_p (XEXP (from, 0),
604 MEM_ADDR_SPACE (from)))
605 || REG_P (from)
606 || GET_CODE (from) == SUBREG))
607 from = force_reg (from_mode, from);
608 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
609 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
610 from = copy_to_reg (from);
611 emit_move_insn (to, gen_lowpart (to_mode, from));
612 return;
613 }
614
615 /* Handle extension. */
616 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
617 {
618 /* Convert directly if that works. */
619 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
620 != CODE_FOR_nothing)
621 {
622 emit_unop_insn (code, to, from, equiv_code);
623 return;
624 }
625 else
626 {
627 enum machine_mode intermediate;
628 rtx tmp;
629 int shift_amount;
630
631 /* Search for a mode to convert via. */
632 for (intermediate = from_mode; intermediate != VOIDmode;
633 intermediate = GET_MODE_WIDER_MODE (intermediate))
634 if (((can_extend_p (to_mode, intermediate, unsignedp)
635 != CODE_FOR_nothing)
636 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
637 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
638 && (can_extend_p (intermediate, from_mode, unsignedp)
639 != CODE_FOR_nothing))
640 {
641 convert_move (to, convert_to_mode (intermediate, from,
642 unsignedp), unsignedp);
643 return;
644 }
645
646 /* No suitable intermediate mode.
647 Generate what we need with shifts. */
648 shift_amount = (GET_MODE_PRECISION (to_mode)
649 - GET_MODE_PRECISION (from_mode));
650 from = gen_lowpart (to_mode, force_reg (from_mode, from));
651 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
652 to, unsignedp);
653 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
654 to, unsignedp);
655 if (tmp != to)
656 emit_move_insn (to, tmp);
657 return;
658 }
659 }
660
661 /* Support special truncate insns for certain modes. */
662 if (convert_optab_handler (trunc_optab, to_mode,
663 from_mode) != CODE_FOR_nothing)
664 {
665 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
666 to, from, UNKNOWN);
667 return;
668 }
669
670 /* Handle truncation of volatile memrefs, and so on;
671 the things that couldn't be truncated directly,
672 and for which there was no special instruction.
673
674 ??? Code above formerly short-circuited this, for most integer
675 mode pairs, with a force_reg in from_mode followed by a recursive
676 call to this routine. Appears always to have been wrong. */
677 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
678 {
679 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
680 emit_move_insn (to, temp);
681 return;
682 }
683
684 /* Mode combination is not recognized. */
685 gcc_unreachable ();
686 }
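/* A minimal usage sketch of convert_move (illustrative only; the helper
   name below is hypothetical, not part of this file): sign-extend an
   existing HImode value into a fresh SImode pseudo.  Passing
   unsignedp == 1 would request zero-extension instead.  */

static rtx
example_extend_hi_to_si (rtx hi_val)
{
  rtx si_reg = gen_reg_rtx (SImode);

  /* The source and destination modes are taken from the operands;
     unsignedp == 0 selects sign-extension.  */
  convert_move (si_reg, hi_val, 0);
  return si_reg;
}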
687
688 /* Return an rtx for a value that would result
689 from converting X to mode MODE.
690 Both X and MODE may be floating, or both integer.
691 UNSIGNEDP is nonzero if X is an unsigned value.
692 This can be done by referring to a part of X in place
693 or by copying to a new temporary with conversion. */
694
695 rtx
696 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
697 {
698 return convert_modes (mode, VOIDmode, x, unsignedp);
699 }
700
701 /* Return an rtx for a value that would result
702 from converting X from mode OLDMODE to mode MODE.
703 Both modes may be floating, or both integer.
704 UNSIGNEDP is nonzero if X is an unsigned value.
705
706 This can be done by referring to a part of X in place
707 or by copying to a new temporary with conversion.
708
709 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
710
711 rtx
712 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
713 {
714 rtx temp;
715
716 /* If X is a SUBREG that indicates that we have already done at least
717 the required extension, strip it. */
718
719 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
720 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
721 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
722 x = gen_lowpart (mode, x);
723
724 if (GET_MODE (x) != VOIDmode)
725 oldmode = GET_MODE (x);
726
727 if (mode == oldmode)
728 return x;
729
730 /* There is one case that we must handle specially: If we are converting
731 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
732 we are to interpret the constant as unsigned, gen_lowpart will do
733 the wrong thing if the constant appears negative. What we want to do is
734 make the high-order word of the constant zero, not all ones. */
735
736 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
737 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
738 && CONST_INT_P (x) && INTVAL (x) < 0)
739 {
740 double_int val = double_int::from_uhwi (INTVAL (x));
741
742 /* We need to zero extend VAL. */
743 if (oldmode != VOIDmode)
744 val = val.zext (GET_MODE_BITSIZE (oldmode));
745
746 return immed_double_int_const (val, mode);
747 }
748
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
753
754 if ((CONST_INT_P (x)
755 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (CONST_DOUBLE_AS_INT_P (x)
759 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
762 || (REG_P (x)
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
766 GET_MODE (x))))))))
767 {
768 /* ?? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (CONST_INT_P (x) && oldmode != VOIDmode
772 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
773 {
774 HOST_WIDE_INT val = INTVAL (x);
775
776 /* We must sign or zero-extend in this case. Start by
777 zero-extending, then sign extend if we need to. */
778 val &= GET_MODE_MASK (oldmode);
779 if (! unsignedp
780 && val_signbit_known_set_p (oldmode, val))
781 val |= ~GET_MODE_MASK (oldmode);
782
783 return gen_int_mode (val, mode);
784 }
785
786 return gen_lowpart (mode, x);
787 }
788
789 /* Converting an integer constant into a vector mode is always equivalent
790 to a subreg operation. */
791 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
792 {
793 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
794 return simplify_gen_subreg (mode, x, oldmode, 0);
795 }
796
797 temp = gen_reg_rtx (mode);
798 convert_move (temp, x, unsignedp);
799 return temp;
800 }
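/* Two illustrative calls showing how the CONST_INT path above uses
   OLDMODE and UNSIGNEDP (recall that CONST_INTs are stored sign-extended
   within their mode, so a QImode 0xff is (const_int -1)):

     convert_modes (HImode, QImode, constm1_rtx, 1)  ->  (const_int 255)
     convert_modes (HImode, QImode, constm1_rtx, 0)  ->  (const_int -1)

   i.e. the constant is zero- or sign-extended out of OLDMODE according
   to UNSIGNEDP, without emitting any instructions.  */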
801 \f
802 /* Return the largest alignment we can use for doing a move (or store)
803 of MAX_PIECES. ALIGN is the largest alignment we could use. */
804
805 static unsigned int
806 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
807 {
808 enum machine_mode tmode;
809
810 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
811 if (align >= GET_MODE_ALIGNMENT (tmode))
812 align = GET_MODE_ALIGNMENT (tmode);
813 else
814 {
815 enum machine_mode tmode, xmode;
816
817 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
818 tmode != VOIDmode;
819 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
820 if (GET_MODE_SIZE (tmode) > max_pieces
821 || SLOW_UNALIGNED_ACCESS (tmode, align))
822 break;
823
824 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
825 }
826
827 return align;
828 }
829
830 /* Return the widest integer mode that is strictly narrower than SIZE
831 bytes. If no such mode can be found, return VOIDmode. */
832
833 static enum machine_mode
834 widest_int_mode_for_size (unsigned int size)
835 {
836 enum machine_mode tmode, mode = VOIDmode;
837
838 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
839 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
840 if (GET_MODE_SIZE (tmode) < size)
841 mode = tmode;
842
843 return mode;
844 }
845
846 /* STORE_MAX_PIECES is the number of bytes at a time that we can
847 store efficiently. Due to internal GCC limitations, this is
848 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
849 for an immediate constant. */
850
851 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
852
853 /* Determine whether the LEN bytes can be moved by using several move
854 instructions. Return nonzero if a call to move_by_pieces should
855 succeed. */
856
857 int
858 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
859 unsigned int align ATTRIBUTE_UNUSED)
860 {
861 return MOVE_BY_PIECES_P (len, align);
862 }
863
864 /* Generate several move instructions to copy LEN bytes from block FROM to
865 block TO. (These are MEM rtx's with BLKmode).
866
867 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
868 used to push FROM to the stack.
869
870 ALIGN is maximum stack alignment we can assume.
871
872 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
873 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
874 stpcpy. */
875
876 rtx
877 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
878 unsigned int align, int endp)
879 {
880 struct move_by_pieces_d data;
881 enum machine_mode to_addr_mode;
882 enum machine_mode from_addr_mode = get_address_mode (from);
883 rtx to_addr, from_addr = XEXP (from, 0);
884 unsigned int max_size = MOVE_MAX_PIECES + 1;
885 enum insn_code icode;
886
887 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
888
889 data.offset = 0;
890 data.from_addr = from_addr;
891 if (to)
892 {
893 to_addr_mode = get_address_mode (to);
894 to_addr = XEXP (to, 0);
895 data.to = to;
896 data.autinc_to
897 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
898 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
899 data.reverse
900 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
901 }
902 else
903 {
904 to_addr_mode = VOIDmode;
905 to_addr = NULL_RTX;
906 data.to = NULL_RTX;
907 data.autinc_to = 1;
908 #ifdef STACK_GROWS_DOWNWARD
909 data.reverse = 1;
910 #else
911 data.reverse = 0;
912 #endif
913 }
914 data.to_addr = to_addr;
915 data.from = from;
916 data.autinc_from
917 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
918 || GET_CODE (from_addr) == POST_INC
919 || GET_CODE (from_addr) == POST_DEC);
920
921 data.explicit_inc_from = 0;
922 data.explicit_inc_to = 0;
923 if (data.reverse) data.offset = len;
924 data.len = len;
925
926 /* If copying requires more than two move insns,
927 copy addresses to registers (to make displacements shorter)
928 and use post-increment if available. */
929 if (!(data.autinc_from && data.autinc_to)
930 && move_by_pieces_ninsns (len, align, max_size) > 2)
931 {
932 /* Find the mode of the largest move...
933 MODE might not be used depending on the definitions of the
934 USE_* macros below. */
935 enum machine_mode mode ATTRIBUTE_UNUSED
936 = widest_int_mode_for_size (max_size);
937
938 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
939 {
940 data.from_addr = copy_to_mode_reg (from_addr_mode,
941 plus_constant (from_addr_mode,
942 from_addr, len));
943 data.autinc_from = 1;
944 data.explicit_inc_from = -1;
945 }
946 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
947 {
948 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
949 data.autinc_from = 1;
950 data.explicit_inc_from = 1;
951 }
952 if (!data.autinc_from && CONSTANT_P (from_addr))
953 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
954 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
955 {
956 data.to_addr = copy_to_mode_reg (to_addr_mode,
957 plus_constant (to_addr_mode,
958 to_addr, len));
959 data.autinc_to = 1;
960 data.explicit_inc_to = -1;
961 }
962 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
963 {
964 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
965 data.autinc_to = 1;
966 data.explicit_inc_to = 1;
967 }
968 if (!data.autinc_to && CONSTANT_P (to_addr))
969 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
970 }
971
972 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
973
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
976
977 while (max_size > 1 && data.len > 0)
978 {
979 enum machine_mode mode = widest_int_mode_for_size (max_size);
980
981 if (mode == VOIDmode)
982 break;
983
984 icode = optab_handler (mov_optab, mode);
985 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
986 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
987
988 max_size = GET_MODE_SIZE (mode);
989 }
990
991 /* The code above should have handled everything. */
992 gcc_assert (!data.len);
993
994 if (endp)
995 {
996 rtx to1;
997
998 gcc_assert (!data.reverse);
999 if (data.autinc_to)
1000 {
1001 if (endp == 2)
1002 {
1003 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1004 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1005 else
1006 data.to_addr = copy_to_mode_reg (to_addr_mode,
1007 plus_constant (to_addr_mode,
1008 data.to_addr,
1009 -1));
1010 }
1011 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1012 data.offset);
1013 }
1014 else
1015 {
1016 if (endp == 2)
1017 --data.offset;
1018 to1 = adjust_address (data.to, QImode, data.offset);
1019 }
1020 return to1;
1021 }
1022 else
1023 return data.to;
1024 }
1025
1026 /* Return the number of insns required to move L bytes by pieces.
1027 ALIGN (in bits) is the maximum alignment we can assume. */
1028
1029 unsigned HOST_WIDE_INT
1030 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1031 unsigned int max_size)
1032 {
1033 unsigned HOST_WIDE_INT n_insns = 0;
1034
1035 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1036
1037 while (max_size > 1 && l > 0)
1038 {
1039 enum machine_mode mode;
1040 enum insn_code icode;
1041
1042 mode = widest_int_mode_for_size (max_size);
1043
1044 if (mode == VOIDmode)
1045 break;
1046
1047 icode = optab_handler (mov_optab, mode);
1048 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1049 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1050
1051 max_size = GET_MODE_SIZE (mode);
1052 }
1053
1054 gcc_assert (!l);
1055 return n_insns;
1056 }
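/* A worked example, assuming a 32-bit target with MOVE_MAX_PIECES == 4
   (so callers pass MAX_SIZE == 5) where SImode, HImode and QImode all have
   mov patterns: for L == 15 and ALIGN == 32 the loop above counts
   3 SImode moves (12 bytes), then 1 HImode move (2 bytes), then 1 QImode
   move (1 byte), so move_by_pieces_ninsns (15, 32, 5) returns 5.  That
   count is what MOVE_BY_PIECES_P compares against MOVE_RATIO.  */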
1057
1058 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1059 with move instructions for mode MODE. GENFUN is the gen_... function
1060 to make a move insn for that mode. DATA has all the other info. */
1061
1062 static void
1063 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1064 struct move_by_pieces_d *data)
1065 {
1066 unsigned int size = GET_MODE_SIZE (mode);
1067 rtx to1 = NULL_RTX, from1;
1068
1069 while (data->len >= size)
1070 {
1071 if (data->reverse)
1072 data->offset -= size;
1073
1074 if (data->to)
1075 {
1076 if (data->autinc_to)
1077 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1078 data->offset);
1079 else
1080 to1 = adjust_address (data->to, mode, data->offset);
1081 }
1082
1083 if (data->autinc_from)
1084 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1085 data->offset);
1086 else
1087 from1 = adjust_address (data->from, mode, data->offset);
1088
1089 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1090 emit_insn (gen_add2_insn (data->to_addr,
1091 gen_int_mode (-(HOST_WIDE_INT) size,
1092 GET_MODE (data->to_addr))));
1093 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1094 emit_insn (gen_add2_insn (data->from_addr,
1095 gen_int_mode (-(HOST_WIDE_INT) size,
1096 GET_MODE (data->from_addr))));
1097
1098 if (data->to)
1099 emit_insn ((*genfun) (to1, from1));
1100 else
1101 {
1102 #ifdef PUSH_ROUNDING
1103 emit_single_push_insn (mode, from1, NULL);
1104 #else
1105 gcc_unreachable ();
1106 #endif
1107 }
1108
1109 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 gen_int_mode (size,
1112 GET_MODE (data->to_addr))));
1113 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1114 emit_insn (gen_add2_insn (data->from_addr,
1115 gen_int_mode (size,
1116 GET_MODE (data->from_addr))));
1117
1118 if (! data->reverse)
1119 data->offset += size;
1120
1121 data->len -= size;
1122 }
1123 }
1124 \f
1125 /* Emit code to move a block Y to a block X. This may be done with
1126 string-move instructions, with multiple scalar move instructions,
1127 or with a library call.
1128
1129 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1130 SIZE is an rtx that says how long they are.
1131 ALIGN is the maximum alignment we can assume they have.
1132 METHOD describes what kind of copy this is, and what mechanisms may be used.
1133 MIN_SIZE is the minimal size of the block to move.
1134 MAX_SIZE is the maximal size of the block to move; if it cannot be
1135 represented in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1136
1137 Return the address of the new block, if memcpy is called and returns it,
1138 0 otherwise. */
1139
1140 rtx
1141 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1142 unsigned int expected_align, HOST_WIDE_INT expected_size,
1143 unsigned HOST_WIDE_INT min_size,
1144 unsigned HOST_WIDE_INT max_size,
1145 unsigned HOST_WIDE_INT probable_max_size)
1146 {
1147 bool may_use_call;
1148 rtx retval = 0;
1149 unsigned int align;
1150
1151 gcc_assert (size);
1152 if (CONST_INT_P (size)
1153 && INTVAL (size) == 0)
1154 return 0;
1155
1156 switch (method)
1157 {
1158 case BLOCK_OP_NORMAL:
1159 case BLOCK_OP_TAILCALL:
1160 may_use_call = true;
1161 break;
1162
1163 case BLOCK_OP_CALL_PARM:
1164 may_use_call = block_move_libcall_safe_for_call_parm ();
1165
1166 /* Make inhibit_defer_pop nonzero around the library call
1167 to force it to pop the arguments right away. */
1168 NO_DEFER_POP;
1169 break;
1170
1171 case BLOCK_OP_NO_LIBCALL:
1172 may_use_call = false;
1173 break;
1174
1175 default:
1176 gcc_unreachable ();
1177 }
1178
1179 gcc_assert (MEM_P (x) && MEM_P (y));
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1181 gcc_assert (align >= BITS_PER_UNIT);
1182
1183 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1184 block copy is more efficient for other large modes, e.g. DCmode. */
1185 x = adjust_address (x, BLKmode, 0);
1186 y = adjust_address (y, BLKmode, 0);
1187
1188 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1189 can be incorrect is coming from __builtin_memcpy. */
1190 if (CONST_INT_P (size))
1191 {
1192 x = shallow_copy_rtx (x);
1193 y = shallow_copy_rtx (y);
1194 set_mem_size (x, INTVAL (size));
1195 set_mem_size (y, INTVAL (size));
1196 }
1197
1198 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1199 move_by_pieces (x, y, INTVAL (size), align, 0);
1200 else if (emit_block_move_via_movmem (x, y, size, align,
1201 expected_align, expected_size,
1202 min_size, max_size, probable_max_size))
1203 ;
1204 else if (may_use_call
1205 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1206 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1207 {
1208 /* Since x and y are passed to a libcall, mark the corresponding
1209 tree EXPR as addressable. */
1210 tree y_expr = MEM_EXPR (y);
1211 tree x_expr = MEM_EXPR (x);
1212 if (y_expr)
1213 mark_addressable (y_expr);
1214 if (x_expr)
1215 mark_addressable (x_expr);
1216 retval = emit_block_move_via_libcall (x, y, size,
1217 method == BLOCK_OP_TAILCALL);
1218 }
1219
1220 else
1221 emit_block_move_via_loop (x, y, size, align);
1222
1223 if (method == BLOCK_OP_CALL_PARM)
1224 OK_DEFER_POP;
1225
1226 return retval;
1227 }
1228
1229 rtx
1230 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1231 {
1232 unsigned HOST_WIDE_INT max, min = 0;
1233 if (GET_CODE (size) == CONST_INT)
1234 min = max = UINTVAL (size);
1235 else
1236 max = GET_MODE_MASK (GET_MODE (size));
1237 return emit_block_move_hints (x, y, size, method, 0, -1,
1238 min, max, max);
1239 }
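/* A minimal usage sketch (illustrative; the helper name and the assumption
   that DST_ADDR and SRC_ADDR are valid Pmode addresses are hypothetical):
   emit a 64-byte copy with the default strategy, letting emit_block_move
   choose between move_by_pieces, a movmem pattern and a memcpy libcall.  */

static void
example_copy_64_bytes (rtx dst_addr, rtx src_addr)
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  /* emit_block_move asserts a known alignment of at least one byte.  */
  set_mem_align (dst, BITS_PER_UNIT);
  set_mem_align (src, BITS_PER_UNIT);

  emit_block_move (dst, src, GEN_INT (64), BLOCK_OP_NORMAL);
}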
1240
1241 /* A subroutine of emit_block_move. Returns true if calling the
1242 block move libcall will not clobber any parameters which may have
1243 already been placed on the stack. */
1244
1245 static bool
1246 block_move_libcall_safe_for_call_parm (void)
1247 {
1248 #if defined (REG_PARM_STACK_SPACE)
1249 tree fn;
1250 #endif
1251
1252 /* If arguments are pushed on the stack, then they're safe. */
1253 if (PUSH_ARGS)
1254 return true;
1255
1256 /* If registers go on the stack anyway, any argument is sure to clobber
1257 an outgoing argument. */
1258 #if defined (REG_PARM_STACK_SPACE)
1259 fn = emit_block_move_libcall_fn (false);
1260 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1261 depend on its argument. */
1262 (void) fn;
1263 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1264 && REG_PARM_STACK_SPACE (fn) != 0)
1265 return false;
1266 #endif
1267
1268 /* If any argument goes in memory, then it might clobber an outgoing
1269 argument. */
1270 {
1271 CUMULATIVE_ARGS args_so_far_v;
1272 cumulative_args_t args_so_far;
1273 tree fn, arg;
1274
1275 fn = emit_block_move_libcall_fn (false);
1276 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1277 args_so_far = pack_cumulative_args (&args_so_far_v);
1278
1279 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1280 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1281 {
1282 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1283 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1284 NULL_TREE, true);
1285 if (!tmp || !REG_P (tmp))
1286 return false;
1287 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1288 return false;
1289 targetm.calls.function_arg_advance (args_so_far, mode,
1290 NULL_TREE, true);
1291 }
1292 }
1293 return true;
1294 }
1295
1296 /* A subroutine of emit_block_move. Expand a movmem pattern;
1297 return true if successful. */
1298
1299 static bool
1300 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1301 unsigned int expected_align, HOST_WIDE_INT expected_size,
1302 unsigned HOST_WIDE_INT min_size,
1303 unsigned HOST_WIDE_INT max_size,
1304 unsigned HOST_WIDE_INT probable_max_size)
1305 {
1306 int save_volatile_ok = volatile_ok;
1307 enum machine_mode mode;
1308
1309 if (expected_align < align)
1310 expected_align = align;
1311 if (expected_size != -1)
1312 {
1313 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1314 expected_size = probable_max_size;
1315 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1316 expected_size = min_size;
1317 }
1318
1319 /* Since this is a move insn, we don't care about volatility. */
1320 volatile_ok = 1;
1321
1322 /* Try the most limited insn first, because there's no point
1323 including more than one in the machine description unless
1324 the more limited one has some advantage. */
1325
1326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1327 mode = GET_MODE_WIDER_MODE (mode))
1328 {
1329 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1330
1331 if (code != CODE_FOR_nothing
1332 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1333 here because if SIZE is less than the mode mask, as it is
1334 returned by the macro, it will definitely be less than the
1335 actual mode mask. Since SIZE is within the Pmode address
1336 space, we limit MODE to Pmode. */
1337 && ((CONST_INT_P (size)
1338 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1339 <= (GET_MODE_MASK (mode) >> 1)))
1340 || max_size <= (GET_MODE_MASK (mode) >> 1)
1341 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1342 {
1343 struct expand_operand ops[9];
1344 unsigned int nops;
1345
1346 /* ??? When called via emit_block_move_for_call, it'd be
1347 nice if there were some way to inform the backend, so
1348 that it doesn't fail the expansion because it thinks
1349 emitting the libcall would be more efficient. */
1350 nops = insn_data[(int) code].n_generator_args;
1351 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1352
1353 create_fixed_operand (&ops[0], x);
1354 create_fixed_operand (&ops[1], y);
1355 /* The check above guarantees that this size conversion is valid. */
1356 create_convert_operand_to (&ops[2], size, mode, true);
1357 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1358 if (nops >= 6)
1359 {
1360 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1361 create_integer_operand (&ops[5], expected_size);
1362 }
1363 if (nops >= 8)
1364 {
1365 create_integer_operand (&ops[6], min_size);
1366 /* If we cannot represent the maximal size,
1367 make the parameter NULL. */
1368 if ((HOST_WIDE_INT) max_size != -1)
1369 create_integer_operand (&ops[7], max_size);
1370 else
1371 create_fixed_operand (&ops[7], NULL);
1372 }
1373 if (nops == 9)
1374 {
1375 /* If we cannot represent the maximal size,
1376 make the parameter NULL. */
1377 if ((HOST_WIDE_INT) probable_max_size != -1)
1378 create_integer_operand (&ops[8], probable_max_size);
1379 else
1380 create_fixed_operand (&ops[8], NULL);
1381 }
1382 if (maybe_expand_insn (code, nops, ops))
1383 {
1384 volatile_ok = save_volatile_ok;
1385 return true;
1386 }
1387 }
1388 }
1389
1390 volatile_ok = save_volatile_ok;
1391 return false;
1392 }
1393
1394 /* A subroutine of emit_block_move. Expand a call to memcpy.
1395 Return the return value from memcpy, 0 otherwise. */
1396
1397 rtx
1398 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1399 {
1400 rtx dst_addr, src_addr;
1401 tree call_expr, fn, src_tree, dst_tree, size_tree;
1402 enum machine_mode size_mode;
1403 rtx retval;
1404
1405 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1406 pseudos. We can then place those new pseudos into a VAR_DECL and
1407 use them later. */
1408
1409 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1410 src_addr = copy_addr_to_reg (XEXP (src, 0));
1411
1412 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1413 src_addr = convert_memory_address (ptr_mode, src_addr);
1414
1415 dst_tree = make_tree (ptr_type_node, dst_addr);
1416 src_tree = make_tree (ptr_type_node, src_addr);
1417
1418 size_mode = TYPE_MODE (sizetype);
1419
1420 size = convert_to_mode (size_mode, size, 1);
1421 size = copy_to_mode_reg (size_mode, size);
1422
1423 /* It is incorrect to use the libcall calling conventions to call
1424 memcpy in this context. This could be a user call to memcpy and
1425 the user may wish to examine the return value from memcpy. For
1426 targets where libcalls and normal calls have different conventions
1427 for returning pointers, we could end up generating incorrect code. */
1428
1429 size_tree = make_tree (sizetype, size);
1430
1431 fn = emit_block_move_libcall_fn (true);
1432 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1433 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1434
1435 retval = expand_normal (call_expr);
1436
1437 return retval;
1438 }
1439
1440 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1441 for the function we use for block copies. */
1442
1443 static GTY(()) tree block_move_fn;
1444
1445 void
1446 init_block_move_fn (const char *asmspec)
1447 {
1448 if (!block_move_fn)
1449 {
1450 tree args, fn, attrs, attr_args;
1451
1452 fn = get_identifier ("memcpy");
1453 args = build_function_type_list (ptr_type_node, ptr_type_node,
1454 const_ptr_type_node, sizetype,
1455 NULL_TREE);
1456
1457 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1458 DECL_EXTERNAL (fn) = 1;
1459 TREE_PUBLIC (fn) = 1;
1460 DECL_ARTIFICIAL (fn) = 1;
1461 TREE_NOTHROW (fn) = 1;
1462 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1463 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1464
1465 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1466 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1467
1468 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1469
1470 block_move_fn = fn;
1471 }
1472
1473 if (asmspec)
1474 set_user_assembler_name (block_move_fn, asmspec);
1475 }
1476
1477 static tree
1478 emit_block_move_libcall_fn (int for_call)
1479 {
1480 static bool emitted_extern;
1481
1482 if (!block_move_fn)
1483 init_block_move_fn (NULL);
1484
1485 if (for_call && !emitted_extern)
1486 {
1487 emitted_extern = true;
1488 make_decl_rtl (block_move_fn);
1489 }
1490
1491 return block_move_fn;
1492 }
1493
1494 /* A subroutine of emit_block_move. Copy the data via an explicit
1495 loop. This is used only when libcalls are forbidden. */
1496 /* ??? It'd be nice to copy in hunks larger than QImode. */
1497
1498 static void
1499 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1500 unsigned int align ATTRIBUTE_UNUSED)
1501 {
1502 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1503 enum machine_mode x_addr_mode = get_address_mode (x);
1504 enum machine_mode y_addr_mode = get_address_mode (y);
1505 enum machine_mode iter_mode;
1506
1507 iter_mode = GET_MODE (size);
1508 if (iter_mode == VOIDmode)
1509 iter_mode = word_mode;
1510
1511 top_label = gen_label_rtx ();
1512 cmp_label = gen_label_rtx ();
1513 iter = gen_reg_rtx (iter_mode);
1514
1515 emit_move_insn (iter, const0_rtx);
1516
1517 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1518 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1519 do_pending_stack_adjust ();
1520
1521 emit_jump (cmp_label);
1522 emit_label (top_label);
1523
1524 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1525 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1526
1527 if (x_addr_mode != y_addr_mode)
1528 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1529 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1530
1531 x = change_address (x, QImode, x_addr);
1532 y = change_address (y, QImode, y_addr);
1533
1534 emit_move_insn (x, y);
1535
1536 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1537 true, OPTAB_LIB_WIDEN);
1538 if (tmp != iter)
1539 emit_move_insn (iter, tmp);
1540
1541 emit_label (cmp_label);
1542
1543 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1544 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1545 }
1546 \f
1547 /* Copy all or part of a value X into registers starting at REGNO.
1548 The number of registers to be filled is NREGS. */
1549
1550 void
1551 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1552 {
1553 int i;
1554 #ifdef HAVE_load_multiple
1555 rtx pat;
1556 rtx last;
1557 #endif
1558
1559 if (nregs == 0)
1560 return;
1561
1562 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1563 x = validize_mem (force_const_mem (mode, x));
1564
1565 /* See if the machine can do this with a load multiple insn. */
1566 #ifdef HAVE_load_multiple
1567 if (HAVE_load_multiple)
1568 {
1569 last = get_last_insn ();
1570 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1571 GEN_INT (nregs));
1572 if (pat)
1573 {
1574 emit_insn (pat);
1575 return;
1576 }
1577 else
1578 delete_insns_since (last);
1579 }
1580 #endif
1581
1582 for (i = 0; i < nregs; i++)
1583 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1584 operand_subword_force (x, i, mode));
1585 }
1586
1587 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1588 The number of registers to be filled is NREGS. */
1589
1590 void
1591 move_block_from_reg (int regno, rtx x, int nregs)
1592 {
1593 int i;
1594
1595 if (nregs == 0)
1596 return;
1597
1598 /* See if the machine can do this with a store multiple insn. */
1599 #ifdef HAVE_store_multiple
1600 if (HAVE_store_multiple)
1601 {
1602 rtx last = get_last_insn ();
1603 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1604 GEN_INT (nregs));
1605 if (pat)
1606 {
1607 emit_insn (pat);
1608 return;
1609 }
1610 else
1611 delete_insns_since (last);
1612 }
1613 #endif
1614
1615 for (i = 0; i < nregs; i++)
1616 {
1617 rtx tem = operand_subword (x, i, 1, BLKmode);
1618
1619 gcc_assert (tem);
1620
1621 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1622 }
1623 }
1624
1625 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1626 ORIG, where ORIG is a non-consecutive group of registers represented by
1627 a PARALLEL. The clone is identical to the original except in that the
1628 original set of registers is replaced by a new set of pseudo registers.
1629 The new set has the same modes as the original set. */
1630
1631 rtx
1632 gen_group_rtx (rtx orig)
1633 {
1634 int i, length;
1635 rtx *tmps;
1636
1637 gcc_assert (GET_CODE (orig) == PARALLEL);
1638
1639 length = XVECLEN (orig, 0);
1640 tmps = XALLOCAVEC (rtx, length);
1641
1642 /* Skip a NULL entry in first slot. */
1643 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1644
1645 if (i)
1646 tmps[0] = 0;
1647
1648 for (; i < length; i++)
1649 {
1650 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1651 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1652
1653 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1654 }
1655
1656 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1657 }
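/* Schematic example (register numbers and modes are illustrative): a group
   describing a 16-byte value in two DImode hard registers looks like

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   where the second operand of each EXPR_LIST is the byte offset of that
   piece.  gen_group_rtx returns the same shape with the hard registers
   replaced by fresh pseudos of the same modes, and a NULL in slot 0 (used
   for a value split between stack and registers) is preserved.  */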
1658
1659 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1660 except that values are placed in TMPS[i], and must later be moved
1661 into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1662
1663 static void
1664 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1665 {
1666 rtx src;
1667 int start, i;
1668 enum machine_mode m = GET_MODE (orig_src);
1669
1670 gcc_assert (GET_CODE (dst) == PARALLEL);
1671
1672 if (m != VOIDmode
1673 && !SCALAR_INT_MODE_P (m)
1674 && !MEM_P (orig_src)
1675 && GET_CODE (orig_src) != CONCAT)
1676 {
1677 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1678 if (imode == BLKmode)
1679 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1680 else
1681 src = gen_reg_rtx (imode);
1682 if (imode != BLKmode)
1683 src = gen_lowpart (GET_MODE (orig_src), src);
1684 emit_move_insn (src, orig_src);
1685 /* ...and back again. */
1686 if (imode != BLKmode)
1687 src = gen_lowpart (imode, src);
1688 emit_group_load_1 (tmps, dst, src, type, ssize);
1689 return;
1690 }
1691
1692 /* Check for a NULL entry, used to indicate that the parameter goes
1693 both on the stack and in registers. */
1694 if (XEXP (XVECEXP (dst, 0, 0), 0))
1695 start = 0;
1696 else
1697 start = 1;
1698
1699 /* Process the pieces. */
1700 for (i = start; i < XVECLEN (dst, 0); i++)
1701 {
1702 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1703 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1704 unsigned int bytelen = GET_MODE_SIZE (mode);
1705 int shift = 0;
1706
1707 /* Handle trailing fragments that run over the size of the struct. */
1708 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1709 {
1710 /* Arrange to shift the fragment to where it belongs.
1711 extract_bit_field loads to the lsb of the reg. */
1712 if (
1713 #ifdef BLOCK_REG_PADDING
1714 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1715 == (BYTES_BIG_ENDIAN ? upward : downward)
1716 #else
1717 BYTES_BIG_ENDIAN
1718 #endif
1719 )
1720 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1721 bytelen = ssize - bytepos;
1722 gcc_assert (bytelen > 0);
1723 }
1724
1725 /* If we won't be loading directly from memory, protect the real source
1726 from strange tricks we might play; but make sure that the source can
1727 be loaded directly into the destination. */
1728 src = orig_src;
1729 if (!MEM_P (orig_src)
1730 && (!CONSTANT_P (orig_src)
1731 || (GET_MODE (orig_src) != mode
1732 && GET_MODE (orig_src) != VOIDmode)))
1733 {
1734 if (GET_MODE (orig_src) == VOIDmode)
1735 src = gen_reg_rtx (mode);
1736 else
1737 src = gen_reg_rtx (GET_MODE (orig_src));
1738
1739 emit_move_insn (src, orig_src);
1740 }
1741
1742 /* Optimize the access just a bit. */
1743 if (MEM_P (src)
1744 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1745 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1746 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1747 && bytelen == GET_MODE_SIZE (mode))
1748 {
1749 tmps[i] = gen_reg_rtx (mode);
1750 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1751 }
1752 else if (COMPLEX_MODE_P (mode)
1753 && GET_MODE (src) == mode
1754 && bytelen == GET_MODE_SIZE (mode))
1755 /* Let emit_move_complex do the bulk of the work. */
1756 tmps[i] = src;
1757 else if (GET_CODE (src) == CONCAT)
1758 {
1759 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1760 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1761
1762 if ((bytepos == 0 && bytelen == slen0)
1763 || (bytepos != 0 && bytepos + bytelen <= slen))
1764 {
1765 /* The following assumes that the concatenated objects all
1766 have the same size. In this case, a simple calculation
1767 can be used to determine the object and the bit field
1768 to be extracted. */
1769 tmps[i] = XEXP (src, bytepos / slen0);
1770 if (! CONSTANT_P (tmps[i])
1771 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1772 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1773 (bytepos % slen0) * BITS_PER_UNIT,
1774 1, NULL_RTX, mode, mode);
1775 }
1776 else
1777 {
1778 rtx mem;
1779
1780 gcc_assert (!bytepos);
1781 mem = assign_stack_temp (GET_MODE (src), slen);
1782 emit_move_insn (mem, src);
1783 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1784 0, 1, NULL_RTX, mode, mode);
1785 }
1786 }
1787 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1788 SIMD register, which is currently broken. Until we get GCC
1789 to emit proper RTL for these cases, let's dump to memory. */
1790 else if (VECTOR_MODE_P (GET_MODE (dst))
1791 && REG_P (src))
1792 {
1793 int slen = GET_MODE_SIZE (GET_MODE (src));
1794 rtx mem;
1795
1796 mem = assign_stack_temp (GET_MODE (src), slen);
1797 emit_move_insn (mem, src);
1798 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1799 }
1800 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1801 && XVECLEN (dst, 0) > 1)
1802 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1803 else if (CONSTANT_P (src))
1804 {
1805 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1806
1807 if (len == ssize)
1808 tmps[i] = src;
1809 else
1810 {
1811 rtx first, second;
1812
1813 gcc_assert (2 * len == ssize);
1814 split_double (src, &first, &second);
1815 if (i)
1816 tmps[i] = second;
1817 else
1818 tmps[i] = first;
1819 }
1820 }
1821 else if (REG_P (src) && GET_MODE (src) == mode)
1822 tmps[i] = src;
1823 else
1824 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1825 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1826 mode, mode);
1827
1828 if (shift)
1829 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1830 shift, tmps[i], 0);
1831 }
1832 }
1833
1834 /* Emit code to move a block SRC of type TYPE to a block DST,
1835 where DST is non-consecutive registers represented by a PARALLEL.
1836 SSIZE represents the total size of block SRC in bytes, or -1
1837 if not known. */
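/* The PARALLELs handled here have the shape used for passing values in
   multiple registers: each element is an EXPR_LIST pairing a register
   with the byte offset it covers, e.g.
     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])
   for a 16-byte block split across two 8-byte registers.  A null
   register in the first element indicates that part of the value also
   lives on the stack.  */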
1838
1839 void
1840 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1841 {
1842 rtx *tmps;
1843 int i;
1844
1845 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1846 emit_group_load_1 (tmps, dst, src, type, ssize);
1847
1848 /* Copy the extracted pieces into the proper (probable) hard regs. */
1849 for (i = 0; i < XVECLEN (dst, 0); i++)
1850 {
1851 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1852 if (d == NULL)
1853 continue;
1854 emit_move_insn (d, tmps[i]);
1855 }
1856 }
1857
1858 /* Similar, but load SRC into new pseudos in a format that looks like
1859 PARALLEL. This can later be fed to emit_group_move to get things
1860 in the right place. */
1861
1862 rtx
1863 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1864 {
1865 rtvec vec;
1866 int i;
1867
1868 vec = rtvec_alloc (XVECLEN (parallel, 0));
1869 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1870
1871 /* Convert the vector to look just like the original PARALLEL, except
1872 with the computed values. */
1873 for (i = 0; i < XVECLEN (parallel, 0); i++)
1874 {
1875 rtx e = XVECEXP (parallel, 0, i);
1876 rtx d = XEXP (e, 0);
1877
1878 if (d)
1879 {
1880 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1881 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1882 }
1883 RTVEC_ELT (vec, i) = e;
1884 }
1885
1886 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1887 }
1888
1889 /* Emit code to move a block SRC to block DST, where SRC and DST are
1890 non-consecutive groups of registers, each represented by a PARALLEL. */
1891
1892 void
1893 emit_group_move (rtx dst, rtx src)
1894 {
1895 int i;
1896
1897 gcc_assert (GET_CODE (src) == PARALLEL
1898 && GET_CODE (dst) == PARALLEL
1899 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1900
1901 /* Skip first entry if NULL. */
1902 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1903 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1904 XEXP (XVECEXP (src, 0, i), 0));
1905 }
1906
1907 /* Move a group of registers represented by a PARALLEL into pseudos. */
1908
1909 rtx
1910 emit_group_move_into_temps (rtx src)
1911 {
1912 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1913 int i;
1914
1915 for (i = 0; i < XVECLEN (src, 0); i++)
1916 {
1917 rtx e = XVECEXP (src, 0, i);
1918 rtx d = XEXP (e, 0);
1919
1920 if (d)
1921 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1922 RTVEC_ELT (vec, i) = e;
1923 }
1924
1925 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1926 }
1927
1928 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1929 where SRC is non-consecutive registers represented by a PARALLEL.
1930 SSIZE represents the total size of block ORIG_DST, or -1 if not
1931 known. */
1932
1933 void
1934 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1935 {
1936 rtx *tmps, dst;
1937 int start, finish, i;
1938 enum machine_mode m = GET_MODE (orig_dst);
1939
1940 gcc_assert (GET_CODE (src) == PARALLEL);
1941
1942 if (!SCALAR_INT_MODE_P (m)
1943 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1944 {
1945 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1946 if (imode == BLKmode)
1947 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1948 else
1949 dst = gen_reg_rtx (imode);
1950 emit_group_store (dst, src, type, ssize);
1951 if (imode != BLKmode)
1952 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1953 emit_move_insn (orig_dst, dst);
1954 return;
1955 }
1956
1957 /* Check for a NULL entry, used to indicate that the parameter goes
1958 both on the stack and in registers. */
1959 if (XEXP (XVECEXP (src, 0, 0), 0))
1960 start = 0;
1961 else
1962 start = 1;
1963 finish = XVECLEN (src, 0);
1964
1965 tmps = XALLOCAVEC (rtx, finish);
1966
1967 /* Copy the (probable) hard regs into pseudos. */
1968 for (i = start; i < finish; i++)
1969 {
1970 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1971 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1972 {
1973 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1974 emit_move_insn (tmps[i], reg);
1975 }
1976 else
1977 tmps[i] = reg;
1978 }
1979
1980 /* If we won't be storing directly into memory, protect the real destination
1981 from strange tricks we might play. */
1982 dst = orig_dst;
1983 if (GET_CODE (dst) == PARALLEL)
1984 {
1985 rtx temp;
1986
1987 /* We can get a PARALLEL dst if there is a conditional expression in
1988 a return statement. In that case, the dst and src are the same,
1989 so no action is necessary. */
1990 if (rtx_equal_p (dst, src))
1991 return;
1992
1993 /* It is unclear if we can ever reach here, but we may as well handle
1994 it. Allocate a temporary, and split this into a store/load to/from
1995 the temporary. */
1996
1997 temp = assign_stack_temp (GET_MODE (dst), ssize);
1998 emit_group_store (temp, src, type, ssize);
1999 emit_group_load (dst, temp, type, ssize);
2000 return;
2001 }
2002 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2003 {
2004 enum machine_mode outer = GET_MODE (dst);
2005 enum machine_mode inner;
2006 HOST_WIDE_INT bytepos;
2007 bool done = false;
2008 rtx temp;
2009
2010 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2011 dst = gen_reg_rtx (outer);
2012
2013 /* Make life a bit easier for combine. */
2014 /* If the first element of the vector is the low part
2015 of the destination mode, use a paradoxical subreg to
2016 initialize the destination. */
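/* For instance, if DST is a TImode pseudo and tmps[start] is the DImode
   piece destined for byte 0 on a little-endian target, moving
   (subreg:TI (reg:DI ...) 0) into DST sets up the low half directly
   and the zero-initialization below can be skipped.  */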
2017 if (start < finish)
2018 {
2019 inner = GET_MODE (tmps[start]);
2020 bytepos = subreg_lowpart_offset (inner, outer);
2021 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2022 {
2023 temp = simplify_gen_subreg (outer, tmps[start],
2024 inner, 0);
2025 if (temp)
2026 {
2027 emit_move_insn (dst, temp);
2028 done = true;
2029 start++;
2030 }
2031 }
2032 }
2033
2034 /* If the first element wasn't the low part, try the last. */
2035 if (!done
2036 && start < finish - 1)
2037 {
2038 inner = GET_MODE (tmps[finish - 1]);
2039 bytepos = subreg_lowpart_offset (inner, outer);
2040 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2041 {
2042 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2043 inner, 0);
2044 if (temp)
2045 {
2046 emit_move_insn (dst, temp);
2047 done = true;
2048 finish--;
2049 }
2050 }
2051 }
2052
2053 /* Otherwise, simply initialize the result to zero. */
2054 if (!done)
2055 emit_move_insn (dst, CONST0_RTX (outer));
2056 }
2057
2058 /* Process the pieces. */
2059 for (i = start; i < finish; i++)
2060 {
2061 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2062 enum machine_mode mode = GET_MODE (tmps[i]);
2063 unsigned int bytelen = GET_MODE_SIZE (mode);
2064 unsigned int adj_bytelen = bytelen;
2065 rtx dest = dst;
2066
2067 /* Handle trailing fragments that run over the size of the struct. */
2068 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2069 adj_bytelen = ssize - bytepos;
2070
2071 if (GET_CODE (dst) == CONCAT)
2072 {
2073 if (bytepos + adj_bytelen
2074 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2075 dest = XEXP (dst, 0);
2076 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2077 {
2078 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2079 dest = XEXP (dst, 1);
2080 }
2081 else
2082 {
2083 enum machine_mode dest_mode = GET_MODE (dest);
2084 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2085
2086 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2087
2088 if (GET_MODE_ALIGNMENT (dest_mode)
2089 >= GET_MODE_ALIGNMENT (tmp_mode))
2090 {
2091 dest = assign_stack_temp (dest_mode,
2092 GET_MODE_SIZE (dest_mode));
2093 emit_move_insn (adjust_address (dest,
2094 tmp_mode,
2095 bytepos),
2096 tmps[i]);
2097 dst = dest;
2098 }
2099 else
2100 {
2101 dest = assign_stack_temp (tmp_mode,
2102 GET_MODE_SIZE (tmp_mode));
2103 emit_move_insn (dest, tmps[i]);
2104 dst = adjust_address (dest, dest_mode, bytepos);
2105 }
2106 break;
2107 }
2108 }
2109
2110 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2111 {
2112 /* store_bit_field always takes its value from the lsb.
2113 Move the fragment to the lsb if it's not already there. */
2114 if (
2115 #ifdef BLOCK_REG_PADDING
2116 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2117 == (BYTES_BIG_ENDIAN ? upward : downward)
2118 #else
2119 BYTES_BIG_ENDIAN
2120 #endif
2121 )
2122 {
2123 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2124 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2125 shift, tmps[i], 0);
2126 }
2127 bytelen = adj_bytelen;
2128 }
2129
2130 /* Optimize the access just a bit. */
2131 if (MEM_P (dest)
2132 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2133 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2134 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2135 && bytelen == GET_MODE_SIZE (mode))
2136 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2137 else
2138 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2139 0, 0, mode, tmps[i]);
2140 }
2141
2142 /* Copy from the pseudo into the (probable) hard reg. */
2143 if (orig_dst != dst)
2144 emit_move_insn (orig_dst, dst);
2145 }
2146
2147 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2148 of the value stored in X. */
2149
2150 rtx
2151 maybe_emit_group_store (rtx x, tree type)
2152 {
2153 enum machine_mode mode = TYPE_MODE (type);
2154 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2155 if (GET_CODE (x) == PARALLEL)
2156 {
2157 rtx result = gen_reg_rtx (mode);
2158 emit_group_store (result, x, type, int_size_in_bytes (type));
2159 return result;
2160 }
2161 return x;
2162 }
2163
2164 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2165
2166 This is used on targets that return BLKmode values in registers. */
2167
2168 void
2169 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2170 {
2171 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2172 rtx src = NULL, dst = NULL;
2173 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2174 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2175 enum machine_mode mode = GET_MODE (srcreg);
2176 enum machine_mode tmode = GET_MODE (target);
2177 enum machine_mode copy_mode;
2178
2179 /* BLKmode registers created in the back-end shouldn't have survived. */
2180 gcc_assert (mode != BLKmode);
2181
2182 /* If the structure doesn't take up a whole number of words, see whether
2183 SRCREG is padded on the left or on the right. If it's on the left,
2184 set PADDING_CORRECTION to the number of bits to skip.
2185
2186 In most ABIs, the structure will be returned at the least significant end of
2187 the register, which translates to right padding on little-endian
2188 targets and left padding on big-endian targets. The opposite
2189 holds if the structure is returned at the most significant
2190 end of the register. */
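/* For example, on a 32-bit target where the correction applies, a
   6-byte structure occupies only 2 bytes of its final word, so
   PADDING_CORRECTION is 32 - 2 * 8 = 16 bits.  */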
2191 if (bytes % UNITS_PER_WORD != 0
2192 && (targetm.calls.return_in_msb (type)
2193 ? !BYTES_BIG_ENDIAN
2194 : BYTES_BIG_ENDIAN))
2195 padding_correction
2196 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2197
2198 /* We can use a single move if we have an exact mode for the size. */
2199 else if (MEM_P (target)
2200 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2201 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2202 && bytes == GET_MODE_SIZE (mode))
2203 {
2204 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2205 return;
2206 }
2207
2208 /* And if we additionally have the same mode for a register. */
2209 else if (REG_P (target)
2210 && GET_MODE (target) == mode
2211 && bytes == GET_MODE_SIZE (mode))
2212 {
2213 emit_move_insn (target, srcreg);
2214 return;
2215 }
2216
2217 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2218 into a new pseudo which is a full word. */
2219 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2220 {
2221 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2222 mode = word_mode;
2223 }
2224
2225 /* Copy the structure BITSIZE bits at a time. If the target lives in
2226 memory, take care of not reading/writing past its end by selecting
2227 a copy mode suited to BITSIZE. This should always be possible given
2228 how it is computed.
2229
2230 If the target lives in a register, make sure not to select a copy mode
2231 larger than the mode of the register.
2232
2233 We could probably emit more efficient code for machines which do not use
2234 strict alignment, but it doesn't seem worth the effort at the current
2235 time. */
2236
2237 copy_mode = word_mode;
2238 if (MEM_P (target))
2239 {
2240 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2241 if (mem_mode != BLKmode)
2242 copy_mode = mem_mode;
2243 }
2244 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2245 copy_mode = tmode;
2246
2247 for (bitpos = 0, xbitpos = padding_correction;
2248 bitpos < bytes * BITS_PER_UNIT;
2249 bitpos += bitsize, xbitpos += bitsize)
2250 {
2251 /* We need a new source operand each time xbitpos is on a
2252 word boundary and when xbitpos == padding_correction
2253 (the first time through). */
2254 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2255 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2256
2257 /* We need a new destination operand each time bitpos is on
2258 a word boundary. */
2259 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2260 dst = target;
2261 else if (bitpos % BITS_PER_WORD == 0)
2262 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2263
2264 /* Use xbitpos for the source extraction (right justified) and
2265 bitpos for the destination store (left justified). */
2266 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2267 extract_bit_field (src, bitsize,
2268 xbitpos % BITS_PER_WORD, 1,
2269 NULL_RTX, copy_mode, copy_mode));
2270 }
2271 }
2272
2273 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2274 register if it contains any data, otherwise return null.
2275
2276 This is used on targets that return BLKmode values in registers. */
2277
2278 rtx
2279 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2280 {
2281 int i, n_regs;
2282 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2283 unsigned int bitsize;
2284 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2285 enum machine_mode dst_mode;
2286
2287 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2288
2289 x = expand_normal (src);
2290
2291 bytes = int_size_in_bytes (TREE_TYPE (src));
2292 if (bytes == 0)
2293 return NULL_RTX;
2294
2295 /* If the structure doesn't take up a whole number of words, see
2296 whether the register value should be padded on the left or on
2297 the right. Set PADDING_CORRECTION to the number of padding
2298 bits needed on the left side.
2299
2300 In most ABIs, the structure will be returned at the least significant end of
2301 the register, which translates to right padding on little-endian
2302 targets and left padding on big-endian targets. The opposite
2303 holds if the structure is returned at the most significant
2304 end of the register. */
2305 if (bytes % UNITS_PER_WORD != 0
2306 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2307 ? !BYTES_BIG_ENDIAN
2308 : BYTES_BIG_ENDIAN))
2309 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2310 * BITS_PER_UNIT));
2311
2312 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2313 dst_words = XALLOCAVEC (rtx, n_regs);
2314 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2315
2316 /* Copy the structure BITSIZE bits at a time. */
2317 for (bitpos = 0, xbitpos = padding_correction;
2318 bitpos < bytes * BITS_PER_UNIT;
2319 bitpos += bitsize, xbitpos += bitsize)
2320 {
2321 /* We need a new destination pseudo each time xbitpos is
2322 on a word boundary and when xbitpos == padding_correction
2323 (the first time through). */
2324 if (xbitpos % BITS_PER_WORD == 0
2325 || xbitpos == padding_correction)
2326 {
2327 /* Generate an appropriate register. */
2328 dst_word = gen_reg_rtx (word_mode);
2329 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2330
2331 /* Clear the destination before we move anything into it. */
2332 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2333 }
2334
2335 /* We need a new source operand each time bitpos is on a word
2336 boundary. */
2337 if (bitpos % BITS_PER_WORD == 0)
2338 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2339
2340 /* Use bitpos for the source extraction (left justified) and
2341 xbitpos for the destination store (right justified). */
2342 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2343 0, 0, word_mode,
2344 extract_bit_field (src_word, bitsize,
2345 bitpos % BITS_PER_WORD, 1,
2346 NULL_RTX, word_mode, word_mode));
2347 }
2348
2349 if (mode == BLKmode)
2350 {
2351 /* Find the smallest integer mode large enough to hold the
2352 entire structure. */
2353 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2354 mode != VOIDmode;
2355 mode = GET_MODE_WIDER_MODE (mode))
2356 /* Have we found a large enough mode? */
2357 if (GET_MODE_SIZE (mode) >= bytes)
2358 break;
2359
2360 /* A suitable mode should have been found. */
2361 gcc_assert (mode != VOIDmode);
2362 }
2363
2364 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2365 dst_mode = word_mode;
2366 else
2367 dst_mode = mode;
2368 dst = gen_reg_rtx (dst_mode);
2369
2370 for (i = 0; i < n_regs; i++)
2371 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2372
2373 if (mode != dst_mode)
2374 dst = gen_lowpart (mode, dst);
2375
2376 return dst;
2377 }
2378
2379 /* Add a USE expression for REG to the (possibly empty) list pointed
2380 to by CALL_FUSAGE. REG must denote a hard register. */
2381
2382 void
2383 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2384 {
2385 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2386
2387 *call_fusage
2388 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2389 }
2390
2391 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2392 starting at REGNO. All of these registers must be hard registers. */
2393
2394 void
2395 use_regs (rtx *call_fusage, int regno, int nregs)
2396 {
2397 int i;
2398
2399 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2400
2401 for (i = 0; i < nregs; i++)
2402 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2403 }
2404
2405 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2406 PARALLEL REGS. This is for calls that pass values in multiple
2407 non-contiguous locations. The Irix 6 ABI has examples of this. */
2408
2409 void
2410 use_group_regs (rtx *call_fusage, rtx regs)
2411 {
2412 int i;
2413
2414 for (i = 0; i < XVECLEN (regs, 0); i++)
2415 {
2416 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2417
2418 /* A NULL entry means the parameter goes both on the stack and in
2419 registers. This can also be a MEM for targets that pass values
2420 partially on the stack and partially in registers. */
2421 if (reg != 0 && REG_P (reg))
2422 use_reg (call_fusage, reg);
2423 }
2424 }
2425
2426 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2427 assignment and the code of the expression on the RHS is CODE. Return
2428 NULL otherwise. */
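/* This lets the expander peek through SSA temporaries whose definition
   can still be folded into the current expansion, for instance to
   recognize widening conversions or negations feeding the operation
   being expanded.  */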
2429
2430 static gimple
2431 get_def_for_expr (tree name, enum tree_code code)
2432 {
2433 gimple def_stmt;
2434
2435 if (TREE_CODE (name) != SSA_NAME)
2436 return NULL;
2437
2438 def_stmt = get_gimple_for_ssa_name (name);
2439 if (!def_stmt
2440 || gimple_assign_rhs_code (def_stmt) != code)
2441 return NULL;
2442
2443 return def_stmt;
2444 }
2445
2446 #ifdef HAVE_conditional_move
2447 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2448 assignment and the class of the expression on the RHS is TCLASS. Return
2449 NULL otherwise. */
2450
2451 static gimple
2452 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2453 {
2454 gimple def_stmt;
2455
2456 if (TREE_CODE (name) != SSA_NAME)
2457 return NULL;
2458
2459 def_stmt = get_gimple_for_ssa_name (name);
2460 if (!def_stmt
2461 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2462 return NULL;
2463
2464 return def_stmt;
2465 }
2466 #endif
2467 \f
2468
2469 /* Determine whether the LEN bytes generated by CONSTFUN can be
2470 stored to memory using several move instructions. CONSTFUNDATA is
2471 a pointer which will be passed as argument in every CONSTFUN call.
2472 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2473 a memset operation and false if it's a copy of a constant string.
2474 Return nonzero if a call to store_by_pieces should succeed. */
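/* The loop below mirrors the walk that store_by_pieces_1 would perform,
   but it only queries CONSTFUN and the legitimate-constant hook; no
   insns are emitted.  */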
2475
2476 int
2477 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2478 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2479 void *constfundata, unsigned int align, bool memsetp)
2480 {
2481 unsigned HOST_WIDE_INT l;
2482 unsigned int max_size;
2483 HOST_WIDE_INT offset = 0;
2484 enum machine_mode mode;
2485 enum insn_code icode;
2486 int reverse;
2487 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2488 rtx cst ATTRIBUTE_UNUSED;
2489
2490 if (len == 0)
2491 return 1;
2492
2493 if (! (memsetp
2494 ? SET_BY_PIECES_P (len, align)
2495 : STORE_BY_PIECES_P (len, align)))
2496 return 0;
2497
2498 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2499
2500 /* We would first store what we can in the largest integer mode, then go to
2501 successively smaller modes. */
2502
2503 for (reverse = 0;
2504 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2505 reverse++)
2506 {
2507 l = len;
2508 max_size = STORE_MAX_PIECES + 1;
2509 while (max_size > 1 && l > 0)
2510 {
2511 mode = widest_int_mode_for_size (max_size);
2512
2513 if (mode == VOIDmode)
2514 break;
2515
2516 icode = optab_handler (mov_optab, mode);
2517 if (icode != CODE_FOR_nothing
2518 && align >= GET_MODE_ALIGNMENT (mode))
2519 {
2520 unsigned int size = GET_MODE_SIZE (mode);
2521
2522 while (l >= size)
2523 {
2524 if (reverse)
2525 offset -= size;
2526
2527 cst = (*constfun) (constfundata, offset, mode);
2528 if (!targetm.legitimate_constant_p (mode, cst))
2529 return 0;
2530
2531 if (!reverse)
2532 offset += size;
2533
2534 l -= size;
2535 }
2536 }
2537
2538 max_size = GET_MODE_SIZE (mode);
2539 }
2540
2541 /* The code above should have handled everything. */
2542 gcc_assert (!l);
2543 }
2544
2545 return 1;
2546 }
2547
2548 /* Generate several move instructions to store LEN bytes generated by
2549 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2550 pointer which will be passed as argument in every CONSTFUN call.
2551 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2552 a memset operation and false if it's a copy of a constant string.
2553 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2554 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2555 stpcpy. */
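/* For example, the string built-in expanders use callbacks such as
   builtin_memcpy_read_str as CONSTFUN, so that each MODE-sized chunk of
   the constant source is materialized only when it is about to be
   stored.  */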
2556
2557 rtx
2558 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2559 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2560 void *constfundata, unsigned int align, bool memsetp, int endp)
2561 {
2562 enum machine_mode to_addr_mode = get_address_mode (to);
2563 struct store_by_pieces_d data;
2564
2565 if (len == 0)
2566 {
2567 gcc_assert (endp != 2);
2568 return to;
2569 }
2570
2571 gcc_assert (memsetp
2572 ? SET_BY_PIECES_P (len, align)
2573 : STORE_BY_PIECES_P (len, align));
2574 data.constfun = constfun;
2575 data.constfundata = constfundata;
2576 data.len = len;
2577 data.to = to;
2578 store_by_pieces_1 (&data, align);
2579 if (endp)
2580 {
2581 rtx to1;
2582
2583 gcc_assert (!data.reverse);
2584 if (data.autinc_to)
2585 {
2586 if (endp == 2)
2587 {
2588 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2589 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2590 else
2591 data.to_addr = copy_to_mode_reg (to_addr_mode,
2592 plus_constant (to_addr_mode,
2593 data.to_addr,
2594 -1));
2595 }
2596 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2597 data.offset);
2598 }
2599 else
2600 {
2601 if (endp == 2)
2602 --data.offset;
2603 to1 = adjust_address (data.to, QImode, data.offset);
2604 }
2605 return to1;
2606 }
2607 else
2608 return data.to;
2609 }
2610
2611 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2612 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2613
2614 static void
2615 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2616 {
2617 struct store_by_pieces_d data;
2618
2619 if (len == 0)
2620 return;
2621
2622 data.constfun = clear_by_pieces_1;
2623 data.constfundata = NULL;
2624 data.len = len;
2625 data.to = to;
2626 store_by_pieces_1 (&data, align);
2627 }
2628
2629 /* Callback routine for clear_by_pieces.
2630 Return const0_rtx unconditionally. */
2631
2632 static rtx
2633 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2634 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2635 enum machine_mode mode ATTRIBUTE_UNUSED)
2636 {
2637 return const0_rtx;
2638 }
2639
2640 /* Subroutine of clear_by_pieces and store_by_pieces.
2641 Generate several move instructions to store LEN bytes of block TO. (A MEM
2642 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2643
2644 static void
2645 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2646 unsigned int align ATTRIBUTE_UNUSED)
2647 {
2648 enum machine_mode to_addr_mode = get_address_mode (data->to);
2649 rtx to_addr = XEXP (data->to, 0);
2650 unsigned int max_size = STORE_MAX_PIECES + 1;
2651 enum insn_code icode;
2652
2653 data->offset = 0;
2654 data->to_addr = to_addr;
2655 data->autinc_to
2656 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2657 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2658
2659 data->explicit_inc_to = 0;
2660 data->reverse
2661 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2662 if (data->reverse)
2663 data->offset = data->len;
2664
2665 /* If storing requires more than two move insns,
2666 copy addresses to registers (to make displacements shorter)
2667 and use post-increment if available. */
2668 if (!data->autinc_to
2669 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2670 {
2671 /* Determine the main mode we'll be using.
2672 MODE might not be used depending on the definitions of the
2673 USE_* macros below. */
2674 enum machine_mode mode ATTRIBUTE_UNUSED
2675 = widest_int_mode_for_size (max_size);
2676
2677 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2678 {
2679 data->to_addr = copy_to_mode_reg (to_addr_mode,
2680 plus_constant (to_addr_mode,
2681 to_addr,
2682 data->len));
2683 data->autinc_to = 1;
2684 data->explicit_inc_to = -1;
2685 }
2686
2687 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2688 && ! data->autinc_to)
2689 {
2690 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2691 data->autinc_to = 1;
2692 data->explicit_inc_to = 1;
2693 }
2694
2695 if ( !data->autinc_to && CONSTANT_P (to_addr))
2696 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2697 }
2698
2699 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2700
2701 /* First store what we can in the largest integer mode, then go to
2702 successively smaller modes. */
2703
2704 while (max_size > 1 && data->len > 0)
2705 {
2706 enum machine_mode mode = widest_int_mode_for_size (max_size);
2707
2708 if (mode == VOIDmode)
2709 break;
2710
2711 icode = optab_handler (mov_optab, mode);
2712 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2713 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2714
2715 max_size = GET_MODE_SIZE (mode);
2716 }
2717
2718 /* The code above should have handled everything. */
2719 gcc_assert (!data->len);
2720 }
2721
2722 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2723 with move instructions for mode MODE. GENFUN is the gen_... function
2724 to make a move insn for that mode. DATA has all the other info. */
2725
2726 static void
2727 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2728 struct store_by_pieces_d *data)
2729 {
2730 unsigned int size = GET_MODE_SIZE (mode);
2731 rtx to1, cst;
2732
2733 while (data->len >= size)
2734 {
2735 if (data->reverse)
2736 data->offset -= size;
2737
2738 if (data->autinc_to)
2739 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2740 data->offset);
2741 else
2742 to1 = adjust_address (data->to, mode, data->offset);
2743
2744 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2745 emit_insn (gen_add2_insn (data->to_addr,
2746 gen_int_mode (-(HOST_WIDE_INT) size,
2747 GET_MODE (data->to_addr))));
2748
2749 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2750 emit_insn ((*genfun) (to1, cst));
2751
2752 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2753 emit_insn (gen_add2_insn (data->to_addr,
2754 gen_int_mode (size,
2755 GET_MODE (data->to_addr))));
2756
2757 if (! data->reverse)
2758 data->offset += size;
2759
2760 data->len -= size;
2761 }
2762 }
2763 \f
2764 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2765 its length in bytes. */
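/* The clearing is done with a single move of zero when SIZE matches the
   mode of OBJECT, otherwise by pieces, through a target setmem pattern,
   or as a last resort via a libcall to memset.  */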
2766
2767 rtx
2768 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2769 unsigned int expected_align, HOST_WIDE_INT expected_size,
2770 unsigned HOST_WIDE_INT min_size,
2771 unsigned HOST_WIDE_INT max_size,
2772 unsigned HOST_WIDE_INT probable_max_size)
2773 {
2774 enum machine_mode mode = GET_MODE (object);
2775 unsigned int align;
2776
2777 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2778
2779 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2780 just move a zero. Otherwise, do this a piece at a time. */
2781 if (mode != BLKmode
2782 && CONST_INT_P (size)
2783 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2784 {
2785 rtx zero = CONST0_RTX (mode);
2786 if (zero != NULL)
2787 {
2788 emit_move_insn (object, zero);
2789 return NULL;
2790 }
2791
2792 if (COMPLEX_MODE_P (mode))
2793 {
2794 zero = CONST0_RTX (GET_MODE_INNER (mode));
2795 if (zero != NULL)
2796 {
2797 write_complex_part (object, zero, 0);
2798 write_complex_part (object, zero, 1);
2799 return NULL;
2800 }
2801 }
2802 }
2803
2804 if (size == const0_rtx)
2805 return NULL;
2806
2807 align = MEM_ALIGN (object);
2808
2809 if (CONST_INT_P (size)
2810 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2811 clear_by_pieces (object, INTVAL (size), align);
2812 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2813 expected_align, expected_size,
2814 min_size, max_size, probable_max_size))
2815 ;
2816 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2817 return set_storage_via_libcall (object, size, const0_rtx,
2818 method == BLOCK_OP_TAILCALL);
2819 else
2820 gcc_unreachable ();
2821
2822 return NULL;
2823 }
2824
2825 rtx
2826 clear_storage (rtx object, rtx size, enum block_op_methods method)
2827 {
2828 unsigned HOST_WIDE_INT max, min = 0;
2829 if (GET_CODE (size) == CONST_INT)
2830 min = max = UINTVAL (size);
2831 else
2832 max = GET_MODE_MASK (GET_MODE (size));
2833 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2834 }
2835
2836
2837 /* A subroutine of clear_storage. Expand a call to memset.
2838 Return the return value of memset, 0 otherwise. */
2839
2840 rtx
2841 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2842 {
2843 tree call_expr, fn, object_tree, size_tree, val_tree;
2844 enum machine_mode size_mode;
2845 rtx retval;
2846
2847 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2848 place those new pseudos into a VAR_DECL and use them later. */
2849
2850 object = copy_addr_to_reg (XEXP (object, 0));
2851
2852 size_mode = TYPE_MODE (sizetype);
2853 size = convert_to_mode (size_mode, size, 1);
2854 size = copy_to_mode_reg (size_mode, size);
2855
2856 /* It is incorrect to use the libcall calling conventions to call
2857 memset in this context. This could be a user call to memset and
2858 the user may wish to examine the return value from memset. For
2859 targets where libcalls and normal calls have different conventions
2860 for returning pointers, we could end up generating incorrect code. */
2861
2862 object_tree = make_tree (ptr_type_node, object);
2863 if (!CONST_INT_P (val))
2864 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2865 size_tree = make_tree (sizetype, size);
2866 val_tree = make_tree (integer_type_node, val);
2867
2868 fn = clear_storage_libcall_fn (true);
2869 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2870 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2871
2872 retval = expand_normal (call_expr);
2873
2874 return retval;
2875 }
2876
2877 /* A subroutine of set_storage_via_libcall. Create the tree node
2878 for the function we use for block clears. */
2879
2880 tree block_clear_fn;
2881
2882 void
2883 init_block_clear_fn (const char *asmspec)
2884 {
2885 if (!block_clear_fn)
2886 {
2887 tree fn, args;
2888
2889 fn = get_identifier ("memset");
2890 args = build_function_type_list (ptr_type_node, ptr_type_node,
2891 integer_type_node, sizetype,
2892 NULL_TREE);
2893
2894 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2895 DECL_EXTERNAL (fn) = 1;
2896 TREE_PUBLIC (fn) = 1;
2897 DECL_ARTIFICIAL (fn) = 1;
2898 TREE_NOTHROW (fn) = 1;
2899 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2900 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2901
2902 block_clear_fn = fn;
2903 }
2904
2905 if (asmspec)
2906 set_user_assembler_name (block_clear_fn, asmspec);
2907 }
2908
2909 static tree
2910 clear_storage_libcall_fn (int for_call)
2911 {
2912 static bool emitted_extern;
2913
2914 if (!block_clear_fn)
2915 init_block_clear_fn (NULL);
2916
2917 if (for_call && !emitted_extern)
2918 {
2919 emitted_extern = true;
2920 make_decl_rtl (block_clear_fn);
2921 }
2922
2923 return block_clear_fn;
2924 }
2925 \f
2926 /* Expand a setmem pattern; return true if successful. */
2927
2928 bool
2929 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2930 unsigned int expected_align, HOST_WIDE_INT expected_size,
2931 unsigned HOST_WIDE_INT min_size,
2932 unsigned HOST_WIDE_INT max_size,
2933 unsigned HOST_WIDE_INT probable_max_size)
2934 {
2935 /* Try the most limited insn first, because there's no point
2936 including more than one in the machine description unless
2937 the more limited one has some advantage. */
2938
2939 enum machine_mode mode;
2940
2941 if (expected_align < align)
2942 expected_align = align;
2943 if (expected_size != -1)
2944 {
2945 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2946 expected_size = max_size;
2947 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2948 expected_size = min_size;
2949 }
2950
2951 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2952 mode = GET_MODE_WIDER_MODE (mode))
2953 {
2954 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2955
2956 if (code != CODE_FOR_nothing
2957 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2958 here because if SIZE is less than the mode mask, as it is
2959 returned by the macro, it will definitely be less than the
2960 actual mode mask. Since SIZE is within the Pmode address
2961 space, we limit MODE to Pmode. */
2962 && ((CONST_INT_P (size)
2963 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2964 <= (GET_MODE_MASK (mode) >> 1)))
2965 || max_size <= (GET_MODE_MASK (mode) >> 1)
2966 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2967 {
2968 struct expand_operand ops[9];
2969 unsigned int nops;
2970
2971 nops = insn_data[(int) code].n_generator_args;
2972 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2973
2974 create_fixed_operand (&ops[0], object);
2975 /* The check above guarantees that this size conversion is valid. */
2976 create_convert_operand_to (&ops[1], size, mode, true);
2977 create_convert_operand_from (&ops[2], val, byte_mode, true);
2978 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2979 if (nops >= 6)
2980 {
2981 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2982 create_integer_operand (&ops[5], expected_size);
2983 }
2984 if (nops >= 8)
2985 {
2986 create_integer_operand (&ops[6], min_size);
2987 /* If we cannot represent the maximal size,
2988 make the parameter NULL. */
2989 if ((HOST_WIDE_INT) max_size != -1)
2990 create_integer_operand (&ops[7], max_size);
2991 else
2992 create_fixed_operand (&ops[7], NULL);
2993 }
2994 if (nops == 9)
2995 {
2996 /* If we cannot represent the maximal size,
2997 make the parameter NULL. */
2998 if ((HOST_WIDE_INT) probable_max_size != -1)
2999 create_integer_operand (&ops[8], probable_max_size);
3000 else
3001 create_fixed_operand (&ops[8], NULL);
3002 }
3003 if (maybe_expand_insn (code, nops, ops))
3004 return true;
3005 }
3006 }
3007
3008 return false;
3009 }
3010
3011 \f
3012 /* Write to one of the components of the complex value CPLX. Write VAL to
3013 the real part if IMAG_P is false, and the imaginary part if it's true. */
3014
3015 static void
3016 write_complex_part (rtx cplx, rtx val, bool imag_p)
3017 {
3018 enum machine_mode cmode;
3019 enum machine_mode imode;
3020 unsigned ibitsize;
3021
3022 if (GET_CODE (cplx) == CONCAT)
3023 {
3024 emit_move_insn (XEXP (cplx, imag_p), val);
3025 return;
3026 }
3027
3028 cmode = GET_MODE (cplx);
3029 imode = GET_MODE_INNER (cmode);
3030 ibitsize = GET_MODE_BITSIZE (imode);
3031
3032 /* For MEMs simplify_gen_subreg may generate an invalid new address
3033 because, e.g., the original address is considered mode-dependent
3034 by the target, which restricts simplify_subreg from invoking
3035 adjust_address_nv. Instead of preparing fallback support for an
3036 invalid address, we call adjust_address_nv directly. */
3037 if (MEM_P (cplx))
3038 {
3039 emit_move_insn (adjust_address_nv (cplx, imode,
3040 imag_p ? GET_MODE_SIZE (imode) : 0),
3041 val);
3042 return;
3043 }
3044
3045 /* If the sub-object is at least word sized, then we know that subregging
3046 will work. This special case is important, since store_bit_field
3047 wants to operate on integer modes, and there's rarely an OImode to
3048 correspond to TCmode. */
3049 if (ibitsize >= BITS_PER_WORD
3050 /* For hard regs we have exact predicates. Assume we can split
3051 the original object if it spans an even number of hard regs.
3052 This special case is important for SCmode on 64-bit platforms
3053 where the natural size of floating-point regs is 32-bit. */
3054 || (REG_P (cplx)
3055 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3056 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3057 {
3058 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3059 imag_p ? GET_MODE_SIZE (imode) : 0);
3060 if (part)
3061 {
3062 emit_move_insn (part, val);
3063 return;
3064 }
3065 else
3066 /* simplify_gen_subreg may fail for sub-word MEMs. */
3067 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3068 }
3069
3070 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3071 }
3072
3073 /* Extract one of the components of the complex value CPLX. Extract the
3074 real part if IMAG_P is false, and the imaginary part if it's true. */
3075
3076 static rtx
3077 read_complex_part (rtx cplx, bool imag_p)
3078 {
3079 enum machine_mode cmode, imode;
3080 unsigned ibitsize;
3081
3082 if (GET_CODE (cplx) == CONCAT)
3083 return XEXP (cplx, imag_p);
3084
3085 cmode = GET_MODE (cplx);
3086 imode = GET_MODE_INNER (cmode);
3087 ibitsize = GET_MODE_BITSIZE (imode);
3088
3089 /* Special case reads from complex constants that got spilled to memory. */
3090 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3091 {
3092 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3093 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3094 {
3095 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3096 if (CONSTANT_CLASS_P (part))
3097 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3098 }
3099 }
3100
3101 /* For MEMs simplify_gen_subreg may generate an invalid new address
3102 because, e.g., the original address is considered mode-dependent
3103 by the target, which restricts simplify_subreg from invoking
3104 adjust_address_nv. Instead of preparing fallback support for an
3105 invalid address, we call adjust_address_nv directly. */
3106 if (MEM_P (cplx))
3107 return adjust_address_nv (cplx, imode,
3108 imag_p ? GET_MODE_SIZE (imode) : 0);
3109
3110 /* If the sub-object is at least word sized, then we know that subregging
3111 will work. This special case is important, since extract_bit_field
3112 wants to operate on integer modes, and there's rarely an OImode to
3113 correspond to TCmode. */
3114 if (ibitsize >= BITS_PER_WORD
3115 /* For hard regs we have exact predicates. Assume we can split
3116 the original object if it spans an even number of hard regs.
3117 This special case is important for SCmode on 64-bit platforms
3118 where the natural size of floating-point regs is 32-bit. */
3119 || (REG_P (cplx)
3120 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3121 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3122 {
3123 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3124 imag_p ? GET_MODE_SIZE (imode) : 0);
3125 if (ret)
3126 return ret;
3127 else
3128 /* simplify_gen_subreg may fail for sub-word MEMs. */
3129 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3130 }
3131
3132 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3133 true, NULL_RTX, imode, imode);
3134 }
3135 \f
3136 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3137 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3138 represented in NEW_MODE. If FORCE is true, this will never happen, as
3139 we'll force-create a SUBREG if needed. */
3140
3141 static rtx
3142 emit_move_change_mode (enum machine_mode new_mode,
3143 enum machine_mode old_mode, rtx x, bool force)
3144 {
3145 rtx ret;
3146
3147 if (push_operand (x, GET_MODE (x)))
3148 {
3149 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3150 MEM_COPY_ATTRIBUTES (ret, x);
3151 }
3152 else if (MEM_P (x))
3153 {
3154 /* We don't have to worry about changing the address since the
3155 size in bytes is supposed to be the same. */
3156 if (reload_in_progress)
3157 {
3158 /* Copy the MEM to change the mode and move any
3159 substitutions from the old MEM to the new one. */
3160 ret = adjust_address_nv (x, new_mode, 0);
3161 copy_replacements (x, ret);
3162 }
3163 else
3164 ret = adjust_address (x, new_mode, 0);
3165 }
3166 else
3167 {
3168 /* Note that we do want simplify_subreg's behavior of validating
3169 that the new mode is ok for a hard register. If we were to use
3170 simplify_gen_subreg, we would create the subreg, but would
3171 probably run into the target not being able to implement it. */
3172 /* Except, of course, when FORCE is true, when this is exactly what
3173 we want. Which is needed for CCmodes on some targets. */
3174 if (force)
3175 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3176 else
3177 ret = simplify_subreg (new_mode, x, old_mode, 0);
3178 }
3179
3180 return ret;
3181 }
3182
3183 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3184 an integer mode of the same size as MODE. Returns the instruction
3185 emitted, or NULL if such a move could not be generated. */
3186
3187 static rtx
3188 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3189 {
3190 enum machine_mode imode;
3191 enum insn_code code;
3192
3193 /* There must exist a mode of the exact size we require. */
3194 imode = int_mode_for_mode (mode);
3195 if (imode == BLKmode)
3196 return NULL_RTX;
3197
3198 /* The target must support moves in this mode. */
3199 code = optab_handler (mov_optab, imode);
3200 if (code == CODE_FOR_nothing)
3201 return NULL_RTX;
3202
3203 x = emit_move_change_mode (imode, mode, x, force);
3204 if (x == NULL_RTX)
3205 return NULL_RTX;
3206 y = emit_move_change_mode (imode, mode, y, force);
3207 if (y == NULL_RTX)
3208 return NULL_RTX;
3209 return emit_insn (GEN_FCN (code) (x, y));
3210 }
3211
3212 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3213 Return an equivalent MEM that does not use an auto-increment. */
3214
3215 static rtx
3216 emit_move_resolve_push (enum machine_mode mode, rtx x)
3217 {
3218 enum rtx_code code = GET_CODE (XEXP (x, 0));
3219 HOST_WIDE_INT adjust;
3220 rtx temp;
3221
3222 adjust = GET_MODE_SIZE (mode);
3223 #ifdef PUSH_ROUNDING
3224 adjust = PUSH_ROUNDING (adjust);
3225 #endif
3226 if (code == PRE_DEC || code == POST_DEC)
3227 adjust = -adjust;
3228 else if (code == PRE_MODIFY || code == POST_MODIFY)
3229 {
3230 rtx expr = XEXP (XEXP (x, 0), 1);
3231 HOST_WIDE_INT val;
3232
3233 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3234 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3235 val = INTVAL (XEXP (expr, 1));
3236 if (GET_CODE (expr) == MINUS)
3237 val = -val;
3238 gcc_assert (adjust == val || adjust == -val);
3239 adjust = val;
3240 }
3241
3242 /* Do not use anti_adjust_stack, since we don't want to update
3243 stack_pointer_delta. */
3244 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3245 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3246 0, OPTAB_LIB_WIDEN);
3247 if (temp != stack_pointer_rtx)
3248 emit_move_insn (stack_pointer_rtx, temp);
3249
3250 switch (code)
3251 {
3252 case PRE_INC:
3253 case PRE_DEC:
3254 case PRE_MODIFY:
3255 temp = stack_pointer_rtx;
3256 break;
3257 case POST_INC:
3258 case POST_DEC:
3259 case POST_MODIFY:
3260 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3261 break;
3262 default:
3263 gcc_unreachable ();
3264 }
3265
3266 return replace_equiv_address (x, temp);
3267 }
3268
3269 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3270 X is known to satisfy push_operand, and MODE is known to be complex.
3271 Returns the last instruction emitted. */
3272
3273 rtx
3274 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3275 {
3276 enum machine_mode submode = GET_MODE_INNER (mode);
3277 bool imag_first;
3278
3279 #ifdef PUSH_ROUNDING
3280 unsigned int submodesize = GET_MODE_SIZE (submode);
3281
3282 /* In case we output to the stack, but the size is smaller than what the
3283 machine can push exactly, we need to use move instructions. */
3284 if (PUSH_ROUNDING (submodesize) != submodesize)
3285 {
3286 x = emit_move_resolve_push (mode, x);
3287 return emit_move_insn (x, y);
3288 }
3289 #endif
3290
3291 /* Note that the real part always precedes the imag part in memory
3292 regardless of the machine's endianness. */
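/* Consequently a decrementing push writes successively lower addresses,
   so the imaginary part (which lives at the higher address) must be
   pushed first; with an incrementing push the real part goes first.  */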
3293 switch (GET_CODE (XEXP (x, 0)))
3294 {
3295 case PRE_DEC:
3296 case POST_DEC:
3297 imag_first = true;
3298 break;
3299 case PRE_INC:
3300 case POST_INC:
3301 imag_first = false;
3302 break;
3303 default:
3304 gcc_unreachable ();
3305 }
3306
3307 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3308 read_complex_part (y, imag_first));
3309 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3310 read_complex_part (y, !imag_first));
3311 }
3312
3313 /* A subroutine of emit_move_complex. Perform the move from Y to X
3314 via two moves of the parts. Returns the last instruction emitted. */
3315
3316 rtx
3317 emit_move_complex_parts (rtx x, rtx y)
3318 {
3319 /* Show the output dies here. This is necessary for SUBREGs
3320 of pseudos since we cannot track their lifetimes correctly;
3321 hard regs shouldn't appear here except as return values. */
3322 if (!reload_completed && !reload_in_progress
3323 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3324 emit_clobber (x);
3325
3326 write_complex_part (x, read_complex_part (y, false), false);
3327 write_complex_part (x, read_complex_part (y, true), true);
3328
3329 return get_last_insn ();
3330 }
3331
3332 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3333 MODE is known to be complex. Returns the last instruction emitted. */
3334
3335 static rtx
3336 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3337 {
3338 bool try_int;
3339
3340 /* Need to take special care for pushes, to maintain proper ordering
3341 of the data, and possibly extra padding. */
3342 if (push_operand (x, mode))
3343 return emit_move_complex_push (mode, x, y);
3344
3345 /* See if we can coerce the target into moving both values at once, except
3346 for floating point where we favor moving as parts if this is easy. */
3347 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3348 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3349 && !(REG_P (x)
3350 && HARD_REGISTER_P (x)
3351 && hard_regno_nregs[REGNO (x)][mode] == 1)
3352 && !(REG_P (y)
3353 && HARD_REGISTER_P (y)
3354 && hard_regno_nregs[REGNO (y)][mode] == 1))
3355 try_int = false;
3356 /* Not possible if the values are inherently not adjacent. */
3357 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3358 try_int = false;
3359 /* Is possible if both are registers (or subregs of registers). */
3360 else if (register_operand (x, mode) && register_operand (y, mode))
3361 try_int = true;
3362 /* If one of the operands is a memory, and alignment constraints
3363 are friendly enough, we may be able to do combined memory operations.
3364 We do not attempt this if Y is a constant because that combination is
3365 usually better with the by-parts thing below. */
3366 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3367 && (!STRICT_ALIGNMENT
3368 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3369 try_int = true;
3370 else
3371 try_int = false;
3372
3373 if (try_int)
3374 {
3375 rtx ret;
3376
3377 /* For memory to memory moves, optimal behavior can be had with the
3378 existing block move logic. */
3379 if (MEM_P (x) && MEM_P (y))
3380 {
3381 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3382 BLOCK_OP_NO_LIBCALL);
3383 return get_last_insn ();
3384 }
3385
3386 ret = emit_move_via_integer (mode, x, y, true);
3387 if (ret)
3388 return ret;
3389 }
3390
3391 return emit_move_complex_parts (x, y);
3392 }
3393
3394 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3395 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3396
3397 static rtx
3398 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3399 {
3400 rtx ret;
3401
3402 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3403 if (mode != CCmode)
3404 {
3405 enum insn_code code = optab_handler (mov_optab, CCmode);
3406 if (code != CODE_FOR_nothing)
3407 {
3408 x = emit_move_change_mode (CCmode, mode, x, true);
3409 y = emit_move_change_mode (CCmode, mode, y, true);
3410 return emit_insn (GEN_FCN (code) (x, y));
3411 }
3412 }
3413
3414 /* Otherwise, find the MODE_INT mode of the same width. */
3415 ret = emit_move_via_integer (mode, x, y, false);
3416 gcc_assert (ret != NULL);
3417 return ret;
3418 }
3419
3420 /* Return true if word I of OP lies entirely in the
3421 undefined bits of a paradoxical subreg. */
3422
3423 static bool
3424 undefined_operand_subword_p (const_rtx op, int i)
3425 {
3426 enum machine_mode innermode, innermostmode;
3427 int offset;
3428 if (GET_CODE (op) != SUBREG)
3429 return false;
3430 innermode = GET_MODE (op);
3431 innermostmode = GET_MODE (SUBREG_REG (op));
3432 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3433 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3434 memory, except for a paradoxical subreg where we define
3435 SUBREG_BYTE to be 0; undo this exception as in
3436 simplify_subreg. */
3437 if (SUBREG_BYTE (op) == 0
3438 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3439 {
3440 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3441 if (WORDS_BIG_ENDIAN)
3442 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3443 if (BYTES_BIG_ENDIAN)
3444 offset += difference % UNITS_PER_WORD;
3445 }
3446 if (offset >= GET_MODE_SIZE (innermostmode)
3447 || offset <= -GET_MODE_SIZE (word_mode))
3448 return true;
3449 return false;
3450 }
3451
3452 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3453 MODE is any multi-word or full-word mode that lacks a move_insn
3454 pattern. Note that you will get better code if you define such
3455 patterns, even if they must turn into multiple assembler instructions. */
3456
3457 static rtx
3458 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3459 {
3460 rtx last_insn = 0;
3461 rtx seq, inner;
3462 bool need_clobber;
3463 int i;
3464
3465 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3466
3467 /* If X is a push on the stack, do the push now and replace
3468 X with a reference to the stack pointer. */
3469 if (push_operand (x, mode))
3470 x = emit_move_resolve_push (mode, x);
3471
3472 /* If we are in reload, see if either operand is a MEM whose address
3473 is scheduled for replacement. */
3474 if (reload_in_progress && MEM_P (x)
3475 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3476 x = replace_equiv_address_nv (x, inner);
3477 if (reload_in_progress && MEM_P (y)
3478 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3479 y = replace_equiv_address_nv (y, inner);
3480
3481 start_sequence ();
3482
3483 need_clobber = false;
3484 for (i = 0;
3485 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3486 i++)
3487 {
3488 rtx xpart = operand_subword (x, i, 1, mode);
3489 rtx ypart;
3490
3491 /* Do not generate code for a move if it would come entirely
3492 from the undefined bits of a paradoxical subreg. */
3493 if (undefined_operand_subword_p (y, i))
3494 continue;
3495
3496 ypart = operand_subword (y, i, 1, mode);
3497
3498 /* If we can't get a part of Y, put Y into memory if it is a
3499 constant. Otherwise, force it into a register. Then we must
3500 be able to get a part of Y. */
3501 if (ypart == 0 && CONSTANT_P (y))
3502 {
3503 y = use_anchored_address (force_const_mem (mode, y));
3504 ypart = operand_subword (y, i, 1, mode);
3505 }
3506 else if (ypart == 0)
3507 ypart = operand_subword_force (y, i, mode);
3508
3509 gcc_assert (xpart && ypart);
3510
3511 need_clobber |= (GET_CODE (xpart) == SUBREG);
3512
3513 last_insn = emit_move_insn (xpart, ypart);
3514 }
3515
3516 seq = get_insns ();
3517 end_sequence ();
3518
3519 /* Show the output dies here. This is necessary for SUBREGs
3520 of pseudos since we cannot track their lifetimes correctly;
3521 hard regs shouldn't appear here except as return values.
3522 We never want to emit such a clobber after reload. */
3523 if (x != y
3524 && ! (reload_in_progress || reload_completed)
3525 && need_clobber != 0)
3526 emit_clobber (x);
3527
3528 emit_insn (seq);
3529
3530 return last_insn;
3531 }
3532
3533 /* Low level part of emit_move_insn.
3534 Called just like emit_move_insn, but assumes X and Y
3535 are basically valid. */
3536
3537 rtx
3538 emit_move_insn_1 (rtx x, rtx y)
3539 {
3540 enum machine_mode mode = GET_MODE (x);
3541 enum insn_code code;
3542
3543 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3544
3545 code = optab_handler (mov_optab, mode);
3546 if (code != CODE_FOR_nothing)
3547 return emit_insn (GEN_FCN (code) (x, y));
3548
3549 /* Expand complex moves by moving real part and imag part. */
3550 if (COMPLEX_MODE_P (mode))
3551 return emit_move_complex (mode, x, y);
3552
3553 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3554 || ALL_FIXED_POINT_MODE_P (mode))
3555 {
3556 rtx result = emit_move_via_integer (mode, x, y, true);
3557
3558 /* If we can't find an integer mode, use multiple words. */
3559 if (result)
3560 return result;
3561 else
3562 return emit_move_multi_word (mode, x, y);
3563 }
3564
3565 if (GET_MODE_CLASS (mode) == MODE_CC)
3566 return emit_move_ccmode (mode, x, y);
3567
3568 /* Try using a move pattern for the corresponding integer mode. This is
3569 only safe when simplify_subreg can convert MODE constants into integer
3570 constants. At present, it can only do this reliably if the value
3571 fits within a HOST_WIDE_INT. */
3572 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3573 {
3574 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3575
3576 if (ret)
3577 {
3578 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3579 return ret;
3580 }
3581 }
3582
3583 return emit_move_multi_word (mode, x, y);
3584 }
3585
3586 /* Generate code to copy Y into X.
3587 Both Y and X must have the same mode, except that
3588 Y can be a constant with VOIDmode.
3589 This mode cannot be BLKmode; use emit_block_move for that.
3590
3591 Return the last instruction emitted. */
3592
3593 rtx
3594 emit_move_insn (rtx x, rtx y)
3595 {
3596 enum machine_mode mode = GET_MODE (x);
3597 rtx y_cst = NULL_RTX;
3598 rtx last_insn, set;
3599
3600 gcc_assert (mode != BLKmode
3601 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3602
3603 if (CONSTANT_P (y))
3604 {
3605 if (optimize
3606 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3607 && (last_insn = compress_float_constant (x, y)))
3608 return last_insn;
3609
3610 y_cst = y;
3611
3612 if (!targetm.legitimate_constant_p (mode, y))
3613 {
3614 y = force_const_mem (mode, y);
3615
3616 /* If the target's cannot_force_const_mem prevented the spill,
3617 assume that the target's move expanders will also take care
3618 of the non-legitimate constant. */
3619 if (!y)
3620 y = y_cst;
3621 else
3622 y = use_anchored_address (y);
3623 }
3624 }
3625
3626 /* If X or Y are memory references, verify that their addresses are valid
3627 for the machine. */
3628 if (MEM_P (x)
3629 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3630 MEM_ADDR_SPACE (x))
3631 && ! push_operand (x, GET_MODE (x))))
3632 x = validize_mem (x);
3633
3634 if (MEM_P (y)
3635 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3636 MEM_ADDR_SPACE (y)))
3637 y = validize_mem (y);
3638
3639 gcc_assert (mode != BLKmode);
3640
3641 last_insn = emit_move_insn_1 (x, y);
3642
3643 if (y_cst && REG_P (x)
3644 && (set = single_set (last_insn)) != NULL_RTX
3645 && SET_DEST (set) == x
3646 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3647 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3648
3649 return last_insn;
3650 }
3651
3652 /* If Y is representable exactly in a narrower mode, and the target can
3653 perform the extension directly from constant or memory, then emit the
3654 move as an extension. */
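/* A hypothetical example of what this buys: storing the DFmode constant
   1.0 on a target whose extendsfdf2 pattern accepts a memory operand.
   Since 1.0 is exactly representable in SFmode, the DFmode constant-pool
   load can be replaced by an SFmode load followed by a FLOAT_EXTEND,
   provided the cost comparison below agrees that this is cheaper.  */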
3655
3656 static rtx
3657 compress_float_constant (rtx x, rtx y)
3658 {
3659 enum machine_mode dstmode = GET_MODE (x);
3660 enum machine_mode orig_srcmode = GET_MODE (y);
3661 enum machine_mode srcmode;
3662 REAL_VALUE_TYPE r;
3663 int oldcost, newcost;
3664 bool speed = optimize_insn_for_speed_p ();
3665
3666 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3667
3668 if (targetm.legitimate_constant_p (dstmode, y))
3669 oldcost = set_src_cost (y, speed);
3670 else
3671 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3672
3673 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3674 srcmode != orig_srcmode;
3675 srcmode = GET_MODE_WIDER_MODE (srcmode))
3676 {
3677 enum insn_code ic;
3678 rtx trunc_y, last_insn;
3679
3680 /* Skip if the target can't extend this way. */
3681 ic = can_extend_p (dstmode, srcmode, 0);
3682 if (ic == CODE_FOR_nothing)
3683 continue;
3684
3685 /* Skip if the narrowed value isn't exact. */
3686 if (! exact_real_truncate (srcmode, &r))
3687 continue;
3688
3689 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3690
3691 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3692 {
3693 /* Skip if the target needs extra instructions to perform
3694 the extension. */
3695 if (!insn_operand_matches (ic, 1, trunc_y))
3696 continue;
3697 /* This is valid, but may not be cheaper than the original. */
3698 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3699 speed);
3700 if (oldcost < newcost)
3701 continue;
3702 }
3703 else if (float_extend_from_mem[dstmode][srcmode])
3704 {
3705 trunc_y = force_const_mem (srcmode, trunc_y);
3706 /* This is valid, but may not be cheaper than the original. */
3707 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3708 speed);
3709 if (oldcost < newcost)
3710 continue;
3711 trunc_y = validize_mem (trunc_y);
3712 }
3713 else
3714 continue;
3715
3716 /* For CSE's benefit, force the compressed constant pool entry
3717 into a new pseudo. This constant may be used in different modes,
3718 and if not, combine will put things back together for us. */
3719 trunc_y = force_reg (srcmode, trunc_y);
3720 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3721 last_insn = get_last_insn ();
3722
3723 if (REG_P (x))
3724 set_unique_reg_note (last_insn, REG_EQUAL, y);
3725
3726 return last_insn;
3727 }
3728
3729 return NULL_RTX;
3730 }
3731 \f
3732 /* Pushing data onto the stack. */
3733
3734 /* Push a block of length SIZE (perhaps variable)
3735 and return an rtx to address the beginning of the block.
3736 The value may be virtual_outgoing_args_rtx.
3737
3738 EXTRA is the number of bytes of padding to push in addition to SIZE.
3739 BELOW nonzero means this padding comes at low addresses;
3740 otherwise, the padding comes at high addresses. */
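/* As a hypothetical example, push_block (GEN_INT (32), 0, 0) on a target
   whose stack grows downward anti-adjusts the stack by 32 bytes and
   returns virtual_outgoing_args_rtx as the address of the beginning of
   the block.  */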
3741
3742 rtx
3743 push_block (rtx size, int extra, int below)
3744 {
3745 rtx temp;
3746
3747 size = convert_modes (Pmode, ptr_mode, size, 1);
3748 if (CONSTANT_P (size))
3749 anti_adjust_stack (plus_constant (Pmode, size, extra));
3750 else if (REG_P (size) && extra == 0)
3751 anti_adjust_stack (size);
3752 else
3753 {
3754 temp = copy_to_mode_reg (Pmode, size);
3755 if (extra != 0)
3756 temp = expand_binop (Pmode, add_optab, temp,
3757 gen_int_mode (extra, Pmode),
3758 temp, 0, OPTAB_LIB_WIDEN);
3759 anti_adjust_stack (temp);
3760 }
3761
3762 #ifndef STACK_GROWS_DOWNWARD
3763 if (0)
3764 #else
3765 if (1)
3766 #endif
3767 {
3768 temp = virtual_outgoing_args_rtx;
3769 if (extra != 0 && below)
3770 temp = plus_constant (Pmode, temp, extra);
3771 }
3772 else
3773 {
3774 if (CONST_INT_P (size))
3775 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3776 -INTVAL (size) - (below ? 0 : extra));
3777 else if (extra != 0 && !below)
3778 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3779 negate_rtx (Pmode, plus_constant (Pmode, size,
3780 extra)));
3781 else
3782 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3783 negate_rtx (Pmode, size));
3784 }
3785
3786 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3787 }
3788
3789 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3790
3791 static rtx
3792 mem_autoinc_base (rtx mem)
3793 {
3794 if (MEM_P (mem))
3795 {
3796 rtx addr = XEXP (mem, 0);
3797 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3798 return XEXP (addr, 0);
3799 }
3800 return NULL;
3801 }
3802
3803 /* A utility routine used here, in reload, and in try_split. The insns
3804 after PREV up to and including LAST are known to adjust the stack,
3805 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3806 placing notes as appropriate. PREV may be NULL, indicating the
3807 entire insn sequence prior to LAST should be scanned.
3808
3809 The set of allowed stack pointer modifications is small:
3810 (1) One or more auto-inc style memory references (aka pushes),
3811 (2) One or more addition/subtraction with the SP as destination,
3812 (3) A single move insn with the SP as destination,
3813 (4) A call_pop insn,
3814 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3815
3816 Insns in the sequence that do not modify the SP are ignored,
3817 except for noreturn calls.
3818
3819 The return value is the amount of adjustment that can be trivially
3820 verified, via immediate operand or auto-inc. If the adjustment
3821 cannot be trivially extracted, the return value is INT_MIN. */
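/* For illustration only (register names and mode sizes are hypothetical),
   the shapes recognized below and the values returned for them are:

       (set (reg sp) (plus (reg sp) (const_int -16)))           -> -16
       (set (mem:SI (pre_dec (reg sp))) (reg r0))               -> -4
       (set (mem:DI (pre_modify (reg sp)
                                (plus (reg sp) (const_int -8))))
            (reg r1))                                           -> -8  */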
3822
3823 HOST_WIDE_INT
3824 find_args_size_adjust (rtx insn)
3825 {
3826 rtx dest, set, pat;
3827 int i;
3828
3829 pat = PATTERN (insn);
3830 set = NULL;
3831
3832 /* Look for a call_pop pattern. */
3833 if (CALL_P (insn))
3834 {
3835 /* We have to allow non-call_pop patterns for the case
3836 of emit_single_push_insn of a TLS address. */
3837 if (GET_CODE (pat) != PARALLEL)
3838 return 0;
3839
3840 /* All call_pop have a stack pointer adjust in the parallel.
3841 The call itself is always first, and the stack adjust is
3842 usually last, so search from the end. */
3843 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3844 {
3845 set = XVECEXP (pat, 0, i);
3846 if (GET_CODE (set) != SET)
3847 continue;
3848 dest = SET_DEST (set);
3849 if (dest == stack_pointer_rtx)
3850 break;
3851 }
3852 /* We'd better have found the stack pointer adjust. */
3853 if (i == 0)
3854 return 0;
3855 /* Fall through to process the extracted SET and DEST
3856 as if it was a standalone insn. */
3857 }
3858 else if (GET_CODE (pat) == SET)
3859 set = pat;
3860 else if ((set = single_set (insn)) != NULL)
3861 ;
3862 else if (GET_CODE (pat) == PARALLEL)
3863 {
3864 /* ??? Some older ports use a parallel with a stack adjust
3865 and a store for a PUSH_ROUNDING pattern, rather than a
3866 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3867 /* ??? See h8300 and m68k, pushqi1. */
3868 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3869 {
3870 set = XVECEXP (pat, 0, i);
3871 if (GET_CODE (set) != SET)
3872 continue;
3873 dest = SET_DEST (set);
3874 if (dest == stack_pointer_rtx)
3875 break;
3876
3877 /* We do not expect an auto-inc of the sp in the parallel. */
3878 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3879 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3880 != stack_pointer_rtx);
3881 }
3882 if (i < 0)
3883 return 0;
3884 }
3885 else
3886 return 0;
3887
3888 dest = SET_DEST (set);
3889
3890 /* Look for direct modifications of the stack pointer. */
3891 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3892 {
3893 /* Look for a trivial adjustment, otherwise assume nothing. */
3894 /* Note that the SPU restore_stack_block pattern refers to
3895 the stack pointer in V4SImode. Consider that non-trivial. */
3896 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3897 && GET_CODE (SET_SRC (set)) == PLUS
3898 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3899 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3900 return INTVAL (XEXP (SET_SRC (set), 1));
3901 /* ??? Reload can generate no-op moves, which will be cleaned
3902 up later. Recognize it and continue searching. */
3903 else if (rtx_equal_p (dest, SET_SRC (set)))
3904 return 0;
3905 else
3906 return HOST_WIDE_INT_MIN;
3907 }
3908 else
3909 {
3910 rtx mem, addr;
3911
3912 /* Otherwise only think about autoinc patterns. */
3913 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3914 {
3915 mem = dest;
3916 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3917 != stack_pointer_rtx);
3918 }
3919 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3920 mem = SET_SRC (set);
3921 else
3922 return 0;
3923
3924 addr = XEXP (mem, 0);
3925 switch (GET_CODE (addr))
3926 {
3927 case PRE_INC:
3928 case POST_INC:
3929 return GET_MODE_SIZE (GET_MODE (mem));
3930 case PRE_DEC:
3931 case POST_DEC:
3932 return -GET_MODE_SIZE (GET_MODE (mem));
3933 case PRE_MODIFY:
3934 case POST_MODIFY:
3935 addr = XEXP (addr, 1);
3936 gcc_assert (GET_CODE (addr) == PLUS);
3937 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3938 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3939 return INTVAL (XEXP (addr, 1));
3940 default:
3941 gcc_unreachable ();
3942 }
3943 }
3944 }
3945
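/* Add REG_ARGS_SIZE notes to the insns between PREV (exclusive) and LAST
   (inclusive), walking backward from LAST with END_ARGS_SIZE as the final
   args size and letting find_args_size_adjust classify each insn.  Return
   the args size in effect at PREV, or INT_MIN if some adjustment could
   not be trivially verified.  */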
3946 int
3947 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3948 {
3949 int args_size = end_args_size;
3950 bool saw_unknown = false;
3951 rtx insn;
3952
3953 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3954 {
3955 HOST_WIDE_INT this_delta;
3956
3957 if (!NONDEBUG_INSN_P (insn))
3958 continue;
3959
3960 this_delta = find_args_size_adjust (insn);
3961 if (this_delta == 0)
3962 {
3963 if (!CALL_P (insn)
3964 || ACCUMULATE_OUTGOING_ARGS
3965 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3966 continue;
3967 }
3968
3969 gcc_assert (!saw_unknown);
3970 if (this_delta == HOST_WIDE_INT_MIN)
3971 saw_unknown = true;
3972
3973 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3974 #ifdef STACK_GROWS_DOWNWARD
3975 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3976 #endif
3977 args_size -= this_delta;
3978 }
3979
3980 return saw_unknown ? INT_MIN : args_size;
3981 }
3982
3983 #ifdef PUSH_ROUNDING
3984 /* Emit single push insn. */
3985
3986 static void
3987 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3988 {
3989 rtx dest_addr;
3990 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3991 rtx dest;
3992 enum insn_code icode;
3993
3994 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3995 /* If there is a push pattern, use it.  Otherwise try the old way of
3996 throwing a MEM representing the push operation to the move expander.  */
3997 icode = optab_handler (push_optab, mode);
3998 if (icode != CODE_FOR_nothing)
3999 {
4000 struct expand_operand ops[1];
4001
4002 create_input_operand (&ops[0], x, mode);
4003 if (maybe_expand_insn (icode, 1, ops))
4004 return;
4005 }
4006 if (GET_MODE_SIZE (mode) == rounded_size)
4007 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4008 /* If we are to pad downward, adjust the stack pointer first and
4009 then store X into the stack location using an offset. This is
4010 because emit_move_insn does not know how to pad; it does not have
4011 access to type. */
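  /* A hypothetical worked example: pushing a 2-byte HImode value when
     PUSH_ROUNDING rounds the slot up to 4 bytes, the stack grows downward
     with a PRE_DEC push, and FUNCTION_ARG_PADDING says downward.  The code
     below subtracts 4 from the stack pointer and stores the value at
     (sp + 2), leaving the two pad bytes at the lower addresses.  */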
4012 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4013 {
4014 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4015 HOST_WIDE_INT offset;
4016
4017 emit_move_insn (stack_pointer_rtx,
4018 expand_binop (Pmode,
4019 #ifdef STACK_GROWS_DOWNWARD
4020 sub_optab,
4021 #else
4022 add_optab,
4023 #endif
4024 stack_pointer_rtx,
4025 gen_int_mode (rounded_size, Pmode),
4026 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4027
4028 offset = (HOST_WIDE_INT) padding_size;
4029 #ifdef STACK_GROWS_DOWNWARD
4030 if (STACK_PUSH_CODE == POST_DEC)
4031 /* We have already decremented the stack pointer, so get the
4032 previous value. */
4033 offset += (HOST_WIDE_INT) rounded_size;
4034 #else
4035 if (STACK_PUSH_CODE == POST_INC)
4036 /* We have already incremented the stack pointer, so get the
4037 previous value. */
4038 offset -= (HOST_WIDE_INT) rounded_size;
4039 #endif
4040 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4041 gen_int_mode (offset, Pmode));
4042 }
4043 else
4044 {
4045 #ifdef STACK_GROWS_DOWNWARD
4046 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4047 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4048 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4049 Pmode));
4050 #else
4051 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4052 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4053 gen_int_mode (rounded_size, Pmode));
4054 #endif
4055 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4056 }
4057
4058 dest = gen_rtx_MEM (mode, dest_addr);
4059
4060 if (type != 0)
4061 {
4062 set_mem_attributes (dest, type, 1);
4063
4064 if (flag_optimize_sibling_calls)
4065 /* Function incoming arguments may overlap with sibling call
4066 outgoing arguments and we cannot allow reordering of reads
4067 from function arguments with stores to outgoing arguments
4068 of sibling calls. */
4069 set_mem_alias_set (dest, 0);
4070 }
4071 emit_move_insn (dest, x);
4072 }
4073
4074 /* Emit and annotate a single push insn. */
4075
4076 static void
4077 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4078 {
4079 int delta, old_delta = stack_pointer_delta;
4080 rtx prev = get_last_insn ();
4081 rtx last;
4082
4083 emit_single_push_insn_1 (mode, x, type);
4084
4085 last = get_last_insn ();
4086
4087 /* Notice the common case where we emitted exactly one insn. */
4088 if (PREV_INSN (last) == prev)
4089 {
4090 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4091 return;
4092 }
4093
4094 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4095 gcc_assert (delta == INT_MIN || delta == old_delta);
4096 }
4097 #endif
4098
4099 /* Generate code to push X onto the stack, assuming it has mode MODE and
4100 type TYPE.
4101 MODE is redundant except when X is a CONST_INT (since they don't
4102 carry mode info).
4103 SIZE is an rtx for the size of data to be copied (in bytes),
4104 needed only if X is BLKmode.
4105
4106 ALIGN (in bits) is maximum alignment we can assume.
4107
4108 If PARTIAL and REG are both nonzero, then copy that many of the first
4109 bytes of X into registers starting with REG, and push the rest of X.
4110 The amount of space pushed is decreased by PARTIAL bytes.
4111 REG must be a hard register in this case.
4112 If REG is zero but PARTIAL is not, take all other actions for an
4113 argument partially in registers, but do not actually load any
4114 registers.
4115
4116 EXTRA is the amount in bytes of extra space to leave next to this arg.
4117 This is ignored if an argument block has already been allocated.
4118
4119 On a machine that lacks real push insns, ARGS_ADDR is the address of
4120 the bottom of the argument block for this call. We use indexing off there
4121 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4122 argument block has not been preallocated.
4123
4124 ARGS_SO_FAR is the size of args previously pushed for this call.
4125
4126 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4127 for arguments passed in registers. If nonzero, it will be the number
4128 of bytes required. */
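/* A hypothetical example of the PARTIAL/REG interplay: with 4-byte words,
   PARTIAL == 8 and REG a hard register pair, the first 8 bytes of X end up
   in registers (they are loaded at the end of this function) and only the
   remainder is stored on the stack; whether those 8 bytes still consume
   stack space depends on REG_PARM_STACK_SPACE, as described above.  */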
4129
4130 void
4131 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4132 unsigned int align, int partial, rtx reg, int extra,
4133 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4134 rtx alignment_pad)
4135 {
4136 rtx xinner;
4137 enum direction stack_direction
4138 #ifdef STACK_GROWS_DOWNWARD
4139 = downward;
4140 #else
4141 = upward;
4142 #endif
4143
4144 /* Decide where to pad the argument: `downward' for below,
4145 `upward' for above, or `none' for don't pad it.
4146 Default is below for small data on big-endian machines; else above. */
4147 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4148
4149 /* Invert direction if stack is post-decrement.
4150 FIXME: why? */
4151 if (STACK_PUSH_CODE == POST_DEC)
4152 if (where_pad != none)
4153 where_pad = (where_pad == downward ? upward : downward);
4154
4155 xinner = x;
4156
4157 if (mode == BLKmode
4158 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4159 {
4160 /* Copy a block into the stack, entirely or partially. */
4161
4162 rtx temp;
4163 int used;
4164 int offset;
4165 int skip;
4166
4167 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4168 used = partial - offset;
4169
4170 if (mode != BLKmode)
4171 {
4172 /* A value is to be stored in an insufficiently aligned
4173 stack slot; copy via a suitably aligned slot if
4174 necessary. */
4175 size = GEN_INT (GET_MODE_SIZE (mode));
4176 if (!MEM_P (xinner))
4177 {
4178 temp = assign_temp (type, 1, 1);
4179 emit_move_insn (temp, xinner);
4180 xinner = temp;
4181 }
4182 }
4183
4184 gcc_assert (size);
4185
4186 /* USED is now the # of bytes we need not copy to the stack
4187 because registers will take care of them. */
4188
4189 if (partial != 0)
4190 xinner = adjust_address (xinner, BLKmode, used);
4191
4192 /* If the partial register-part of the arg counts in its stack size,
4193 skip the part of stack space corresponding to the registers.
4194 Otherwise, start copying to the beginning of the stack space,
4195 by setting SKIP to 0. */
4196 skip = (reg_parm_stack_space == 0) ? 0 : used;
4197
4198 #ifdef PUSH_ROUNDING
4199 /* Do it with several push insns if that doesn't take lots of insns
4200 and if there is no difficulty with push insns that skip bytes
4201 on the stack for alignment purposes. */
4202 if (args_addr == 0
4203 && PUSH_ARGS
4204 && CONST_INT_P (size)
4205 && skip == 0
4206 && MEM_ALIGN (xinner) >= align
4207 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4208 /* Here we avoid the case of a structure whose weak alignment
4209 forces many pushes of a small amount of data,
4210 and such small pushes do rounding that causes trouble. */
4211 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4212 || align >= BIGGEST_ALIGNMENT
4213 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4214 == (align / BITS_PER_UNIT)))
4215 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4216 {
4217 /* Push padding now if padding above and stack grows down,
4218 or if padding below and stack grows up.
4219 But if space already allocated, this has already been done. */
4220 if (extra && args_addr == 0
4221 && where_pad != none && where_pad != stack_direction)
4222 anti_adjust_stack (GEN_INT (extra));
4223
4224 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4225 }
4226 else
4227 #endif /* PUSH_ROUNDING */
4228 {
4229 rtx target;
4230
4231 /* Otherwise make space on the stack and copy the data
4232 to the address of that space. */
4233
4234 /* Deduct words put into registers from the size we must copy. */
4235 if (partial != 0)
4236 {
4237 if (CONST_INT_P (size))
4238 size = GEN_INT (INTVAL (size) - used);
4239 else
4240 size = expand_binop (GET_MODE (size), sub_optab, size,
4241 gen_int_mode (used, GET_MODE (size)),
4242 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4243 }
4244
4245 /* Get the address of the stack space.
4246 In this case, we do not deal with EXTRA separately.
4247 A single stack adjust will do. */
4248 if (! args_addr)
4249 {
4250 temp = push_block (size, extra, where_pad == downward);
4251 extra = 0;
4252 }
4253 else if (CONST_INT_P (args_so_far))
4254 temp = memory_address (BLKmode,
4255 plus_constant (Pmode, args_addr,
4256 skip + INTVAL (args_so_far)));
4257 else
4258 temp = memory_address (BLKmode,
4259 plus_constant (Pmode,
4260 gen_rtx_PLUS (Pmode,
4261 args_addr,
4262 args_so_far),
4263 skip));
4264
4265 if (!ACCUMULATE_OUTGOING_ARGS)
4266 {
4267 /* If the source is referenced relative to the stack pointer,
4268 copy it to another register to stabilize it. We do not need
4269 to do this if we know that we won't be changing sp. */
4270
4271 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4272 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4273 temp = copy_to_reg (temp);
4274 }
4275
4276 target = gen_rtx_MEM (BLKmode, temp);
4277
4278 /* We do *not* set_mem_attributes here, because incoming arguments
4279 may overlap with sibling call outgoing arguments and we cannot
4280 allow reordering of reads from function arguments with stores
4281 to outgoing arguments of sibling calls. We do, however, want
4282 to record the alignment of the stack slot. */
4283 /* ALIGN may well be better aligned than TYPE, e.g. due to
4284 PARM_BOUNDARY. Assume the caller isn't lying. */
4285 set_mem_align (target, align);
4286
4287 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4288 }
4289 }
4290 else if (partial > 0)
4291 {
4292 /* Scalar partly in registers. */
4293
4294 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4295 int i;
4296 int not_stack;
4297 /* # bytes of start of argument
4298 that we must make space for but need not store. */
4299 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4300 int args_offset = INTVAL (args_so_far);
4301 int skip;
4302
4303 /* Push padding now if padding above and stack grows down,
4304 or if padding below and stack grows up.
4305 But if space already allocated, this has already been done. */
4306 if (extra && args_addr == 0
4307 && where_pad != none && where_pad != stack_direction)
4308 anti_adjust_stack (GEN_INT (extra));
4309
4310 /* If we make space by pushing it, we might as well push
4311 the real data. Otherwise, we can leave OFFSET nonzero
4312 and leave the space uninitialized. */
4313 if (args_addr == 0)
4314 offset = 0;
4315
4316 /* Now NOT_STACK gets the number of words that we don't need to
4317 allocate on the stack. Convert OFFSET to words too. */
4318 not_stack = (partial - offset) / UNITS_PER_WORD;
4319 offset /= UNITS_PER_WORD;
4320
4321 /* If the partial register-part of the arg counts in its stack size,
4322 skip the part of stack space corresponding to the registers.
4323 Otherwise, start copying to the beginning of the stack space,
4324 by setting SKIP to 0. */
4325 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4326
4327 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4328 x = validize_mem (force_const_mem (mode, x));
4329
4330 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4331 SUBREGs of such registers are not allowed. */
4332 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4333 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4334 x = copy_to_reg (x);
4335
4336 /* Loop over all the words allocated on the stack for this arg. */
4337 /* We can do it by words, because any scalar bigger than a word
4338 has a size a multiple of a word. */
4339 #ifndef PUSH_ARGS_REVERSED
4340 for (i = not_stack; i < size; i++)
4341 #else
4342 for (i = size - 1; i >= not_stack; i--)
4343 #endif
4344 if (i >= not_stack + offset)
4345 emit_push_insn (operand_subword_force (x, i, mode),
4346 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4347 0, args_addr,
4348 GEN_INT (args_offset + ((i - not_stack + skip)
4349 * UNITS_PER_WORD)),
4350 reg_parm_stack_space, alignment_pad);
4351 }
4352 else
4353 {
4354 rtx addr;
4355 rtx dest;
4356
4357 /* Push padding now if padding above and stack grows down,
4358 or if padding below and stack grows up.
4359 But if space already allocated, this has already been done. */
4360 if (extra && args_addr == 0
4361 && where_pad != none && where_pad != stack_direction)
4362 anti_adjust_stack (GEN_INT (extra));
4363
4364 #ifdef PUSH_ROUNDING
4365 if (args_addr == 0 && PUSH_ARGS)
4366 emit_single_push_insn (mode, x, type);
4367 else
4368 #endif
4369 {
4370 if (CONST_INT_P (args_so_far))
4371 addr
4372 = memory_address (mode,
4373 plus_constant (Pmode, args_addr,
4374 INTVAL (args_so_far)));
4375 else
4376 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4377 args_so_far));
4378 dest = gen_rtx_MEM (mode, addr);
4379
4380 /* We do *not* set_mem_attributes here, because incoming arguments
4381 may overlap with sibling call outgoing arguments and we cannot
4382 allow reordering of reads from function arguments with stores
4383 to outgoing arguments of sibling calls. We do, however, want
4384 to record the alignment of the stack slot. */
4385 /* ALIGN may well be better aligned than TYPE, e.g. due to
4386 PARM_BOUNDARY. Assume the caller isn't lying. */
4387 set_mem_align (dest, align);
4388
4389 emit_move_insn (dest, x);
4390 }
4391 }
4392
4393 /* If part should go in registers, copy that part
4394 into the appropriate registers. Do this now, at the end,
4395 since mem-to-mem copies above may do function calls. */
4396 if (partial > 0 && reg != 0)
4397 {
4398 /* Handle calls that pass values in multiple non-contiguous locations.
4399 The Irix 6 ABI has examples of this. */
4400 if (GET_CODE (reg) == PARALLEL)
4401 emit_group_load (reg, x, type, -1);
4402 else
4403 {
4404 gcc_assert (partial % UNITS_PER_WORD == 0);
4405 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4406 }
4407 }
4408
4409 if (extra && args_addr == 0 && where_pad == stack_direction)
4410 anti_adjust_stack (GEN_INT (extra));
4411
4412 if (alignment_pad && args_addr == 0)
4413 anti_adjust_stack (alignment_pad);
4414 }
4415 \f
4416 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4417 operations. */
4418
4419 static rtx
4420 get_subtarget (rtx x)
4421 {
4422 return (optimize
4423 || x == 0
4424 /* Only registers can be subtargets. */
4425 || !REG_P (x)
4426 /* Don't use hard regs to avoid extending their life. */
4427 || REGNO (x) < FIRST_PSEUDO_REGISTER
4428 ? 0 : x);
4429 }
4430
4431 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4432 FIELD is a bitfield. Returns true if the optimization was successful,
4433 and there's nothing else to do. */
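/* A hypothetical case this catches, assuming a 32-bit word and a layout
   that places TOP in the most significant bits:

       struct s { unsigned int rest : 29; unsigned int top : 3; } x;
       x.top += 1;

   The increment can be done as a full-word add of (1 << 29) with no
   masking, since carries out of the field simply fall off the top of the
   word; a 1-bit field can likewise be toggled with xor.  */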
4434
4435 static bool
4436 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4437 unsigned HOST_WIDE_INT bitpos,
4438 unsigned HOST_WIDE_INT bitregion_start,
4439 unsigned HOST_WIDE_INT bitregion_end,
4440 enum machine_mode mode1, rtx str_rtx,
4441 tree to, tree src)
4442 {
4443 enum machine_mode str_mode = GET_MODE (str_rtx);
4444 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4445 tree op0, op1;
4446 rtx value, result;
4447 optab binop;
4448 gimple srcstmt;
4449 enum tree_code code;
4450
4451 if (mode1 != VOIDmode
4452 || bitsize >= BITS_PER_WORD
4453 || str_bitsize > BITS_PER_WORD
4454 || TREE_SIDE_EFFECTS (to)
4455 || TREE_THIS_VOLATILE (to))
4456 return false;
4457
4458 STRIP_NOPS (src);
4459 if (TREE_CODE (src) != SSA_NAME)
4460 return false;
4461 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4462 return false;
4463
4464 srcstmt = get_gimple_for_ssa_name (src);
4465 if (!srcstmt
4466 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4467 return false;
4468
4469 code = gimple_assign_rhs_code (srcstmt);
4470
4471 op0 = gimple_assign_rhs1 (srcstmt);
4472
4473 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4474 to find its initialization. Hopefully the initialization will
4475 be from a bitfield load. */
4476 if (TREE_CODE (op0) == SSA_NAME)
4477 {
4478 gimple op0stmt = get_gimple_for_ssa_name (op0);
4479
4480 /* We want to eventually have OP0 be the same as TO, which
4481 should be a bitfield. */
4482 if (!op0stmt
4483 || !is_gimple_assign (op0stmt)
4484 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4485 return false;
4486 op0 = gimple_assign_rhs1 (op0stmt);
4487 }
4488
4489 op1 = gimple_assign_rhs2 (srcstmt);
4490
4491 if (!operand_equal_p (to, op0, 0))
4492 return false;
4493
4494 if (MEM_P (str_rtx))
4495 {
4496 unsigned HOST_WIDE_INT offset1;
4497
4498 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4499 str_mode = word_mode;
4500 str_mode = get_best_mode (bitsize, bitpos,
4501 bitregion_start, bitregion_end,
4502 MEM_ALIGN (str_rtx), str_mode, 0);
4503 if (str_mode == VOIDmode)
4504 return false;
4505 str_bitsize = GET_MODE_BITSIZE (str_mode);
4506
4507 offset1 = bitpos;
4508 bitpos %= str_bitsize;
4509 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4510 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4511 }
4512 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4513 return false;
4514
4515 /* If the bit field covers the whole REG/MEM, store_field
4516 will likely generate better code. */
4517 if (bitsize >= str_bitsize)
4518 return false;
4519
4520 /* We can't handle fields split across multiple entities. */
4521 if (bitpos + bitsize > str_bitsize)
4522 return false;
4523
4524 if (BYTES_BIG_ENDIAN)
4525 bitpos = str_bitsize - bitpos - bitsize;
4526
4527 switch (code)
4528 {
4529 case PLUS_EXPR:
4530 case MINUS_EXPR:
4531 /* For now, just optimize the case of the topmost bitfield,
4532 where we don't need to do any masking, and of
4533 1-bit bitfields where xor can be used.
4534 We might win by one instruction for the other bitfields
4535 too if insv/extv instructions aren't used, so that
4536 can be added later. */
4537 if (bitpos + bitsize != str_bitsize
4538 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4539 break;
4540
4541 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4542 value = convert_modes (str_mode,
4543 TYPE_MODE (TREE_TYPE (op1)), value,
4544 TYPE_UNSIGNED (TREE_TYPE (op1)));
4545
4546 /* We may be accessing data outside the field, which means
4547 we can alias adjacent data. */
4548 if (MEM_P (str_rtx))
4549 {
4550 str_rtx = shallow_copy_rtx (str_rtx);
4551 set_mem_alias_set (str_rtx, 0);
4552 set_mem_expr (str_rtx, 0);
4553 }
4554
4555 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4556 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4557 {
4558 value = expand_and (str_mode, value, const1_rtx, NULL);
4559 binop = xor_optab;
4560 }
4561 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4562 result = expand_binop (str_mode, binop, str_rtx,
4563 value, str_rtx, 1, OPTAB_WIDEN);
4564 if (result != str_rtx)
4565 emit_move_insn (str_rtx, result);
4566 return true;
4567
4568 case BIT_IOR_EXPR:
4569 case BIT_XOR_EXPR:
4570 if (TREE_CODE (op1) != INTEGER_CST)
4571 break;
4572 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4573 value = convert_modes (str_mode,
4574 TYPE_MODE (TREE_TYPE (op1)), value,
4575 TYPE_UNSIGNED (TREE_TYPE (op1)));
4576
4577 /* We may be accessing data outside the field, which means
4578 we can alias adjacent data. */
4579 if (MEM_P (str_rtx))
4580 {
4581 str_rtx = shallow_copy_rtx (str_rtx);
4582 set_mem_alias_set (str_rtx, 0);
4583 set_mem_expr (str_rtx, 0);
4584 }
4585
4586 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4587 if (bitpos + bitsize != str_bitsize)
4588 {
4589 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4590 str_mode);
4591 value = expand_and (str_mode, value, mask, NULL_RTX);
4592 }
4593 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4594 result = expand_binop (str_mode, binop, str_rtx,
4595 value, str_rtx, 1, OPTAB_WIDEN);
4596 if (result != str_rtx)
4597 emit_move_insn (str_rtx, result);
4598 return true;
4599
4600 default:
4601 break;
4602 }
4603
4604 return false;
4605 }
4606
4607 /* In the C++ memory model, consecutive bit fields in a structure are
4608 considered one memory location.
4609
4610 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4611 returns the bit range of consecutive bits in which this COMPONENT_REF
4612 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4613 and *OFFSET may be adjusted in the process.
4614
4615 If the access does not need to be restricted, 0 is returned in both
4616 *BITSTART and *BITEND. */
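/* For illustration (the layout is hypothetical):

       struct s { char c; int f1 : 3; int f2 : 6; char d; };

   F1 and F2 form a single memory location, so a store to s.f1 may
   read-modify-write bits belonging to F2 but must not touch s.c or s.d.
   The range computed below is derived from the
   DECL_BIT_FIELD_REPRESENTATIVE covering F1 and F2.  */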
4617
4618 static void
4619 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4620 unsigned HOST_WIDE_INT *bitend,
4621 tree exp,
4622 HOST_WIDE_INT *bitpos,
4623 tree *offset)
4624 {
4625 HOST_WIDE_INT bitoffset;
4626 tree field, repr;
4627
4628 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4629
4630 field = TREE_OPERAND (exp, 1);
4631 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4632 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4633 need to limit the range we can access. */
4634 if (!repr)
4635 {
4636 *bitstart = *bitend = 0;
4637 return;
4638 }
4639
4640 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4641 part of a larger bit field, then the representative does not serve any
4642 useful purpose. This can occur in Ada. */
4643 if (handled_component_p (TREE_OPERAND (exp, 0)))
4644 {
4645 enum machine_mode rmode;
4646 HOST_WIDE_INT rbitsize, rbitpos;
4647 tree roffset;
4648 int unsignedp;
4649 int volatilep = 0;
4650 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4651 &roffset, &rmode, &unsignedp, &volatilep);
4652 if ((rbitpos % BITS_PER_UNIT) != 0)
4653 {
4654 *bitstart = *bitend = 0;
4655 return;
4656 }
4657 }
4658
4659 /* Compute the adjustment to bitpos from the offset of the field
4660 relative to the representative. DECL_FIELD_OFFSET of field and
4661 repr are the same by construction if they are not constants,
4662 see finish_bitfield_layout. */
4663 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4664 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4665 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4666 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4667 else
4668 bitoffset = 0;
4669 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4670 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4671
4672 /* If the adjustment is larger than bitpos, we would have a negative bit
4673 position for the lower bound and this may wreak havoc later. Adjust
4674 offset and bitpos to make the lower bound non-negative in that case. */
4675 if (bitoffset > *bitpos)
4676 {
4677 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4678 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4679
4680 *bitpos += adjust;
4681 if (*offset == NULL_TREE)
4682 *offset = size_int (-adjust / BITS_PER_UNIT);
4683 else
4684 *offset
4685 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4686 *bitstart = 0;
4687 }
4688 else
4689 *bitstart = *bitpos - bitoffset;
4690
4691 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4692 }
4693
4694 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4695 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4696 DECL_RTL was not set yet, return NORTL. */
4697
4698 static inline bool
4699 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4700 {
4701 if (TREE_CODE (addr) != ADDR_EXPR)
4702 return false;
4703
4704 tree base = TREE_OPERAND (addr, 0);
4705
4706 if (!DECL_P (base)
4707 || TREE_ADDRESSABLE (base)
4708 || DECL_MODE (base) == BLKmode)
4709 return false;
4710
4711 if (!DECL_RTL_SET_P (base))
4712 return nortl;
4713
4714 return (!MEM_P (DECL_RTL (base)));
4715 }
4716
4717 /* Returns true if the MEM_REF REF refers to an object that does not
4718 reside in memory and has non-BLKmode. */
4719
4720 static inline bool
4721 mem_ref_refers_to_non_mem_p (tree ref)
4722 {
4723 tree base = TREE_OPERAND (ref, 0);
4724 return addr_expr_of_non_mem_decl_p_1 (base, false);
4725 }
4726
4727 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4728 is true, try generating a nontemporal store. */
4729
4730 void
4731 expand_assignment (tree to, tree from, bool nontemporal)
4732 {
4733 rtx to_rtx = 0;
4734 rtx result;
4735 enum machine_mode mode;
4736 unsigned int align;
4737 enum insn_code icode;
4738
4739 /* Don't crash if the lhs of the assignment was erroneous. */
4740 if (TREE_CODE (to) == ERROR_MARK)
4741 {
4742 expand_normal (from);
4743 return;
4744 }
4745
4746 /* Optimize away no-op moves without side-effects. */
4747 if (operand_equal_p (to, from, 0))
4748 return;
4749
4750 /* Handle misaligned stores. */
4751 mode = TYPE_MODE (TREE_TYPE (to));
4752 if ((TREE_CODE (to) == MEM_REF
4753 || TREE_CODE (to) == TARGET_MEM_REF)
4754 && mode != BLKmode
4755 && !mem_ref_refers_to_non_mem_p (to)
4756 && ((align = get_object_alignment (to))
4757 < GET_MODE_ALIGNMENT (mode))
4758 && (((icode = optab_handler (movmisalign_optab, mode))
4759 != CODE_FOR_nothing)
4760 || SLOW_UNALIGNED_ACCESS (mode, align)))
4761 {
4762 rtx reg, mem;
4763
4764 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4765 reg = force_not_mem (reg);
4766 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4767
4768 if (icode != CODE_FOR_nothing)
4769 {
4770 struct expand_operand ops[2];
4771
4772 create_fixed_operand (&ops[0], mem);
4773 create_input_operand (&ops[1], reg, mode);
4774 /* The movmisalign<mode> pattern cannot fail, else the assignment
4775 would silently be omitted. */
4776 expand_insn (icode, 2, ops);
4777 }
4778 else
4779 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4780 0, 0, 0, mode, reg);
4781 return;
4782 }
4783
4784 /* Assignment of a structure component needs special treatment
4785 if the structure component's rtx is not simply a MEM.
4786 Assignment of an array element at a constant index, and assignment of
4787 an array element in an unaligned packed structure field, have the same
4788 problem. Same for (partially) storing into a non-memory object. */
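  /* Hypothetical examples of such stores: "x.f = v" where X lives in a
     register rather than in memory, or "p->a[2] = v" where the array sits
     inside a packed structure and is therefore underaligned.  */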
4789 if (handled_component_p (to)
4790 || (TREE_CODE (to) == MEM_REF
4791 && mem_ref_refers_to_non_mem_p (to))
4792 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4793 {
4794 enum machine_mode mode1;
4795 HOST_WIDE_INT bitsize, bitpos;
4796 unsigned HOST_WIDE_INT bitregion_start = 0;
4797 unsigned HOST_WIDE_INT bitregion_end = 0;
4798 tree offset;
4799 int unsignedp;
4800 int volatilep = 0;
4801 tree tem;
4802
4803 push_temp_slots ();
4804 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4805 &unsignedp, &volatilep);
4806
4807 /* Make sure bitpos is not negative, it can wreak havoc later. */
4808 if (bitpos < 0)
4809 {
4810 gcc_assert (offset == NULL_TREE);
4811 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4812 ? 3 : exact_log2 (BITS_PER_UNIT)));
4813 bitpos &= BITS_PER_UNIT - 1;
4814 }
4815
4816 if (TREE_CODE (to) == COMPONENT_REF
4817 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4818 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4819
4820 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4821
4822 /* If the bitfield is volatile, we want to access it in the
4823 field's mode, not the computed mode.
4824 If a MEM has VOIDmode (external with incomplete type),
4825 use BLKmode for it instead. */
4826 if (MEM_P (to_rtx))
4827 {
4828 if (volatilep && flag_strict_volatile_bitfields > 0)
4829 to_rtx = adjust_address (to_rtx, mode1, 0);
4830 else if (GET_MODE (to_rtx) == VOIDmode)
4831 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4832 }
4833
4834 if (offset != 0)
4835 {
4836 enum machine_mode address_mode;
4837 rtx offset_rtx;
4838
4839 if (!MEM_P (to_rtx))
4840 {
4841 /* We can get constant negative offsets into arrays with broken
4842 user code. Translate this to a trap instead of ICEing. */
4843 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4844 expand_builtin_trap ();
4845 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4846 }
4847
4848 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4849 address_mode = get_address_mode (to_rtx);
4850 if (GET_MODE (offset_rtx) != address_mode)
4851 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4852
4853 /* A constant address in TO_RTX can have VOIDmode, we must not try
4854 to call force_reg for that case. Avoid that case. */
4855 if (MEM_P (to_rtx)
4856 && GET_MODE (to_rtx) == BLKmode
4857 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4858 && bitsize > 0
4859 && (bitpos % bitsize) == 0
4860 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4861 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4862 {
4863 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4864 bitpos = 0;
4865 }
4866
4867 to_rtx = offset_address (to_rtx, offset_rtx,
4868 highest_pow2_factor_for_target (to,
4869 offset));
4870 }
4871
4872 /* No action is needed if the target is not a memory and the field
4873 lies completely outside that target. This can occur if the source
4874 code contains an out-of-bounds access to a small array. */
4875 if (!MEM_P (to_rtx)
4876 && GET_MODE (to_rtx) != BLKmode
4877 && (unsigned HOST_WIDE_INT) bitpos
4878 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4879 {
4880 expand_normal (from);
4881 result = NULL;
4882 }
4883 /* Handle expand_expr of a complex value returning a CONCAT. */
4884 else if (GET_CODE (to_rtx) == CONCAT)
4885 {
4886 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4887 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4888 && bitpos == 0
4889 && bitsize == mode_bitsize)
4890 result = store_expr (from, to_rtx, false, nontemporal);
4891 else if (bitsize == mode_bitsize / 2
4892 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4893 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4894 nontemporal);
4895 else if (bitpos + bitsize <= mode_bitsize / 2)
4896 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4897 bitregion_start, bitregion_end,
4898 mode1, from,
4899 get_alias_set (to), nontemporal);
4900 else if (bitpos >= mode_bitsize / 2)
4901 result = store_field (XEXP (to_rtx, 1), bitsize,
4902 bitpos - mode_bitsize / 2,
4903 bitregion_start, bitregion_end,
4904 mode1, from,
4905 get_alias_set (to), nontemporal);
4906 else if (bitpos == 0 && bitsize == mode_bitsize)
4907 {
4908 rtx from_rtx;
4909 result = expand_normal (from);
4910 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4911 TYPE_MODE (TREE_TYPE (from)), 0);
4912 emit_move_insn (XEXP (to_rtx, 0),
4913 read_complex_part (from_rtx, false));
4914 emit_move_insn (XEXP (to_rtx, 1),
4915 read_complex_part (from_rtx, true));
4916 }
4917 else
4918 {
4919 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4920 GET_MODE_SIZE (GET_MODE (to_rtx)));
4921 write_complex_part (temp, XEXP (to_rtx, 0), false);
4922 write_complex_part (temp, XEXP (to_rtx, 1), true);
4923 result = store_field (temp, bitsize, bitpos,
4924 bitregion_start, bitregion_end,
4925 mode1, from,
4926 get_alias_set (to), nontemporal);
4927 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4928 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4929 }
4930 }
4931 else
4932 {
4933 if (MEM_P (to_rtx))
4934 {
4935 /* If the field is at offset zero, we could have been given the
4936 DECL_RTX of the parent struct. Don't munge it. */
4937 to_rtx = shallow_copy_rtx (to_rtx);
4938 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4939 if (volatilep)
4940 MEM_VOLATILE_P (to_rtx) = 1;
4941 }
4942
4943 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4944 bitregion_start, bitregion_end,
4945 mode1,
4946 to_rtx, to, from))
4947 result = NULL;
4948 else
4949 result = store_field (to_rtx, bitsize, bitpos,
4950 bitregion_start, bitregion_end,
4951 mode1, from,
4952 get_alias_set (to), nontemporal);
4953 }
4954
4955 if (result)
4956 preserve_temp_slots (result);
4957 pop_temp_slots ();
4958 return;
4959 }
4960
4961 /* If the rhs is a function call and its value is not an aggregate,
4962 call the function before we start to compute the lhs.
4963 This is needed for correct code for cases such as
4964 val = setjmp (buf) on machines where reference to val
4965 requires loading up part of an address in a separate insn.
4966
4967 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4968 since it might be a promoted variable where the zero- or sign- extension
4969 needs to be done. Handling this in the normal way is safe because no
4970 computation is done before the call. The same is true for SSA names. */
4971 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4972 && COMPLETE_TYPE_P (TREE_TYPE (from))
4973 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4974 && ! (((TREE_CODE (to) == VAR_DECL
4975 || TREE_CODE (to) == PARM_DECL
4976 || TREE_CODE (to) == RESULT_DECL)
4977 && REG_P (DECL_RTL (to)))
4978 || TREE_CODE (to) == SSA_NAME))
4979 {
4980 rtx value;
4981
4982 push_temp_slots ();
4983 value = expand_normal (from);
4984 if (to_rtx == 0)
4985 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4986
4987 /* Handle calls that return values in multiple non-contiguous locations.
4988 The Irix 6 ABI has examples of this. */
4989 if (GET_CODE (to_rtx) == PARALLEL)
4990 {
4991 if (GET_CODE (value) == PARALLEL)
4992 emit_group_move (to_rtx, value);
4993 else
4994 emit_group_load (to_rtx, value, TREE_TYPE (from),
4995 int_size_in_bytes (TREE_TYPE (from)));
4996 }
4997 else if (GET_CODE (value) == PARALLEL)
4998 emit_group_store (to_rtx, value, TREE_TYPE (from),
4999 int_size_in_bytes (TREE_TYPE (from)));
5000 else if (GET_MODE (to_rtx) == BLKmode)
5001 {
5002 /* Handle calls that return BLKmode values in registers. */
5003 if (REG_P (value))
5004 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5005 else
5006 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5007 }
5008 else
5009 {
5010 if (POINTER_TYPE_P (TREE_TYPE (to)))
5011 value = convert_memory_address_addr_space
5012 (GET_MODE (to_rtx), value,
5013 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5014
5015 emit_move_insn (to_rtx, value);
5016 }
5017 preserve_temp_slots (to_rtx);
5018 pop_temp_slots ();
5019 return;
5020 }
5021
5022 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5023 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5024
5025 /* Don't move directly into a return register. */
5026 if (TREE_CODE (to) == RESULT_DECL
5027 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5028 {
5029 rtx temp;
5030
5031 push_temp_slots ();
5032
5033 /* If the source is itself a return value, it still is in a pseudo at
5034 this point so we can move it back to the return register directly. */
5035 if (REG_P (to_rtx)
5036 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5037 && TREE_CODE (from) != CALL_EXPR)
5038 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5039 else
5040 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5041
5042 /* Handle calls that return values in multiple non-contiguous locations.
5043 The Irix 6 ABI has examples of this. */
5044 if (GET_CODE (to_rtx) == PARALLEL)
5045 {
5046 if (GET_CODE (temp) == PARALLEL)
5047 emit_group_move (to_rtx, temp);
5048 else
5049 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5050 int_size_in_bytes (TREE_TYPE (from)));
5051 }
5052 else if (temp)
5053 emit_move_insn (to_rtx, temp);
5054
5055 preserve_temp_slots (to_rtx);
5056 pop_temp_slots ();
5057 return;
5058 }
5059
5060 /* In case we are returning the contents of an object which overlaps
5061 the place the value is being stored, use a safe function when copying
5062 a value through a pointer into a structure value return block. */
5063 if (TREE_CODE (to) == RESULT_DECL
5064 && TREE_CODE (from) == INDIRECT_REF
5065 && ADDR_SPACE_GENERIC_P
5066 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5067 && refs_may_alias_p (to, from)
5068 && cfun->returns_struct
5069 && !cfun->returns_pcc_struct)
5070 {
5071 rtx from_rtx, size;
5072
5073 push_temp_slots ();
5074 size = expr_size (from);
5075 from_rtx = expand_normal (from);
5076
5077 emit_library_call (memmove_libfunc, LCT_NORMAL,
5078 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5079 XEXP (from_rtx, 0), Pmode,
5080 convert_to_mode (TYPE_MODE (sizetype),
5081 size, TYPE_UNSIGNED (sizetype)),
5082 TYPE_MODE (sizetype));
5083
5084 preserve_temp_slots (to_rtx);
5085 pop_temp_slots ();
5086 return;
5087 }
5088
5089 /* Compute FROM and store the value in the rtx we got. */
5090
5091 push_temp_slots ();
5092 result = store_expr (from, to_rtx, 0, nontemporal);
5093 preserve_temp_slots (result);
5094 pop_temp_slots ();
5095 return;
5096 }
5097
5098 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5099 succeeded, false otherwise. */
5100
5101 bool
5102 emit_storent_insn (rtx to, rtx from)
5103 {
5104 struct expand_operand ops[2];
5105 enum machine_mode mode = GET_MODE (to);
5106 enum insn_code code = optab_handler (storent_optab, mode);
5107
5108 if (code == CODE_FOR_nothing)
5109 return false;
5110
5111 create_fixed_operand (&ops[0], to);
5112 create_input_operand (&ops[1], from, mode);
5113 return maybe_expand_insn (code, 2, ops);
5114 }
5115
5116 /* Generate code for computing expression EXP,
5117 and storing the value into TARGET.
5118
5119 If the mode is BLKmode then we may return TARGET itself.
5120 It turns out that in BLKmode it doesn't cause a problem,
5121 because C has no operators that could combine two different
5122 assignments into the same BLKmode object with different values
5123 with no sequence point. Will other languages need this to
5124 be more thorough?
5125
5126 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5127 stack, and block moves may need to be treated specially.
5128
5129 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5130
5131 rtx
5132 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5133 {
5134 rtx temp;
5135 rtx alt_rtl = NULL_RTX;
5136 location_t loc = curr_insn_location ();
5137
5138 if (VOID_TYPE_P (TREE_TYPE (exp)))
5139 {
5140 /* C++ can generate ?: expressions with a throw expression in one
5141 branch and an rvalue in the other. Here, we resolve attempts to
5142 store the throw expression's nonexistent result. */
5143 gcc_assert (!call_param_p);
5144 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5145 return NULL_RTX;
5146 }
5147 if (TREE_CODE (exp) == COMPOUND_EXPR)
5148 {
5149 /* Perform first part of compound expression, then assign from second
5150 part. */
5151 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5152 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5153 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5154 nontemporal);
5155 }
5156 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5157 {
5158 /* For conditional expression, get safe form of the target. Then
5159 test the condition, doing the appropriate assignment on either
5160 side. This avoids the creation of unnecessary temporaries.
5161 For non-BLKmode, it is more efficient not to do this. */
5162
5163 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5164
5165 do_pending_stack_adjust ();
5166 NO_DEFER_POP;
5167 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5168 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5169 nontemporal);
5170 emit_jump_insn (gen_jump (lab2));
5171 emit_barrier ();
5172 emit_label (lab1);
5173 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5174 nontemporal);
5175 emit_label (lab2);
5176 OK_DEFER_POP;
5177
5178 return NULL_RTX;
5179 }
5180 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5181 /* If this is a scalar in a register that is stored in a wider mode
5182 than the declared mode, compute the result into its declared mode
5183 and then convert to the wider mode. Our value is the computed
5184 expression. */
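    /* For instance (the target behaviour is hypothetical), a "short"
       variable that the target keeps sign-extended in a full-width
       register: the right-hand side is computed in the narrow declared
       mode and then extended into SUBREG_REG (target) by the
       convert_move below.  */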
5185 {
5186 rtx inner_target = 0;
5187
5188 /* We can do the conversion inside EXP, which will often result
5189 in some optimizations. Do the conversion in two steps: first
5190 change the signedness, if needed, then the extend. But don't
5191 do this if the type of EXP is a subtype of something else
5192 since then the conversion might involve more than just
5193 converting modes. */
5194 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5195 && TREE_TYPE (TREE_TYPE (exp)) == 0
5196 && GET_MODE_PRECISION (GET_MODE (target))
5197 == TYPE_PRECISION (TREE_TYPE (exp)))
5198 {
5199 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5200 != SUBREG_PROMOTED_UNSIGNED_P (target))
5201 {
5202 /* Some types, e.g. Fortran's logical*4, won't have a signed
5203 version, so use the mode instead. */
5204 tree ntype
5205 = (signed_or_unsigned_type_for
5206 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5207 if (ntype == NULL)
5208 ntype = lang_hooks.types.type_for_mode
5209 (TYPE_MODE (TREE_TYPE (exp)),
5210 SUBREG_PROMOTED_UNSIGNED_P (target));
5211
5212 exp = fold_convert_loc (loc, ntype, exp);
5213 }
5214
5215 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5216 (GET_MODE (SUBREG_REG (target)),
5217 SUBREG_PROMOTED_UNSIGNED_P (target)),
5218 exp);
5219
5220 inner_target = SUBREG_REG (target);
5221 }
5222
5223 temp = expand_expr (exp, inner_target, VOIDmode,
5224 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5225
5226 /* If TEMP is a VOIDmode constant, use convert_modes to make
5227 sure that we properly convert it. */
5228 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5229 {
5230 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5231 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5232 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5233 GET_MODE (target), temp,
5234 SUBREG_PROMOTED_UNSIGNED_P (target));
5235 }
5236
5237 convert_move (SUBREG_REG (target), temp,
5238 SUBREG_PROMOTED_UNSIGNED_P (target));
5239
5240 return NULL_RTX;
5241 }
5242 else if ((TREE_CODE (exp) == STRING_CST
5243 || (TREE_CODE (exp) == MEM_REF
5244 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5245 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5246 == STRING_CST
5247 && integer_zerop (TREE_OPERAND (exp, 1))))
5248 && !nontemporal && !call_param_p
5249 && MEM_P (target))
5250 {
5251 /* Optimize initialization of an array with a STRING_CST. */
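      /* Hypothetical example: storing the STRING_CST "abc" into a 16-byte
         char array.  The string bytes are copied by pieces below and any
         remaining bytes are cleared with clear_storage.  */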
5252 HOST_WIDE_INT exp_len, str_copy_len;
5253 rtx dest_mem;
5254 tree str = TREE_CODE (exp) == STRING_CST
5255 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5256
5257 exp_len = int_expr_size (exp);
5258 if (exp_len <= 0)
5259 goto normal_expr;
5260
5261 if (TREE_STRING_LENGTH (str) <= 0)
5262 goto normal_expr;
5263
5264 str_copy_len = strlen (TREE_STRING_POINTER (str));
5265 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5266 goto normal_expr;
5267
5268 str_copy_len = TREE_STRING_LENGTH (str);
5269 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5270 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5271 {
5272 str_copy_len += STORE_MAX_PIECES - 1;
5273 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5274 }
5275 str_copy_len = MIN (str_copy_len, exp_len);
5276 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5277 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5278 MEM_ALIGN (target), false))
5279 goto normal_expr;
5280
5281 dest_mem = target;
5282
5283 dest_mem = store_by_pieces (dest_mem,
5284 str_copy_len, builtin_strncpy_read_str,
5285 CONST_CAST (char *,
5286 TREE_STRING_POINTER (str)),
5287 MEM_ALIGN (target), false,
5288 exp_len > str_copy_len ? 1 : 0);
5289 if (exp_len > str_copy_len)
5290 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5291 GEN_INT (exp_len - str_copy_len),
5292 BLOCK_OP_NORMAL);
5293 return NULL_RTX;
5294 }
5295 else
5296 {
5297 rtx tmp_target;
5298
5299 normal_expr:
5300 /* If we want to use a nontemporal store, force the value to
5301 register first. */
5302 tmp_target = nontemporal ? NULL_RTX : target;
5303 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5304 (call_param_p
5305 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5306 &alt_rtl);
5307 }
5308
5309 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5310 the same as that of TARGET, adjust the constant. This is needed, for
5311 example, in case it is a CONST_DOUBLE and we want only a word-sized
5312 value. */
5313 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5314 && TREE_CODE (exp) != ERROR_MARK
5315 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5316 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5317 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5318
5319 /* If the value was not generated in the target, store it there.
5320 Convert the value to TARGET's type first if necessary and emit the
5321 pending incrementations that have been queued when expanding EXP.
5322 Note that we cannot emit the whole queue blindly because this will
5323 effectively disable the POST_INC optimization later.
5324
5325 If TEMP and TARGET compare equal according to rtx_equal_p, but
5326 one or both of them are volatile memory refs, we have to distinguish
5327 two cases:
5328 - expand_expr has used TARGET. In this case, we must not generate
5329 another copy. This can be detected by TARGET being equal according
5330 to == .
5331 - expand_expr has not used TARGET - that means that the source just
5332 happens to have the same RTX form. Since temp will have been created
5333 by expand_expr, it will compare unequal according to == .
5334 We must generate a copy in this case, to reach the correct number
5335 of volatile memory references. */
5336
5337 if ((! rtx_equal_p (temp, target)
5338 || (temp != target && (side_effects_p (temp)
5339 || side_effects_p (target))))
5340 && TREE_CODE (exp) != ERROR_MARK
5341 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5342 but TARGET is not a valid memory reference, TEMP will differ
5343 from TARGET although it is really the same location. */
5344 && !(alt_rtl
5345 && rtx_equal_p (alt_rtl, target)
5346 && !side_effects_p (alt_rtl)
5347 && !side_effects_p (target))
5348 /* If there's nothing to copy, don't bother. Don't call
5349 expr_size unless necessary, because the expr_size hook of some
5350 front ends (C++) must not be given objects that are not
5351 supposed to be bit-copied or bit-initialized. */
5352 && expr_size (exp) != const0_rtx)
5353 {
5354 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5355 {
5356 if (GET_MODE (target) == BLKmode)
5357 {
5358 /* Handle calls that return BLKmode values in registers. */
5359 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5360 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5361 else
5362 store_bit_field (target,
5363 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5364 0, 0, 0, GET_MODE (temp), temp);
5365 }
5366 else
5367 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5368 }
5369
5370 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5371 {
5372 /* Handle copying a string constant into an array. The string
5373 constant may be shorter than the array. So copy just the string's
5374 actual length, and clear the rest. First get the size of the data
5375 type of the string, which is actually the size of the target. */
5376 rtx size = expr_size (exp);
5377
5378 if (CONST_INT_P (size)
5379 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5380 emit_block_move (target, temp, size,
5381 (call_param_p
5382 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5383 else
5384 {
5385 enum machine_mode pointer_mode
5386 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5387 enum machine_mode address_mode = get_address_mode (target);
5388
5389 /* Compute the size of the data to copy from the string. */
5390 tree copy_size
5391 = size_binop_loc (loc, MIN_EXPR,
5392 make_tree (sizetype, size),
5393 size_int (TREE_STRING_LENGTH (exp)));
5394 rtx copy_size_rtx
5395 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5396 (call_param_p
5397 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5398 rtx label = 0;
5399
5400 /* Copy that much. */
5401 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5402 TYPE_UNSIGNED (sizetype));
5403 emit_block_move (target, temp, copy_size_rtx,
5404 (call_param_p
5405 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5406
5407 /* Figure out how much is left in TARGET that we have to clear.
5408 Do all calculations in pointer_mode. */
5409 if (CONST_INT_P (copy_size_rtx))
5410 {
5411 size = plus_constant (address_mode, size,
5412 -INTVAL (copy_size_rtx));
5413 target = adjust_address (target, BLKmode,
5414 INTVAL (copy_size_rtx));
5415 }
5416 else
5417 {
5418 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5419 copy_size_rtx, NULL_RTX, 0,
5420 OPTAB_LIB_WIDEN);
5421
5422 if (GET_MODE (copy_size_rtx) != address_mode)
5423 copy_size_rtx = convert_to_mode (address_mode,
5424 copy_size_rtx,
5425 TYPE_UNSIGNED (sizetype));
5426
5427 target = offset_address (target, copy_size_rtx,
5428 highest_pow2_factor (copy_size));
5429 label = gen_label_rtx ();
5430 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5431 GET_MODE (size), 0, label);
5432 }
5433
5434 if (size != const0_rtx)
5435 clear_storage (target, size, BLOCK_OP_NORMAL);
5436
5437 if (label)
5438 emit_label (label);
5439 }
5440 }
5441 /* Handle calls that return values in multiple non-contiguous locations.
5442 The Irix 6 ABI has examples of this. */
5443 else if (GET_CODE (target) == PARALLEL)
5444 {
5445 if (GET_CODE (temp) == PARALLEL)
5446 emit_group_move (target, temp);
5447 else
5448 emit_group_load (target, temp, TREE_TYPE (exp),
5449 int_size_in_bytes (TREE_TYPE (exp)));
5450 }
5451 else if (GET_CODE (temp) == PARALLEL)
5452 emit_group_store (target, temp, TREE_TYPE (exp),
5453 int_size_in_bytes (TREE_TYPE (exp)));
5454 else if (GET_MODE (temp) == BLKmode)
5455 emit_block_move (target, temp, expr_size (exp),
5456 (call_param_p
5457 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5458 /* If we emit a nontemporal store, there is nothing else to do. */
5459 else if (nontemporal && emit_storent_insn (target, temp))
5460 ;
5461 else
5462 {
5463 temp = force_operand (temp, target);
5464 if (temp != target)
5465 emit_move_insn (target, temp);
5466 }
5467 }
5468
5469 return NULL_RTX;
5470 }
5471 \f
5472 /* Return true if field F of structure TYPE is a flexible array. */
5473
5474 static bool
5475 flexible_array_member_p (const_tree f, const_tree type)
5476 {
5477 const_tree tf;
5478
5479 tf = TREE_TYPE (f);
5480 return (DECL_CHAIN (f) == NULL
5481 && TREE_CODE (tf) == ARRAY_TYPE
5482 && TYPE_DOMAIN (tf)
5483 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5484 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5485 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5486 && int_size_in_bytes (type) >= 0);
5487 }
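
/* For illustration, in

       struct s { int len; char data[]; };

   the FIELD_DECL for DATA satisfies this predicate: it is the last
   field, its type is an ARRAY_TYPE with a zero lower bound and no
   upper bound, and the enclosing structure still has a known size.  */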
5488
5489 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5490 must have in order for it to completely initialize a value of type TYPE.
5491 Return -1 if the number isn't known.
5492
5493 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
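
/* For example, given

       struct p { int x; double d[4]; };

   calling this on that RECORD_TYPE with FOR_CTOR_P true returns 2 (a
   constructor needs two top-level elements), while with FOR_CTOR_P
   false it returns 5 (one scalar for X plus the four elements of D).  */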
5494
5495 static HOST_WIDE_INT
5496 count_type_elements (const_tree type, bool for_ctor_p)
5497 {
5498 switch (TREE_CODE (type))
5499 {
5500 case ARRAY_TYPE:
5501 {
5502 tree nelts;
5503
5504 nelts = array_type_nelts (type);
5505 if (nelts && tree_fits_uhwi_p (nelts))
5506 {
5507 unsigned HOST_WIDE_INT n;
5508
5509 n = tree_to_uhwi (nelts) + 1;
5510 if (n == 0 || for_ctor_p)
5511 return n;
5512 else
5513 return n * count_type_elements (TREE_TYPE (type), false);
5514 }
5515 return for_ctor_p ? -1 : 1;
5516 }
5517
5518 case RECORD_TYPE:
5519 {
5520 unsigned HOST_WIDE_INT n;
5521 tree f;
5522
5523 n = 0;
5524 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5525 if (TREE_CODE (f) == FIELD_DECL)
5526 {
5527 if (!for_ctor_p)
5528 n += count_type_elements (TREE_TYPE (f), false);
5529 else if (!flexible_array_member_p (f, type))
5530 /* Don't count flexible arrays, which are not supposed
5531 to be initialized. */
5532 n += 1;
5533 }
5534
5535 return n;
5536 }
5537
5538 case UNION_TYPE:
5539 case QUAL_UNION_TYPE:
5540 {
5541 tree f;
5542 HOST_WIDE_INT n, m;
5543
5544 gcc_assert (!for_ctor_p);
5545 /* Estimate the number of scalars in each field and pick the
5546 maximum. Other estimates would do instead; the idea is simply
5547 to make sure that the estimate is not sensitive to the ordering
5548 of the fields. */
5549 n = 1;
5550 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5551 if (TREE_CODE (f) == FIELD_DECL)
5552 {
5553 m = count_type_elements (TREE_TYPE (f), false);
5554 /* If the field doesn't span the whole union, add an extra
5555 scalar for the rest. */
5556 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5557 TYPE_SIZE (type)) != 1)
5558 m++;
5559 if (n < m)
5560 n = m;
5561 }
5562 return n;
5563 }
5564
5565 case COMPLEX_TYPE:
5566 return 2;
5567
5568 case VECTOR_TYPE:
5569 return TYPE_VECTOR_SUBPARTS (type);
5570
5571 case INTEGER_TYPE:
5572 case REAL_TYPE:
5573 case FIXED_POINT_TYPE:
5574 case ENUMERAL_TYPE:
5575 case BOOLEAN_TYPE:
5576 case POINTER_TYPE:
5577 case OFFSET_TYPE:
5578 case REFERENCE_TYPE:
5579 case NULLPTR_TYPE:
5580 return 1;
5581
5582 case ERROR_MARK:
5583 return 0;
5584
5585 case VOID_TYPE:
5586 case METHOD_TYPE:
5587 case FUNCTION_TYPE:
5588 case LANG_TYPE:
5589 default:
5590 gcc_unreachable ();
5591 }
5592 }
5593
5594 /* Helper for categorize_ctor_elements. Identical interface. */
5595
5596 static bool
5597 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5598 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5599 {
5600 unsigned HOST_WIDE_INT idx;
5601 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5602 tree value, purpose, elt_type;
5603
5604 /* Whether CTOR is a valid constant initializer, in accordance with what
5605 initializer_constant_valid_p does. If inferred from the constructor
5606 elements, true until proven otherwise. */
5607 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5608 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5609
5610 nz_elts = 0;
5611 init_elts = 0;
5612 num_fields = 0;
5613 elt_type = NULL_TREE;
5614
5615 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5616 {
5617 HOST_WIDE_INT mult = 1;
5618
5619 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5620 {
5621 tree lo_index = TREE_OPERAND (purpose, 0);
5622 tree hi_index = TREE_OPERAND (purpose, 1);
5623
5624 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5625 mult = (tree_to_uhwi (hi_index)
5626 - tree_to_uhwi (lo_index) + 1);
5627 }
5628 num_fields += mult;
5629 elt_type = TREE_TYPE (value);
5630
5631 switch (TREE_CODE (value))
5632 {
5633 case CONSTRUCTOR:
5634 {
5635 HOST_WIDE_INT nz = 0, ic = 0;
5636
5637 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5638 p_complete);
5639
5640 nz_elts += mult * nz;
5641 init_elts += mult * ic;
5642
5643 if (const_from_elts_p && const_p)
5644 const_p = const_elt_p;
5645 }
5646 break;
5647
5648 case INTEGER_CST:
5649 case REAL_CST:
5650 case FIXED_CST:
5651 if (!initializer_zerop (value))
5652 nz_elts += mult;
5653 init_elts += mult;
5654 break;
5655
5656 case STRING_CST:
5657 nz_elts += mult * TREE_STRING_LENGTH (value);
5658 init_elts += mult * TREE_STRING_LENGTH (value);
5659 break;
5660
5661 case COMPLEX_CST:
5662 if (!initializer_zerop (TREE_REALPART (value)))
5663 nz_elts += mult;
5664 if (!initializer_zerop (TREE_IMAGPART (value)))
5665 nz_elts += mult;
5666 init_elts += mult;
5667 break;
5668
5669 case VECTOR_CST:
5670 {
5671 unsigned i;
5672 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5673 {
5674 tree v = VECTOR_CST_ELT (value, i);
5675 if (!initializer_zerop (v))
5676 nz_elts += mult;
5677 init_elts += mult;
5678 }
5679 }
5680 break;
5681
5682 default:
5683 {
5684 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5685 nz_elts += mult * tc;
5686 init_elts += mult * tc;
5687
5688 if (const_from_elts_p && const_p)
5689 const_p = initializer_constant_valid_p (value, elt_type)
5690 != NULL_TREE;
5691 }
5692 break;
5693 }
5694 }
5695
5696 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5697 num_fields, elt_type))
5698 *p_complete = false;
5699
5700 *p_nz_elts += nz_elts;
5701 *p_init_elts += init_elts;
5702
5703 return const_p;
5704 }
5705
5706 /* Examine CTOR to discover:
5707 * how many scalar fields are set to nonzero values,
5708 and place it in *P_NZ_ELTS;
5709 * how many scalar fields in total are in CTOR,
5710 and place it in *P_ELT_COUNT.
5711 * whether the constructor is complete -- in the sense that every
5712 meaningful byte is explicitly given a value --
5713 and place it in *P_COMPLETE.
5714
5715 Return whether or not CTOR is a valid static constant initializer, the same
5716 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5717
5718 bool
5719 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5720 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5721 {
5722 *p_nz_elts = 0;
5723 *p_init_elts = 0;
5724 *p_complete = true;
5725
5726 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5727 }
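
/* For example, given a constructor for "int a[4]" that supplies the
   three explicit values { 1, 0, 2 }, this sets *P_NZ_ELTS to 2 (two
   nonzero values), *P_INIT_ELTS to 3 (three values given) and
   *P_COMPLETE to false, since only three of the four elements are
   initialized explicitly.  */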
5728
5729 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5730 of which had type LAST_TYPE. Each element was itself a complete
5731 initializer, in the sense that every meaningful byte was explicitly
5732 given a value. Return true if the same is true for the constructor
5733 as a whole. */
5734
5735 bool
5736 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5737 const_tree last_type)
5738 {
5739 if (TREE_CODE (type) == UNION_TYPE
5740 || TREE_CODE (type) == QUAL_UNION_TYPE)
5741 {
5742 if (num_elts == 0)
5743 return false;
5744
5745 gcc_assert (num_elts == 1 && last_type);
5746
5747 /* ??? We could look at each element of the union, and find the
5748 largest element, which would avoid comparing the size of the
5749 initialized element against any tail padding in the union.
5750 Doesn't seem worth the effort... */
5751 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5752 }
5753
5754 return count_type_elements (type, true) == num_elts;
5755 }
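
/* For instance, a constructor for "int a[4]" is complete at this level
   only if it supplies exactly four elements; for a union, a single
   element whose type has the same size as the union is enough.  */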
5756
5757 /* Return 1 if EXP contains mostly (3/4) zeros. */
5758
5759 static int
5760 mostly_zeros_p (const_tree exp)
5761 {
5762 if (TREE_CODE (exp) == CONSTRUCTOR)
5763 {
5764 HOST_WIDE_INT nz_elts, init_elts;
5765 bool complete_p;
5766
5767 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5768 return !complete_p || nz_elts < init_elts / 4;
5769 }
5770
5771 return initializer_zerop (exp);
5772 }
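
/* For example, a complete constructor for "int a[8]" with a single
   nonzero element is mostly zeros: 1 nonzero value out of 8
   initializers is below the 1/4 threshold checked above.  */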
5773
5774 /* Return 1 if EXP contains all zeros. */
5775
5776 static int
5777 all_zeros_p (const_tree exp)
5778 {
5779 if (TREE_CODE (exp) == CONSTRUCTOR)
5780 {
5781 HOST_WIDE_INT nz_elts, init_elts;
5782 bool complete_p;
5783
5784 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5785 return nz_elts == 0;
5786 }
5787
5788 return initializer_zerop (exp);
5789 }
5790 \f
5791 /* Helper function for store_constructor.
5792 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5793 CLEARED is as for store_constructor.
5794 ALIAS_SET is the alias set to use for any stores.
5795
5796 This provides a recursive shortcut back to store_constructor when it isn't
5797 necessary to go through store_field. This is so that we can pass through
5798 the cleared field to let store_constructor know that we may not have to
5799 clear a substructure if the outer structure has already been cleared. */
5800
5801 static void
5802 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5803 HOST_WIDE_INT bitpos, enum machine_mode mode,
5804 tree exp, int cleared, alias_set_type alias_set)
5805 {
5806 if (TREE_CODE (exp) == CONSTRUCTOR
5807 /* We can only call store_constructor recursively if the size and
5808 bit position are on a byte boundary. */
5809 && bitpos % BITS_PER_UNIT == 0
5810 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5811 /* If we have a nonzero bitpos for a register target, then we just
5812 let store_field do the bitfield handling. This is unlikely to
5813 generate unnecessary clear instructions anyway. */
5814 && (bitpos == 0 || MEM_P (target)))
5815 {
5816 if (MEM_P (target))
5817 target
5818 = adjust_address (target,
5819 GET_MODE (target) == BLKmode
5820 || 0 != (bitpos
5821 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5822 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5823
5824
5825 /* Update the alias set, if required. */
5826 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5827 && MEM_ALIAS_SET (target) != 0)
5828 {
5829 target = copy_rtx (target);
5830 set_mem_alias_set (target, alias_set);
5831 }
5832
5833 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5834 }
5835 else
5836 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5837 }
5838
5839
5840 /* Returns the number of FIELD_DECLs in TYPE. */
5841
5842 static int
5843 fields_length (const_tree type)
5844 {
5845 tree t = TYPE_FIELDS (type);
5846 int count = 0;
5847
5848 for (; t; t = DECL_CHAIN (t))
5849 if (TREE_CODE (t) == FIELD_DECL)
5850 ++count;
5851
5852 return count;
5853 }
5854
5855
5856 /* Store the value of constructor EXP into the rtx TARGET.
5857 TARGET is either a REG or a MEM; we know it cannot conflict, since
5858 safe_from_p has been called.
5859 CLEARED is true if TARGET is known to have been zero'd.
5860 SIZE is the number of bytes of TARGET we are allowed to modify: this
5861 may not be the same as the size of EXP if we are assigning to a field
5862 which has been packed to exclude padding bits. */
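
/* As an illustration, for a local array initialized like

       int a[100] = { [3] = 7 };

   the constructor covers only one of the hundred elements, so the
   ARRAY_TYPE case below clears the whole of TARGET once and then
   stores just the single nonzero element, rather than emitting a
   separate store for every element.  */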
5863
5864 static void
5865 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5866 {
5867 tree type = TREE_TYPE (exp);
5868 #ifdef WORD_REGISTER_OPERATIONS
5869 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5870 #endif
5871
5872 switch (TREE_CODE (type))
5873 {
5874 case RECORD_TYPE:
5875 case UNION_TYPE:
5876 case QUAL_UNION_TYPE:
5877 {
5878 unsigned HOST_WIDE_INT idx;
5879 tree field, value;
5880
5881 /* If size is zero or the target is already cleared, no clearing is needed. */
5882 if (size == 0 || cleared)
5883 cleared = 1;
5884 /* We either clear the aggregate or indicate the value is dead. */
5885 else if ((TREE_CODE (type) == UNION_TYPE
5886 || TREE_CODE (type) == QUAL_UNION_TYPE)
5887 && ! CONSTRUCTOR_ELTS (exp))
5888 /* If the constructor is empty, clear the union. */
5889 {
5890 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5891 cleared = 1;
5892 }
5893
5894 /* If we are building a static constructor into a register,
5895 set the initial value as zero so we can fold the value into
5896 a constant. But if more than one register is involved,
5897 this probably loses. */
5898 else if (REG_P (target) && TREE_STATIC (exp)
5899 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5900 {
5901 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5902 cleared = 1;
5903 }
5904
5905 /* If the constructor has fewer fields than the structure or
5906 if we are initializing the structure to mostly zeros, clear
5907 the whole structure first. Don't do this if TARGET is a
5908 register whose mode size isn't equal to SIZE since
5909 clear_storage can't handle this case. */
5910 else if (size > 0
5911 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5912 != fields_length (type))
5913 || mostly_zeros_p (exp))
5914 && (!REG_P (target)
5915 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5916 == size)))
5917 {
5918 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5919 cleared = 1;
5920 }
5921
5922 if (REG_P (target) && !cleared)
5923 emit_clobber (target);
5924
5925 /* Store each element of the constructor into the
5926 corresponding field of TARGET. */
5927 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5928 {
5929 enum machine_mode mode;
5930 HOST_WIDE_INT bitsize;
5931 HOST_WIDE_INT bitpos = 0;
5932 tree offset;
5933 rtx to_rtx = target;
5934
5935 /* Just ignore missing fields. We cleared the whole
5936 structure, above, if any fields are missing. */
5937 if (field == 0)
5938 continue;
5939
5940 if (cleared && initializer_zerop (value))
5941 continue;
5942
5943 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5944 bitsize = tree_to_uhwi (DECL_SIZE (field));
5945 else
5946 bitsize = -1;
5947
5948 mode = DECL_MODE (field);
5949 if (DECL_BIT_FIELD (field))
5950 mode = VOIDmode;
5951
5952 offset = DECL_FIELD_OFFSET (field);
5953 if (tree_fits_shwi_p (offset)
5954 && tree_fits_shwi_p (bit_position (field)))
5955 {
5956 bitpos = int_bit_position (field);
5957 offset = 0;
5958 }
5959 else
5960 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5961
5962 if (offset)
5963 {
5964 enum machine_mode address_mode;
5965 rtx offset_rtx;
5966
5967 offset
5968 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5969 make_tree (TREE_TYPE (exp),
5970 target));
5971
5972 offset_rtx = expand_normal (offset);
5973 gcc_assert (MEM_P (to_rtx));
5974
5975 address_mode = get_address_mode (to_rtx);
5976 if (GET_MODE (offset_rtx) != address_mode)
5977 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5978
5979 to_rtx = offset_address (to_rtx, offset_rtx,
5980 highest_pow2_factor (offset));
5981 }
5982
5983 #ifdef WORD_REGISTER_OPERATIONS
5984 /* If this initializes a field that is smaller than a
5985 word, at the start of a word, try to widen it to a full
5986 word. This special case allows us to output C++ member
5987 function initializations in a form that the optimizers
5988 can understand. */
5989 if (REG_P (target)
5990 && bitsize < BITS_PER_WORD
5991 && bitpos % BITS_PER_WORD == 0
5992 && GET_MODE_CLASS (mode) == MODE_INT
5993 && TREE_CODE (value) == INTEGER_CST
5994 && exp_size >= 0
5995 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5996 {
5997 tree type = TREE_TYPE (value);
5998
5999 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6000 {
6001 type = lang_hooks.types.type_for_mode
6002 (word_mode, TYPE_UNSIGNED (type));
6003 value = fold_convert (type, value);
6004 }
6005
6006 if (BYTES_BIG_ENDIAN)
6007 value
6008 = fold_build2 (LSHIFT_EXPR, type, value,
6009 build_int_cst (type,
6010 BITS_PER_WORD - bitsize));
6011 bitsize = BITS_PER_WORD;
6012 mode = word_mode;
6013 }
6014 #endif
6015
6016 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6017 && DECL_NONADDRESSABLE_P (field))
6018 {
6019 to_rtx = copy_rtx (to_rtx);
6020 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6021 }
6022
6023 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6024 value, cleared,
6025 get_alias_set (TREE_TYPE (field)));
6026 }
6027 break;
6028 }
6029 case ARRAY_TYPE:
6030 {
6031 tree value, index;
6032 unsigned HOST_WIDE_INT i;
6033 int need_to_clear;
6034 tree domain;
6035 tree elttype = TREE_TYPE (type);
6036 int const_bounds_p;
6037 HOST_WIDE_INT minelt = 0;
6038 HOST_WIDE_INT maxelt = 0;
6039
6040 domain = TYPE_DOMAIN (type);
6041 const_bounds_p = (TYPE_MIN_VALUE (domain)
6042 && TYPE_MAX_VALUE (domain)
6043 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6044 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6045
6046 /* If we have constant bounds for the range of the type, get them. */
6047 if (const_bounds_p)
6048 {
6049 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6050 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6051 }
6052
6053 /* If the constructor has fewer elements than the array, clear
6054 the whole array first. Similarly if this is a static
6055 constructor of a non-BLKmode object. */
6056 if (cleared)
6057 need_to_clear = 0;
6058 else if (REG_P (target) && TREE_STATIC (exp))
6059 need_to_clear = 1;
6060 else
6061 {
6062 unsigned HOST_WIDE_INT idx;
6063 tree index, value;
6064 HOST_WIDE_INT count = 0, zero_count = 0;
6065 need_to_clear = ! const_bounds_p;
6066
6067 /* This loop is a more accurate version of the loop in
6068 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6069 is also needed to check for missing elements. */
6070 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6071 {
6072 HOST_WIDE_INT this_node_count;
6073
6074 if (need_to_clear)
6075 break;
6076
6077 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6078 {
6079 tree lo_index = TREE_OPERAND (index, 0);
6080 tree hi_index = TREE_OPERAND (index, 1);
6081
6082 if (! tree_fits_uhwi_p (lo_index)
6083 || ! tree_fits_uhwi_p (hi_index))
6084 {
6085 need_to_clear = 1;
6086 break;
6087 }
6088
6089 this_node_count = (tree_to_uhwi (hi_index)
6090 - tree_to_uhwi (lo_index) + 1);
6091 }
6092 else
6093 this_node_count = 1;
6094
6095 count += this_node_count;
6096 if (mostly_zeros_p (value))
6097 zero_count += this_node_count;
6098 }
6099
6100 /* Clear the entire array first if there are any missing
6101 elements, or if the incidence of zero elements is >=
6102 75%. */
6103 if (! need_to_clear
6104 && (count < maxelt - minelt + 1
6105 || 4 * zero_count >= 3 * count))
6106 need_to_clear = 1;
6107 }
6108
6109 if (need_to_clear && size > 0)
6110 {
6111 if (REG_P (target))
6112 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6113 else
6114 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6115 cleared = 1;
6116 }
6117
6118 if (!cleared && REG_P (target))
6119 /* Inform later passes that the old value is dead. */
6120 emit_clobber (target);
6121
6122 /* Store each element of the constructor into the
6123 corresponding element of TARGET, determined by counting the
6124 elements. */
6125 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6126 {
6127 enum machine_mode mode;
6128 HOST_WIDE_INT bitsize;
6129 HOST_WIDE_INT bitpos;
6130 rtx xtarget = target;
6131
6132 if (cleared && initializer_zerop (value))
6133 continue;
6134
6135 mode = TYPE_MODE (elttype);
6136 if (mode == BLKmode)
6137 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6138 ? tree_to_uhwi (TYPE_SIZE (elttype))
6139 : -1);
6140 else
6141 bitsize = GET_MODE_BITSIZE (mode);
6142
6143 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6144 {
6145 tree lo_index = TREE_OPERAND (index, 0);
6146 tree hi_index = TREE_OPERAND (index, 1);
6147 rtx index_r, pos_rtx;
6148 HOST_WIDE_INT lo, hi, count;
6149 tree position;
6150
6151 /* If the range is constant and "small", unroll the loop. */
6152 if (const_bounds_p
6153 && tree_fits_shwi_p (lo_index)
6154 && tree_fits_shwi_p (hi_index)
6155 && (lo = tree_to_shwi (lo_index),
6156 hi = tree_to_shwi (hi_index),
6157 count = hi - lo + 1,
6158 (!MEM_P (target)
6159 || count <= 2
6160 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6161 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6162 <= 40 * 8)))))
6163 {
6164 lo -= minelt; hi -= minelt;
6165 for (; lo <= hi; lo++)
6166 {
6167 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6168
6169 if (MEM_P (target)
6170 && !MEM_KEEP_ALIAS_SET_P (target)
6171 && TREE_CODE (type) == ARRAY_TYPE
6172 && TYPE_NONALIASED_COMPONENT (type))
6173 {
6174 target = copy_rtx (target);
6175 MEM_KEEP_ALIAS_SET_P (target) = 1;
6176 }
6177
6178 store_constructor_field
6179 (target, bitsize, bitpos, mode, value, cleared,
6180 get_alias_set (elttype));
6181 }
6182 }
6183 else
6184 {
6185 rtx loop_start = gen_label_rtx ();
6186 rtx loop_end = gen_label_rtx ();
6187 tree exit_cond;
6188
6189 expand_normal (hi_index);
6190
6191 index = build_decl (EXPR_LOCATION (exp),
6192 VAR_DECL, NULL_TREE, domain);
6193 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6194 SET_DECL_RTL (index, index_r);
6195 store_expr (lo_index, index_r, 0, false);
6196
6197 /* Build the head of the loop. */
6198 do_pending_stack_adjust ();
6199 emit_label (loop_start);
6200
6201 /* Assign value to element index. */
6202 position =
6203 fold_convert (ssizetype,
6204 fold_build2 (MINUS_EXPR,
6205 TREE_TYPE (index),
6206 index,
6207 TYPE_MIN_VALUE (domain)));
6208
6209 position =
6210 size_binop (MULT_EXPR, position,
6211 fold_convert (ssizetype,
6212 TYPE_SIZE_UNIT (elttype)));
6213
6214 pos_rtx = expand_normal (position);
6215 xtarget = offset_address (target, pos_rtx,
6216 highest_pow2_factor (position));
6217 xtarget = adjust_address (xtarget, mode, 0);
6218 if (TREE_CODE (value) == CONSTRUCTOR)
6219 store_constructor (value, xtarget, cleared,
6220 bitsize / BITS_PER_UNIT);
6221 else
6222 store_expr (value, xtarget, 0, false);
6223
6224 /* Generate a conditional jump to exit the loop. */
6225 exit_cond = build2 (LT_EXPR, integer_type_node,
6226 index, hi_index);
6227 jumpif (exit_cond, loop_end, -1);
6228
6229 /* Update the loop counter, and jump to the head of
6230 the loop. */
6231 expand_assignment (index,
6232 build2 (PLUS_EXPR, TREE_TYPE (index),
6233 index, integer_one_node),
6234 false);
6235
6236 emit_jump (loop_start);
6237
6238 /* Build the end of the loop. */
6239 emit_label (loop_end);
6240 }
6241 }
6242 else if ((index != 0 && ! tree_fits_shwi_p (index))
6243 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6244 {
6245 tree position;
6246
6247 if (index == 0)
6248 index = ssize_int (1);
6249
6250 if (minelt)
6251 index = fold_convert (ssizetype,
6252 fold_build2 (MINUS_EXPR,
6253 TREE_TYPE (index),
6254 index,
6255 TYPE_MIN_VALUE (domain)));
6256
6257 position =
6258 size_binop (MULT_EXPR, index,
6259 fold_convert (ssizetype,
6260 TYPE_SIZE_UNIT (elttype)));
6261 xtarget = offset_address (target,
6262 expand_normal (position),
6263 highest_pow2_factor (position));
6264 xtarget = adjust_address (xtarget, mode, 0);
6265 store_expr (value, xtarget, 0, false);
6266 }
6267 else
6268 {
6269 if (index != 0)
6270 bitpos = ((tree_to_shwi (index) - minelt)
6271 * tree_to_uhwi (TYPE_SIZE (elttype)));
6272 else
6273 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6274
6275 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6276 && TREE_CODE (type) == ARRAY_TYPE
6277 && TYPE_NONALIASED_COMPONENT (type))
6278 {
6279 target = copy_rtx (target);
6280 MEM_KEEP_ALIAS_SET_P (target) = 1;
6281 }
6282 store_constructor_field (target, bitsize, bitpos, mode, value,
6283 cleared, get_alias_set (elttype));
6284 }
6285 }
6286 break;
6287 }
6288
6289 case VECTOR_TYPE:
6290 {
6291 unsigned HOST_WIDE_INT idx;
6292 constructor_elt *ce;
6293 int i;
6294 int need_to_clear;
6295 int icode = CODE_FOR_nothing;
6296 tree elttype = TREE_TYPE (type);
6297 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6298 enum machine_mode eltmode = TYPE_MODE (elttype);
6299 HOST_WIDE_INT bitsize;
6300 HOST_WIDE_INT bitpos;
6301 rtvec vector = NULL;
6302 unsigned n_elts;
6303 alias_set_type alias;
6304
6305 gcc_assert (eltmode != BLKmode);
6306
6307 n_elts = TYPE_VECTOR_SUBPARTS (type);
6308 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6309 {
6310 enum machine_mode mode = GET_MODE (target);
6311
6312 icode = (int) optab_handler (vec_init_optab, mode);
6313 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6314 if (icode != CODE_FOR_nothing)
6315 {
6316 tree value;
6317
6318 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6319 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6320 {
6321 icode = CODE_FOR_nothing;
6322 break;
6323 }
6324 }
6325 if (icode != CODE_FOR_nothing)
6326 {
6327 unsigned int i;
6328
6329 vector = rtvec_alloc (n_elts);
6330 for (i = 0; i < n_elts; i++)
6331 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6332 }
6333 }
6334
6335 /* If the constructor has fewer elements than the vector,
6336 clear the whole vector first. Similarly if this is a static
6337 constructor of a non-BLKmode object. */
6338 if (cleared)
6339 need_to_clear = 0;
6340 else if (REG_P (target) && TREE_STATIC (exp))
6341 need_to_clear = 1;
6342 else
6343 {
6344 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6345 tree value;
6346
6347 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6348 {
6349 int n_elts_here = tree_to_uhwi
6350 (int_const_binop (TRUNC_DIV_EXPR,
6351 TYPE_SIZE (TREE_TYPE (value)),
6352 TYPE_SIZE (elttype)));
6353
6354 count += n_elts_here;
6355 if (mostly_zeros_p (value))
6356 zero_count += n_elts_here;
6357 }
6358
6359 /* Clear the entire vector first if there are any missing elements,
6360 or if the incidence of zero elements is >= 75%. */
6361 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6362 }
6363
6364 if (need_to_clear && size > 0 && !vector)
6365 {
6366 if (REG_P (target))
6367 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6368 else
6369 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6370 cleared = 1;
6371 }
6372
6373 /* Inform later passes that the old value is dead. */
6374 if (!cleared && !vector && REG_P (target))
6375 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6376
6377 if (MEM_P (target))
6378 alias = MEM_ALIAS_SET (target);
6379 else
6380 alias = get_alias_set (elttype);
6381
6382 /* Store each element of the constructor into the corresponding
6383 element of TARGET, determined by counting the elements. */
6384 for (idx = 0, i = 0;
6385 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6386 idx++, i += bitsize / elt_size)
6387 {
6388 HOST_WIDE_INT eltpos;
6389 tree value = ce->value;
6390
6391 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6392 if (cleared && initializer_zerop (value))
6393 continue;
6394
6395 if (ce->index)
6396 eltpos = tree_to_uhwi (ce->index);
6397 else
6398 eltpos = i;
6399
6400 if (vector)
6401 {
6402 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6403 elements. */
6404 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6405 RTVEC_ELT (vector, eltpos)
6406 = expand_normal (value);
6407 }
6408 else
6409 {
6410 enum machine_mode value_mode =
6411 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6412 ? TYPE_MODE (TREE_TYPE (value))
6413 : eltmode;
6414 bitpos = eltpos * elt_size;
6415 store_constructor_field (target, bitsize, bitpos, value_mode,
6416 value, cleared, alias);
6417 }
6418 }
6419
6420 if (vector)
6421 emit_insn (GEN_FCN (icode)
6422 (target,
6423 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6424 break;
6425 }
6426
6427 default:
6428 gcc_unreachable ();
6429 }
6430 }
6431
6432 /* Store the value of EXP (an expression tree)
6433 into a subfield of TARGET which has mode MODE and occupies
6434 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6435 If MODE is VOIDmode, it means that we are storing into a bit-field.
6436
6437 BITREGION_START is bitpos of the first bitfield in this region.
6438 BITREGION_END is the bitpos of the ending bitfield in this region.
6439 These two fields are 0, if the C++ memory model does not apply,
6440 or we are not interested in keeping track of bitfield regions.
6441
6442 Always return const0_rtx unless we have something particular to
6443 return.
6444
6445 ALIAS_SET is the alias set for the destination. This value will
6446 (in general) be different from that for TARGET, since TARGET is a
6447 reference to the containing structure.
6448
6449 If NONTEMPORAL is true, try generating a nontemporal store. */
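
/* For example, an assignment to a bit-field member such as

       struct s { unsigned int f : 3; } x;
       x.f = 5;

   typically arrives here with MODE == VOIDmode and BITSIZE == 3, and
   is handled by the bit-field path below, which ends in a call to
   store_bit_field.  */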
6450
6451 static rtx
6452 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6453 unsigned HOST_WIDE_INT bitregion_start,
6454 unsigned HOST_WIDE_INT bitregion_end,
6455 enum machine_mode mode, tree exp,
6456 alias_set_type alias_set, bool nontemporal)
6457 {
6458 if (TREE_CODE (exp) == ERROR_MARK)
6459 return const0_rtx;
6460
6461 /* If we have nothing to store, do nothing unless the expression has
6462 side-effects. */
6463 if (bitsize == 0)
6464 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6465
6466 if (GET_CODE (target) == CONCAT)
6467 {
6468 /* We're storing into a struct containing a single __complex. */
6469
6470 gcc_assert (!bitpos);
6471 return store_expr (exp, target, 0, nontemporal);
6472 }
6473
6474 /* If the structure is in a register or if the component
6475 is a bit field, we cannot use addressing to access it.
6476 Use bit-field techniques or SUBREG to store in it. */
6477
6478 if (mode == VOIDmode
6479 || (mode != BLKmode && ! direct_store[(int) mode]
6480 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6481 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6482 || REG_P (target)
6483 || GET_CODE (target) == SUBREG
6484 /* If the field isn't aligned enough to store as an ordinary memref,
6485 store it as a bit field. */
6486 || (mode != BLKmode
6487 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6488 || bitpos % GET_MODE_ALIGNMENT (mode))
6489 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6490 || (bitpos % BITS_PER_UNIT != 0)))
6491 || (bitsize >= 0 && mode != BLKmode
6492 && GET_MODE_BITSIZE (mode) > bitsize)
6493 /* If the RHS and field are a constant size and the size of the
6494 RHS isn't the same size as the bitfield, we must use bitfield
6495 operations. */
6496 || (bitsize >= 0
6497 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6498 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6499 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6500 decl we must use bitfield operations. */
6501 || (bitsize >= 0
6502 && TREE_CODE (exp) == MEM_REF
6503 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6504 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6505 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6506 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6507 {
6508 rtx temp;
6509 gimple nop_def;
6510
6511 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6512 implies a mask operation. If the precision is the same size as
6513 the field we're storing into, that mask is redundant. This is
6514 particularly common with bit field assignments generated by the
6515 C front end. */
6516 nop_def = get_def_for_expr (exp, NOP_EXPR);
6517 if (nop_def)
6518 {
6519 tree type = TREE_TYPE (exp);
6520 if (INTEGRAL_TYPE_P (type)
6521 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6522 && bitsize == TYPE_PRECISION (type))
6523 {
6524 tree op = gimple_assign_rhs1 (nop_def);
6525 type = TREE_TYPE (op);
6526 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6527 exp = op;
6528 }
6529 }
6530
6531 temp = expand_normal (exp);
6532
6533 /* If BITSIZE is narrower than the size of the type of EXP
6534 we will be narrowing TEMP. Normally, what's wanted are the
6535 low-order bits. However, if EXP's type is a record and this is a
6536 big-endian machine, we want the upper BITSIZE bits. */
6537 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6538 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6539 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6540 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6541 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6542 NULL_RTX, 1);
6543
6544 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6545 if (mode != VOIDmode && mode != BLKmode
6546 && mode != TYPE_MODE (TREE_TYPE (exp)))
6547 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6548
6549 /* If the modes of TEMP and TARGET are both BLKmode, both
6550 must be in memory and BITPOS must be aligned on a byte
6551 boundary. If so, we simply do a block copy. Likewise
6552 for a BLKmode-like TARGET. */
6553 if (GET_MODE (temp) == BLKmode
6554 && (GET_MODE (target) == BLKmode
6555 || (MEM_P (target)
6556 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6557 && (bitpos % BITS_PER_UNIT) == 0
6558 && (bitsize % BITS_PER_UNIT) == 0)))
6559 {
6560 gcc_assert (MEM_P (target) && MEM_P (temp)
6561 && (bitpos % BITS_PER_UNIT) == 0);
6562
6563 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6564 emit_block_move (target, temp,
6565 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6566 / BITS_PER_UNIT),
6567 BLOCK_OP_NORMAL);
6568
6569 return const0_rtx;
6570 }
6571
6572 /* Handle calls that return values in multiple non-contiguous locations.
6573 The Irix 6 ABI has examples of this. */
6574 if (GET_CODE (temp) == PARALLEL)
6575 {
6576 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6577 rtx temp_target;
6578 if (mode == BLKmode)
6579 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6580 temp_target = gen_reg_rtx (mode);
6581 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6582 temp = temp_target;
6583 }
6584 else if (mode == BLKmode)
6585 {
6586 /* Handle calls that return BLKmode values in registers. */
6587 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6588 {
6589 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6590 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6591 temp = temp_target;
6592 }
6593 else
6594 {
6595 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6596 rtx temp_target;
6597 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6598 temp_target = gen_reg_rtx (mode);
6599 temp_target
6600 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6601 temp_target, mode, mode);
6602 temp = temp_target;
6603 }
6604 }
6605
6606 /* Store the value in the bitfield. */
6607 store_bit_field (target, bitsize, bitpos,
6608 bitregion_start, bitregion_end,
6609 mode, temp);
6610
6611 return const0_rtx;
6612 }
6613 else
6614 {
6615 /* Now build a reference to just the desired component. */
6616 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6617
6618 if (to_rtx == target)
6619 to_rtx = copy_rtx (to_rtx);
6620
6621 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6622 set_mem_alias_set (to_rtx, alias_set);
6623
6624 return store_expr (exp, to_rtx, 0, nontemporal);
6625 }
6626 }
6627 \f
6628 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6629 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6630 codes and find the ultimate containing object, which we return.
6631
6632 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6633 bit position, and *PUNSIGNEDP to the signedness of the field.
6634 If the position of the field is variable, we store a tree
6635 giving the variable offset (in units) in *POFFSET.
6636 This offset is in addition to the bit position.
6637 If the position is not variable, we store 0 in *POFFSET.
6638
6639 If any of the extraction expressions is volatile,
6640 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6641
6642 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6643 Otherwise, it is a mode that can be used to access the field.
6644
6645 If the field describes a variable-sized object, *PMODE is set to
6646 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6647 this case, but the address of the object can be found. */
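
/* A minimal caller sketch (the variable names are only illustrative):

       HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       enum machine_mode mode;
       int unsignedp, volatilep = 0;

       tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode, &unsignedp, &volatilep);

   For a reference such as A.B.C[I] with a variable index I, BASE is A,
   BITPOS holds the constant part of the displacement in bits, OFFSET
   holds the variable part in units, and BITSIZE is the size in bits of
   the accessed object.  */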
6648
6649 tree
6650 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6651 HOST_WIDE_INT *pbitpos, tree *poffset,
6652 enum machine_mode *pmode, int *punsignedp,
6653 int *pvolatilep)
6654 {
6655 tree size_tree = 0;
6656 enum machine_mode mode = VOIDmode;
6657 bool blkmode_bitfield = false;
6658 tree offset = size_zero_node;
6659 double_int bit_offset = double_int_zero;
6660
6661 /* First get the mode, signedness, and size. We do this from just the
6662 outermost expression. */
6663 *pbitsize = -1;
6664 if (TREE_CODE (exp) == COMPONENT_REF)
6665 {
6666 tree field = TREE_OPERAND (exp, 1);
6667 size_tree = DECL_SIZE (field);
6668 if (flag_strict_volatile_bitfields > 0
6669 && TREE_THIS_VOLATILE (exp)
6670 && DECL_BIT_FIELD_TYPE (field)
6671 && DECL_MODE (field) != BLKmode)
6672 /* Volatile bitfields should be accessed in the mode of the
6673 field's type, not the mode computed based on the bit
6674 size. */
6675 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6676 else if (!DECL_BIT_FIELD (field))
6677 mode = DECL_MODE (field);
6678 else if (DECL_MODE (field) == BLKmode)
6679 blkmode_bitfield = true;
6680
6681 *punsignedp = DECL_UNSIGNED (field);
6682 }
6683 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6684 {
6685 size_tree = TREE_OPERAND (exp, 1);
6686 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6687 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6688
6689 /* For vector types, with the correct size of access, use the mode of
6690 the inner type. */
6691 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6692 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6693 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6694 mode = TYPE_MODE (TREE_TYPE (exp));
6695 }
6696 else
6697 {
6698 mode = TYPE_MODE (TREE_TYPE (exp));
6699 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6700
6701 if (mode == BLKmode)
6702 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6703 else
6704 *pbitsize = GET_MODE_BITSIZE (mode);
6705 }
6706
6707 if (size_tree != 0)
6708 {
6709 if (! tree_fits_uhwi_p (size_tree))
6710 mode = BLKmode, *pbitsize = -1;
6711 else
6712 *pbitsize = tree_to_uhwi (size_tree);
6713 }
6714
6715 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6716 and find the ultimate containing object. */
6717 while (1)
6718 {
6719 switch (TREE_CODE (exp))
6720 {
6721 case BIT_FIELD_REF:
6722 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6723 break;
6724
6725 case COMPONENT_REF:
6726 {
6727 tree field = TREE_OPERAND (exp, 1);
6728 tree this_offset = component_ref_field_offset (exp);
6729
6730 /* If this field hasn't been filled in yet, don't go past it.
6731 This should only happen when folding expressions made during
6732 type construction. */
6733 if (this_offset == 0)
6734 break;
6735
6736 offset = size_binop (PLUS_EXPR, offset, this_offset);
6737 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6738
6739 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6740 }
6741 break;
6742
6743 case ARRAY_REF:
6744 case ARRAY_RANGE_REF:
6745 {
6746 tree index = TREE_OPERAND (exp, 1);
6747 tree low_bound = array_ref_low_bound (exp);
6748 tree unit_size = array_ref_element_size (exp);
6749
6750 /* We assume all arrays have sizes that are a multiple of a byte.
6751 First subtract the lower bound, if any, in the type of the
6752 index, then convert to sizetype and multiply by the size of
6753 the array element. */
6754 if (! integer_zerop (low_bound))
6755 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6756 index, low_bound);
6757
6758 offset = size_binop (PLUS_EXPR, offset,
6759 size_binop (MULT_EXPR,
6760 fold_convert (sizetype, index),
6761 unit_size));
6762 }
6763 break;
6764
6765 case REALPART_EXPR:
6766 break;
6767
6768 case IMAGPART_EXPR:
6769 bit_offset += double_int::from_uhwi (*pbitsize);
6770 break;
6771
6772 case VIEW_CONVERT_EXPR:
6773 break;
6774
6775 case MEM_REF:
6776 /* Hand back the decl for MEM[&decl, off]. */
6777 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6778 {
6779 tree off = TREE_OPERAND (exp, 1);
6780 if (!integer_zerop (off))
6781 {
6782 double_int boff, coff = mem_ref_offset (exp);
6783 boff = coff.lshift (BITS_PER_UNIT == 8
6784 ? 3 : exact_log2 (BITS_PER_UNIT));
6785 bit_offset += boff;
6786 }
6787 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6788 }
6789 goto done;
6790
6791 default:
6792 goto done;
6793 }
6794
6795 /* If any reference in the chain is volatile, the effect is volatile. */
6796 if (TREE_THIS_VOLATILE (exp))
6797 *pvolatilep = 1;
6798
6799 exp = TREE_OPERAND (exp, 0);
6800 }
6801 done:
6802
6803 /* If OFFSET is constant, see if we can return the whole thing as a
6804 constant bit position. Make sure to handle overflow during
6805 this conversion. */
6806 if (TREE_CODE (offset) == INTEGER_CST)
6807 {
6808 double_int tem = tree_to_double_int (offset);
6809 tem = tem.sext (TYPE_PRECISION (sizetype));
6810 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6811 tem += bit_offset;
6812 if (tem.fits_shwi ())
6813 {
6814 *pbitpos = tem.to_shwi ();
6815 *poffset = offset = NULL_TREE;
6816 }
6817 }
6818
6819 /* Otherwise, split it up. */
6820 if (offset)
6821 {
6822 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6823 if (bit_offset.is_negative ())
6824 {
6825 double_int mask
6826 = double_int::mask (BITS_PER_UNIT == 8
6827 ? 3 : exact_log2 (BITS_PER_UNIT));
6828 double_int tem = bit_offset.and_not (mask);
6829 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6830 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6831 bit_offset -= tem;
6832 tem = tem.arshift (BITS_PER_UNIT == 8
6833 ? 3 : exact_log2 (BITS_PER_UNIT),
6834 HOST_BITS_PER_DOUBLE_INT);
6835 offset = size_binop (PLUS_EXPR, offset,
6836 double_int_to_tree (sizetype, tem));
6837 }
6838
6839 *pbitpos = bit_offset.to_shwi ();
6840 *poffset = offset;
6841 }
6842
6843 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6844 if (mode == VOIDmode
6845 && blkmode_bitfield
6846 && (*pbitpos % BITS_PER_UNIT) == 0
6847 && (*pbitsize % BITS_PER_UNIT) == 0)
6848 *pmode = BLKmode;
6849 else
6850 *pmode = mode;
6851
6852 return exp;
6853 }
6854
6855 /* Return a tree of sizetype representing the size, in bytes, of the element
6856 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6857
6858 tree
6859 array_ref_element_size (tree exp)
6860 {
6861 tree aligned_size = TREE_OPERAND (exp, 3);
6862 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6863 location_t loc = EXPR_LOCATION (exp);
6864
6865 /* If a size was specified in the ARRAY_REF, it's the size measured
6866 in alignment units of the element type. So multiply by that value. */
6867 if (aligned_size)
6868 {
6869 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6870 sizetype from another type of the same width and signedness. */
6871 if (TREE_TYPE (aligned_size) != sizetype)
6872 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6873 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6874 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6875 }
6876
6877 /* Otherwise, take the size from that of the element type. Substitute
6878 any PLACEHOLDER_EXPR that we have. */
6879 else
6880 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6881 }
6882
6883 /* Return a tree representing the lower bound of the array mentioned in
6884 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6885
6886 tree
6887 array_ref_low_bound (tree exp)
6888 {
6889 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6890
6891 /* If a lower bound is specified in EXP, use it. */
6892 if (TREE_OPERAND (exp, 2))
6893 return TREE_OPERAND (exp, 2);
6894
6895 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6896 substituting for a PLACEHOLDER_EXPR as needed. */
6897 if (domain_type && TYPE_MIN_VALUE (domain_type))
6898 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6899
6900 /* Otherwise, return a zero of the appropriate type. */
6901 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6902 }
6903
6904 /* Returns true if REF is an array reference to an array at the end of
6905 a structure. If this is the case, the array may be allocated larger
6906 than its upper bound implies. */
6907
6908 bool
6909 array_at_struct_end_p (tree ref)
6910 {
6911 if (TREE_CODE (ref) != ARRAY_REF
6912 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6913 return false;
6914
6915 while (handled_component_p (ref))
6916 {
6917 /* If the reference chain contains a component reference to a
6918 non-union type and another field follows, the reference
6919 is not at the end of a structure. */
6920 if (TREE_CODE (ref) == COMPONENT_REF
6921 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6922 {
6923 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6924 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6925 nextf = DECL_CHAIN (nextf);
6926 if (nextf)
6927 return false;
6928 }
6929
6930 ref = TREE_OPERAND (ref, 0);
6931 }
6932
6933 /* If the reference is based on a declared entity, the size of the array
6934 is constrained by its given domain. */
6935 if (DECL_P (ref))
6936 return false;
6937
6938 return true;
6939 }
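
/* For example, with

       struct s { int n; int tail[1]; };

   an access P->tail[I] through a pointer P is considered to be at the
   end of the structure, since the underlying object may have been
   allocated with a longer tail, whereas the same access through a
   declared "struct s v" is not, because V's size is fixed by its
   declaration.  */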
6940
6941 /* Return a tree representing the upper bound of the array mentioned in
6942 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6943
6944 tree
6945 array_ref_up_bound (tree exp)
6946 {
6947 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6948
6949 /* If there is a domain type and it has an upper bound, use it, substituting
6950 for a PLACEHOLDER_EXPR as needed. */
6951 if (domain_type && TYPE_MAX_VALUE (domain_type))
6952 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6953
6954 /* Otherwise fail. */
6955 return NULL_TREE;
6956 }
6957
6958 /* Return a tree representing the offset, in bytes, of the field referenced
6959 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6960
6961 tree
6962 component_ref_field_offset (tree exp)
6963 {
6964 tree aligned_offset = TREE_OPERAND (exp, 2);
6965 tree field = TREE_OPERAND (exp, 1);
6966 location_t loc = EXPR_LOCATION (exp);
6967
6968 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6969 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6970 value. */
6971 if (aligned_offset)
6972 {
6973 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6974 sizetype from another type of the same width and signedness. */
6975 if (TREE_TYPE (aligned_offset) != sizetype)
6976 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6977 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6978 size_int (DECL_OFFSET_ALIGN (field)
6979 / BITS_PER_UNIT));
6980 }
6981
6982 /* Otherwise, take the offset from that of the field. Substitute
6983 any PLACEHOLDER_EXPR that we have. */
6984 else
6985 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6986 }
6987
6988 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6989
6990 static unsigned HOST_WIDE_INT
6991 target_align (const_tree target)
6992 {
6993 /* We might have a chain of nested references with intermediate misaligning
6994 bit-field components, so we need to recurse to find out. */
6995
6996 unsigned HOST_WIDE_INT this_align, outer_align;
6997
6998 switch (TREE_CODE (target))
6999 {
7000 case BIT_FIELD_REF:
7001 return 1;
7002
7003 case COMPONENT_REF:
7004 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7005 outer_align = target_align (TREE_OPERAND (target, 0));
7006 return MIN (this_align, outer_align);
7007
7008 case ARRAY_REF:
7009 case ARRAY_RANGE_REF:
7010 this_align = TYPE_ALIGN (TREE_TYPE (target));
7011 outer_align = target_align (TREE_OPERAND (target, 0));
7012 return MIN (this_align, outer_align);
7013
7014 CASE_CONVERT:
7015 case NON_LVALUE_EXPR:
7016 case VIEW_CONVERT_EXPR:
7017 this_align = TYPE_ALIGN (TREE_TYPE (target));
7018 outer_align = target_align (TREE_OPERAND (target, 0));
7019 return MAX (this_align, outer_align);
7020
7021 default:
7022 return TYPE_ALIGN (TREE_TYPE (target));
7023 }
7024 }
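
/* For instance, for a store to X.F the result is the smaller of F's
   declared alignment and whatever alignment can be assumed for X
   itself, while a conversion such as a VIEW_CONVERT_EXPR takes the
   larger of its type's alignment and the underlying object's, as
   computed above.  */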
7025
7026 \f
7027 /* Given an rtx VALUE that may contain additions and multiplications, return
7028 an equivalent value that just refers to a register, memory, or constant.
7029 This is done by generating instructions to perform the arithmetic and
7030 returning a pseudo-register containing the value.
7031
7032 The returned value may be a REG, SUBREG, MEM or constant. */
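
/* A small sketch of the effect (hypothetical RTL): given

       (plus:SI (reg:SI 100) (const_int 4))

   force_operand emits the addition into a register and returns that
   register, so the caller is left with a plain operand instead of an
   arithmetic expression.  */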
7033
7034 rtx
7035 force_operand (rtx value, rtx target)
7036 {
7037 rtx op1, op2;
7038 /* Use subtarget as the target for operand 0 of a binary operation. */
7039 rtx subtarget = get_subtarget (target);
7040 enum rtx_code code = GET_CODE (value);
7041
7042 /* Check for subreg applied to an expression produced by loop optimizer. */
7043 if (code == SUBREG
7044 && !REG_P (SUBREG_REG (value))
7045 && !MEM_P (SUBREG_REG (value)))
7046 {
7047 value
7048 = simplify_gen_subreg (GET_MODE (value),
7049 force_reg (GET_MODE (SUBREG_REG (value)),
7050 force_operand (SUBREG_REG (value),
7051 NULL_RTX)),
7052 GET_MODE (SUBREG_REG (value)),
7053 SUBREG_BYTE (value));
7054 code = GET_CODE (value);
7055 }
7056
7057 /* Check for a PIC address load. */
7058 if ((code == PLUS || code == MINUS)
7059 && XEXP (value, 0) == pic_offset_table_rtx
7060 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7061 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7062 || GET_CODE (XEXP (value, 1)) == CONST))
7063 {
7064 if (!subtarget)
7065 subtarget = gen_reg_rtx (GET_MODE (value));
7066 emit_move_insn (subtarget, value);
7067 return subtarget;
7068 }
7069
7070 if (ARITHMETIC_P (value))
7071 {
7072 op2 = XEXP (value, 1);
7073 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7074 subtarget = 0;
7075 if (code == MINUS && CONST_INT_P (op2))
7076 {
7077 code = PLUS;
7078 op2 = negate_rtx (GET_MODE (value), op2);
7079 }
7080
7081 /* Check for an addition with OP2 a constant integer and our first
7082 operand a PLUS of a virtual register and something else. In that
7083 case, we want to emit the sum of the virtual register and the
7084 constant first and then add the other value. This allows virtual
7085 register instantiation to simply modify the constant rather than
7086 creating another one around this addition. */
7087 if (code == PLUS && CONST_INT_P (op2)
7088 && GET_CODE (XEXP (value, 0)) == PLUS
7089 && REG_P (XEXP (XEXP (value, 0), 0))
7090 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7091 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7092 {
7093 rtx temp = expand_simple_binop (GET_MODE (value), code,
7094 XEXP (XEXP (value, 0), 0), op2,
7095 subtarget, 0, OPTAB_LIB_WIDEN);
7096 return expand_simple_binop (GET_MODE (value), code, temp,
7097 force_operand (XEXP (XEXP (value,
7098 0), 1), 0),
7099 target, 0, OPTAB_LIB_WIDEN);
7100 }
7101
7102 op1 = force_operand (XEXP (value, 0), subtarget);
7103 op2 = force_operand (op2, NULL_RTX);
7104 switch (code)
7105 {
7106 case MULT:
7107 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7108 case DIV:
7109 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7110 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7111 target, 1, OPTAB_LIB_WIDEN);
7112 else
7113 return expand_divmod (0,
7114 FLOAT_MODE_P (GET_MODE (value))
7115 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7116 GET_MODE (value), op1, op2, target, 0);
7117 case MOD:
7118 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7119 target, 0);
7120 case UDIV:
7121 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7122 target, 1);
7123 case UMOD:
7124 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7125 target, 1);
7126 case ASHIFTRT:
7127 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7128 target, 0, OPTAB_LIB_WIDEN);
7129 default:
7130 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7131 target, 1, OPTAB_LIB_WIDEN);
7132 }
7133 }
7134 if (UNARY_P (value))
7135 {
7136 if (!target)
7137 target = gen_reg_rtx (GET_MODE (value));
7138 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7139 switch (code)
7140 {
7141 case ZERO_EXTEND:
7142 case SIGN_EXTEND:
7143 case TRUNCATE:
7144 case FLOAT_EXTEND:
7145 case FLOAT_TRUNCATE:
7146 convert_move (target, op1, code == ZERO_EXTEND);
7147 return target;
7148
7149 case FIX:
7150 case UNSIGNED_FIX:
7151 expand_fix (target, op1, code == UNSIGNED_FIX);
7152 return target;
7153
7154 case FLOAT:
7155 case UNSIGNED_FLOAT:
7156 expand_float (target, op1, code == UNSIGNED_FLOAT);
7157 return target;
7158
7159 default:
7160 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7161 }
7162 }
7163
7164 #ifdef INSN_SCHEDULING
7165 /* On machines that have insn scheduling, we want all memory references to be
7166 explicit, so we need to deal with such paradoxical SUBREGs. */
7167 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7168 value
7169 = simplify_gen_subreg (GET_MODE (value),
7170 force_reg (GET_MODE (SUBREG_REG (value)),
7171 force_operand (SUBREG_REG (value),
7172 NULL_RTX)),
7173 GET_MODE (SUBREG_REG (value)),
7174 SUBREG_BYTE (value));
7175 #endif
7176
7177 return value;
7178 }
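
/* Illustrative sketch (hypothetical RTL, not from the original code):
   given

     (plus:SI (plus:SI (reg:SI virtual-stack-vars) (reg:SI 100))
              (const_int 4))

   the virtual-register case above first computes
   virtual-stack-vars + 4 and only then adds (reg 100), so that virtual
   register instantiation merely has to adjust a single constant rather
   than rewrite a more deeply nested addition.  */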
7179 \f
7180 /* Subroutine of expand_expr: return nonzero iff there is no way that
7181 EXP can reference X, which is being modified. TOP_P is nonzero if this
7182 call is going to be used to determine whether we need a temporary
7183 for EXP, as opposed to a recursive call to this function.
7184
7185 It is always safe for this routine to return zero since it merely
7186 searches for optimization opportunities. */
7187
7188 int
7189 safe_from_p (const_rtx x, tree exp, int top_p)
7190 {
7191 rtx exp_rtl = 0;
7192 int i, nops;
7193
7194 if (x == 0
7195 /* If EXP has varying size, we MUST use a target since we currently
7196 have no way of allocating temporaries of variable size
7197 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7198 So we assume here that something at a higher level has prevented a
7199 clash. This is somewhat bogus, but the best we can do. Only
7200 do this when X is BLKmode and when we are at the top level. */
7201 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7202 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7203 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7204 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7205 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7206 != INTEGER_CST)
7207 && GET_MODE (x) == BLKmode)
7208 /* If X is in the outgoing argument area, it is always safe. */
7209 || (MEM_P (x)
7210 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7211 || (GET_CODE (XEXP (x, 0)) == PLUS
7212 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7213 return 1;
7214
7215 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7216 find the underlying pseudo. */
7217 if (GET_CODE (x) == SUBREG)
7218 {
7219 x = SUBREG_REG (x);
7220 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7221 return 0;
7222 }
7223
7224 /* Now look at our tree code and possibly recurse. */
7225 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7226 {
7227 case tcc_declaration:
7228 exp_rtl = DECL_RTL_IF_SET (exp);
7229 break;
7230
7231 case tcc_constant:
7232 return 1;
7233
7234 case tcc_exceptional:
7235 if (TREE_CODE (exp) == TREE_LIST)
7236 {
7237 while (1)
7238 {
7239 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7240 return 0;
7241 exp = TREE_CHAIN (exp);
7242 if (!exp)
7243 return 1;
7244 if (TREE_CODE (exp) != TREE_LIST)
7245 return safe_from_p (x, exp, 0);
7246 }
7247 }
7248 else if (TREE_CODE (exp) == CONSTRUCTOR)
7249 {
7250 constructor_elt *ce;
7251 unsigned HOST_WIDE_INT idx;
7252
7253 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7254 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7255 || !safe_from_p (x, ce->value, 0))
7256 return 0;
7257 return 1;
7258 }
7259 else if (TREE_CODE (exp) == ERROR_MARK)
7260 return 1; /* An already-visited SAVE_EXPR? */
7261 else
7262 return 0;
7263
7264 case tcc_statement:
7265 /* The only case we look at here is the DECL_INITIAL inside a
7266 DECL_EXPR. */
7267 return (TREE_CODE (exp) != DECL_EXPR
7268 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7269 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7270 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7271
7272 case tcc_binary:
7273 case tcc_comparison:
7274 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7275 return 0;
7276 /* Fall through. */
7277
7278 case tcc_unary:
7279 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7280
7281 case tcc_expression:
7282 case tcc_reference:
7283 case tcc_vl_exp:
7284 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7285 the expression. If it is set, we conflict iff we are that rtx or
7286 both are in memory. Otherwise, we check all operands of the
7287 expression recursively. */
7288
7289 switch (TREE_CODE (exp))
7290 {
7291 case ADDR_EXPR:
7292 /* If the operand is static or we are static, we can't conflict.
7293 Likewise if we don't conflict with the operand at all. */
7294 if (staticp (TREE_OPERAND (exp, 0))
7295 || TREE_STATIC (exp)
7296 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7297 return 1;
7298
7299 /* Otherwise, the only way this can conflict is if we are taking
7300 the address of a DECL whose address is part of X, which is
7301 very rare. */
7302 exp = TREE_OPERAND (exp, 0);
7303 if (DECL_P (exp))
7304 {
7305 if (!DECL_RTL_SET_P (exp)
7306 || !MEM_P (DECL_RTL (exp)))
7307 return 0;
7308 else
7309 exp_rtl = XEXP (DECL_RTL (exp), 0);
7310 }
7311 break;
7312
7313 case MEM_REF:
7314 if (MEM_P (x)
7315 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7316 get_alias_set (exp)))
7317 return 0;
7318 break;
7319
7320 case CALL_EXPR:
7321 /* Assume that the call will clobber all hard registers and
7322 all of memory. */
7323 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7324 || MEM_P (x))
7325 return 0;
7326 break;
7327
7328 case WITH_CLEANUP_EXPR:
7329 case CLEANUP_POINT_EXPR:
7330 /* Lowered by gimplify.c. */
7331 gcc_unreachable ();
7332
7333 case SAVE_EXPR:
7334 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7335
7336 default:
7337 break;
7338 }
7339
7340 /* If we have an rtx, we do not need to scan our operands. */
7341 if (exp_rtl)
7342 break;
7343
7344 nops = TREE_OPERAND_LENGTH (exp);
7345 for (i = 0; i < nops; i++)
7346 if (TREE_OPERAND (exp, i) != 0
7347 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7348 return 0;
7349
7350 break;
7351
7352 case tcc_type:
7353 /* Should never get a type here. */
7354 gcc_unreachable ();
7355 }
7356
7357 /* If we have an rtl, find any enclosed object. Then see if we conflict
7358 with it. */
7359 if (exp_rtl)
7360 {
7361 if (GET_CODE (exp_rtl) == SUBREG)
7362 {
7363 exp_rtl = SUBREG_REG (exp_rtl);
7364 if (REG_P (exp_rtl)
7365 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7366 return 0;
7367 }
7368
7369 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7370 are memory and they conflict. */
7371 return ! (rtx_equal_p (x, exp_rtl)
7372 || (MEM_P (x) && MEM_P (exp_rtl)
7373 && true_dependence (exp_rtl, VOIDmode, x)));
7374 }
7375
7376 /* If we reach here, it is safe. */
7377 return 1;
7378 }
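
/* Illustrative sketch (hypothetical, not from the original sources):
   when expanding something like

     x = x + f ();

   with X already expanded to a MEM, safe_from_p (X, <call to f>, 0)
   returns 0 because a CALL_EXPR is assumed to clobber all hard
   registers and all of memory, so callers avoid using X as a scratch
   target while expanding the call.  A plain constant operand, by
   contrast, is always safe (the tcc_constant case returns 1).  */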
7379
7380 \f
7381 /* Return the highest power of two that EXP is known to be a multiple of.
7382 This is used in updating alignment of MEMs in array references. */
7383
7384 unsigned HOST_WIDE_INT
7385 highest_pow2_factor (const_tree exp)
7386 {
7387 unsigned HOST_WIDE_INT ret;
7388 int trailing_zeros = tree_ctz (exp);
7389 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7390 return BIGGEST_ALIGNMENT;
7391 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7392 if (ret > BIGGEST_ALIGNMENT)
7393 return BIGGEST_ALIGNMENT;
7394 return ret;
7395 }
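
/* Illustrative sketch (assumption, not from the original code): for an
   offset expression such as i * 16, tree_ctz reports at least 4 known
   trailing zero bits, so highest_pow2_factor returns 16; the result is
   capped at BIGGEST_ALIGNMENT and is used to bump the known alignment
   of the MEM built for the enclosing array reference.  */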
7396
7397 /* Similar, except that the alignment requirements of TARGET are
7398 taken into account. Assume it is at least as aligned as its
7399 type, unless it is a COMPONENT_REF in which case the layout of
7400 the structure gives the alignment. */
7401
7402 static unsigned HOST_WIDE_INT
7403 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7404 {
7405 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7406 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7407
7408 return MAX (factor, talign);
7409 }
7410 \f
7411 #ifdef HAVE_conditional_move
7412 /* Convert the tree comparison code TCODE to the rtl one where the
7413 signedness is UNSIGNEDP. */
7414
7415 static enum rtx_code
7416 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7417 {
7418 enum rtx_code code;
7419 switch (tcode)
7420 {
7421 case EQ_EXPR:
7422 code = EQ;
7423 break;
7424 case NE_EXPR:
7425 code = NE;
7426 break;
7427 case LT_EXPR:
7428 code = unsignedp ? LTU : LT;
7429 break;
7430 case LE_EXPR:
7431 code = unsignedp ? LEU : LE;
7432 break;
7433 case GT_EXPR:
7434 code = unsignedp ? GTU : GT;
7435 break;
7436 case GE_EXPR:
7437 code = unsignedp ? GEU : GE;
7438 break;
7439 case UNORDERED_EXPR:
7440 code = UNORDERED;
7441 break;
7442 case ORDERED_EXPR:
7443 code = ORDERED;
7444 break;
7445 case UNLT_EXPR:
7446 code = UNLT;
7447 break;
7448 case UNLE_EXPR:
7449 code = UNLE;
7450 break;
7451 case UNGT_EXPR:
7452 code = UNGT;
7453 break;
7454 case UNGE_EXPR:
7455 code = UNGE;
7456 break;
7457 case UNEQ_EXPR:
7458 code = UNEQ;
7459 break;
7460 case LTGT_EXPR:
7461 code = LTGT;
7462 break;
7463
7464 default:
7465 gcc_unreachable ();
7466 }
7467 return code;
7468 }
7469 #endif
7470
7471 /* Subroutine of expand_expr. Expand the two operands of a binary
7472 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7473 The value may be stored in TARGET if TARGET is nonzero. The
7474 MODIFIER argument is as documented by expand_expr. */
7475
7476 static void
7477 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7478 enum expand_modifier modifier)
7479 {
7480 if (! safe_from_p (target, exp1, 1))
7481 target = 0;
7482 if (operand_equal_p (exp0, exp1, 0))
7483 {
7484 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7485 *op1 = copy_rtx (*op0);
7486 }
7487 else
7488 {
7489 /* If we need to preserve evaluation order, copy exp0 into its own
7490 temporary variable so that it can't be clobbered by exp1. */
7491 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7492 exp0 = save_expr (exp0);
7493 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7494 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7495 }
7496 }
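
/* Illustrative sketch (hypothetical): a call such as

     expand_operands (a, a, target, &op0, &op1, EXPAND_NORMAL);

   takes the operand_equal_p path, expanding A only once and returning
   a copy_rtx of the result as the second operand.  For distinct
   operands, when flag_evaluation_order is set and EXP1 has side
   effects, EXP0 is wrapped in a SAVE_EXPR first so that expanding
   EXP1 cannot clobber it.  */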
7497
7498 \f
7499 /* Return a MEM that contains constant EXP. DEFER is as for
7500 output_constant_def and MODIFIER is as for expand_expr. */
7501
7502 static rtx
7503 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7504 {
7505 rtx mem;
7506
7507 mem = output_constant_def (exp, defer);
7508 if (modifier != EXPAND_INITIALIZER)
7509 mem = use_anchored_address (mem);
7510 return mem;
7511 }
7512
7513 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7514 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7515
7516 static rtx
7517 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7518 enum expand_modifier modifier, addr_space_t as)
7519 {
7520 rtx result, subtarget;
7521 tree inner, offset;
7522 HOST_WIDE_INT bitsize, bitpos;
7523 int volatilep, unsignedp;
7524 enum machine_mode mode1;
7525
7526 /* If we are taking the address of a constant and are at the top level,
7527 we have to use output_constant_def since we can't call force_const_mem
7528 at top level. */
7529 /* ??? This should be considered a front-end bug. We should not be
7530 generating ADDR_EXPR of something that isn't an LVALUE. The only
7531 exception here is STRING_CST. */
7532 if (CONSTANT_CLASS_P (exp))
7533 {
7534 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7535 if (modifier < EXPAND_SUM)
7536 result = force_operand (result, target);
7537 return result;
7538 }
7539
7540 /* Everything must be something allowed by is_gimple_addressable. */
7541 switch (TREE_CODE (exp))
7542 {
7543 case INDIRECT_REF:
7544 /* This case will happen via recursion for &a->b. */
7545 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7546
7547 case MEM_REF:
7548 {
7549 tree tem = TREE_OPERAND (exp, 0);
7550 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7551 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7552 return expand_expr (tem, target, tmode, modifier);
7553 }
7554
7555 case CONST_DECL:
7556 /* Expand the initializer like constants above. */
7557 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7558 0, modifier), 0);
7559 if (modifier < EXPAND_SUM)
7560 result = force_operand (result, target);
7561 return result;
7562
7563 case REALPART_EXPR:
7564 /* The real part of the complex number is always first, therefore
7565 the address is the same as the address of the parent object. */
7566 offset = 0;
7567 bitpos = 0;
7568 inner = TREE_OPERAND (exp, 0);
7569 break;
7570
7571 case IMAGPART_EXPR:
7572 /* The imaginary part of the complex number is always second.
7573 The expression is therefore always offset by the size of the
7574 scalar type. */
7575 offset = 0;
7576 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7577 inner = TREE_OPERAND (exp, 0);
7578 break;
7579
7580 case COMPOUND_LITERAL_EXPR:
7581 /* Allow COMPOUND_LITERAL_EXPR in initializers: if e.g.
7582 rtl_for_decl_init is called on a DECL_INITIAL with
7583 COMPOUND_LITERAL_EXPRs in it, they haven't been gimplified. */
7584 if (modifier == EXPAND_INITIALIZER
7585 && COMPOUND_LITERAL_EXPR_DECL (exp))
7586 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7587 target, tmode, modifier, as);
7588 /* FALLTHRU */
7589 default:
7590 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7591 expand_expr, as that can have various side effects; LABEL_DECLs for
7592 example, may not have their DECL_RTL set yet. Expand the rtl of
7593 CONSTRUCTORs too, which should yield a memory reference for the
7594 constructor's contents. Assume language specific tree nodes can
7595 be expanded in some interesting way. */
7596 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7597 if (DECL_P (exp)
7598 || TREE_CODE (exp) == CONSTRUCTOR
7599 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7600 {
7601 result = expand_expr (exp, target, tmode,
7602 modifier == EXPAND_INITIALIZER
7603 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7604
7605 /* If the DECL isn't in memory, then the DECL wasn't properly
7606 marked TREE_ADDRESSABLE, which will be either a front-end
7607 or a tree optimizer bug. */
7608
7609 if (TREE_ADDRESSABLE (exp)
7610 && ! MEM_P (result)
7611 && ! targetm.calls.allocate_stack_slots_for_args ())
7612 {
7613 error ("local frame unavailable (naked function?)");
7614 return result;
7615 }
7616 else
7617 gcc_assert (MEM_P (result));
7618 result = XEXP (result, 0);
7619
7620 /* ??? Is this needed anymore? */
7621 if (DECL_P (exp))
7622 TREE_USED (exp) = 1;
7623
7624 if (modifier != EXPAND_INITIALIZER
7625 && modifier != EXPAND_CONST_ADDRESS
7626 && modifier != EXPAND_SUM)
7627 result = force_operand (result, target);
7628 return result;
7629 }
7630
7631 /* We know how to handle the "aligning nodes" that get_inner_reference
7632 looks through here, even though we are expanding to RTL: we can
7633 just bypass them because they won't change the final object whose
7634 address will be returned (they actually exist only for that
7635 purpose). */
7636 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7637 &mode1, &unsignedp, &volatilep);
7638 break;
7639 }
7640
7641 /* We must have made progress. */
7642 gcc_assert (inner != exp);
7643
7644 subtarget = offset || bitpos ? NULL_RTX : target;
7645 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7646 inner alignment, force the inner to be sufficiently aligned. */
7647 if (CONSTANT_CLASS_P (inner)
7648 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7649 {
7650 inner = copy_node (inner);
7651 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7652 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7653 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7654 }
7655 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7656
7657 if (offset)
7658 {
7659 rtx tmp;
7660
7661 if (modifier != EXPAND_NORMAL)
7662 result = force_operand (result, NULL);
7663 tmp = expand_expr (offset, NULL_RTX, tmode,
7664 modifier == EXPAND_INITIALIZER
7665 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7666
7667 result = convert_memory_address_addr_space (tmode, result, as);
7668 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7669
7670 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7671 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7672 else
7673 {
7674 subtarget = bitpos ? NULL_RTX : target;
7675 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7676 1, OPTAB_LIB_WIDEN);
7677 }
7678 }
7679
7680 if (bitpos)
7681 {
7682 /* Someone beforehand should have rejected taking the address
7683 of such an object. */
7684 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7685
7686 result = convert_memory_address_addr_space (tmode, result, as);
7687 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7688 if (modifier < EXPAND_SUM)
7689 result = force_operand (result, target);
7690 }
7691
7692 return result;
7693 }
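
/* Illustrative sketch (hypothetical, assuming a 32-bit int): for

     struct s { int a; int b; } v;
     ... &v.b ...

   get_inner_reference decomposes the COMPONENT_REF into V with
   bitpos == 32 and no variable offset, the recursive call yields the
   address of V, and the bitpos handling above produces
   plus_constant (tmode, <address of V>, 4).  An IMAGPART_EXPR is
   handled the same way, with bitpos set to the size of the scalar
   type.  */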
7694
7695 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7696 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7697
7698 static rtx
7699 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7700 enum expand_modifier modifier)
7701 {
7702 addr_space_t as = ADDR_SPACE_GENERIC;
7703 enum machine_mode address_mode = Pmode;
7704 enum machine_mode pointer_mode = ptr_mode;
7705 enum machine_mode rmode;
7706 rtx result;
7707
7708 /* Target mode of VOIDmode says "whatever's natural". */
7709 if (tmode == VOIDmode)
7710 tmode = TYPE_MODE (TREE_TYPE (exp));
7711
7712 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7713 {
7714 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7715 address_mode = targetm.addr_space.address_mode (as);
7716 pointer_mode = targetm.addr_space.pointer_mode (as);
7717 }
7718
7719 /* We can get called with some Weird Things if the user does silliness
7720 like "(short) &a". In that case, convert_memory_address won't do
7721 the right thing, so ignore the given target mode. */
7722 if (tmode != address_mode && tmode != pointer_mode)
7723 tmode = address_mode;
7724
7725 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7726 tmode, modifier, as);
7727
7728 /* Despite expand_expr's claims about ignoring TMODE when not
7729 strictly convenient, things break if we don't honor it. Note
7730 that combined with the above, we only do this for pointer modes. */
7731 rmode = GET_MODE (result);
7732 if (rmode == VOIDmode)
7733 rmode = tmode;
7734 if (rmode != tmode)
7735 result = convert_memory_address_addr_space (tmode, result, as);
7736
7737 return result;
7738 }
7739
7740 /* Generate code for computing CONSTRUCTOR EXP.
7741 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7742 is TRUE, NULL is returned instead of creating a temporary variable
7743 in memory, and the caller needs to handle it differently. */
7744
7745 static rtx
7746 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7747 bool avoid_temp_mem)
7748 {
7749 tree type = TREE_TYPE (exp);
7750 enum machine_mode mode = TYPE_MODE (type);
7751
7752 /* Try to avoid creating a temporary at all. This is possible
7753 if all of the initializer is zero.
7754 FIXME: try to handle all [0..255] initializers we can handle
7755 with memset. */
7756 if (TREE_STATIC (exp)
7757 && !TREE_ADDRESSABLE (exp)
7758 && target != 0 && mode == BLKmode
7759 && all_zeros_p (exp))
7760 {
7761 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7762 return target;
7763 }
7764
7765 /* All elts simple constants => refer to a constant in memory. But
7766 if this is a non-BLKmode mode, let it store a field at a time
7767 since that should make a CONST_INT or CONST_DOUBLE when we
7768 fold. Likewise, if we have a target we can use, it is best to
7769 store directly into the target unless the type is large enough
7770 that memcpy will be used. If we are making an initializer and
7771 all operands are constant, put it in memory as well.
7772
7773 FIXME: Avoid trying to fill vector constructors piece-meal.
7774 Output them with output_constant_def below unless we're sure
7775 they're zeros. This should go away when vector initializers
7776 are treated like VECTOR_CST instead of arrays. */
7777 if ((TREE_STATIC (exp)
7778 && ((mode == BLKmode
7779 && ! (target != 0 && safe_from_p (target, exp, 1)))
7780 || TREE_ADDRESSABLE (exp)
7781 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7782 && (! MOVE_BY_PIECES_P
7783 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7784 TYPE_ALIGN (type)))
7785 && ! mostly_zeros_p (exp))))
7786 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7787 && TREE_CONSTANT (exp)))
7788 {
7789 rtx constructor;
7790
7791 if (avoid_temp_mem)
7792 return NULL_RTX;
7793
7794 constructor = expand_expr_constant (exp, 1, modifier);
7795
7796 if (modifier != EXPAND_CONST_ADDRESS
7797 && modifier != EXPAND_INITIALIZER
7798 && modifier != EXPAND_SUM)
7799 constructor = validize_mem (constructor);
7800
7801 return constructor;
7802 }
7803
7804 /* Handle calls that pass values in multiple non-contiguous
7805 locations. The Irix 6 ABI has examples of this. */
7806 if (target == 0 || ! safe_from_p (target, exp, 1)
7807 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7808 {
7809 if (avoid_temp_mem)
7810 return NULL_RTX;
7811
7812 target
7813 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7814 | (TREE_READONLY (exp)
7815 * TYPE_QUAL_CONST))),
7816 TREE_ADDRESSABLE (exp), 1);
7817 }
7818
7819 store_constructor (exp, target, 0, int_expr_size (exp));
7820 return target;
7821 }
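
/* Illustrative sketch (assumption, not from the original code): for an
   all-zero aggregate initializer such as

     struct big { char buf[256]; } x = { 0 };

   the CONSTRUCTOR is TREE_STATIC, all_zeros_p and BLKmode, so when a
   usable TARGET is supplied the function simply emits
   clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL) instead of
   building the value in a temporary and copying it.  */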
7822
7823
7824 /* expand_expr: generate code for computing expression EXP.
7825 An rtx for the computed value is returned. The value is never null.
7826 In the case of a void EXP, const0_rtx is returned.
7827
7828 The value may be stored in TARGET if TARGET is nonzero.
7829 TARGET is just a suggestion; callers must assume that
7830 the rtx returned may not be the same as TARGET.
7831
7832 If TARGET is CONST0_RTX, it means that the value will be ignored.
7833
7834 If TMODE is not VOIDmode, it suggests generating the
7835 result in mode TMODE. But this is done only when convenient.
7836 Otherwise, TMODE is ignored and the value generated in its natural mode.
7837 TMODE is just a suggestion; callers must assume that
7838 the rtx returned may not have mode TMODE.
7839
7840 Note that TARGET may have neither TMODE nor MODE. In that case, it
7841 probably will not be used.
7842
7843 If MODIFIER is EXPAND_SUM then when EXP is an addition
7844 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7845 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7846 products as above, or REG or MEM, or constant.
7847 Ordinarily in such cases we would output mul or add instructions
7848 and then return a pseudo reg containing the sum.
7849
7850 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7851 it also marks a label as absolutely required (it can't be dead).
7852 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7853 This is used for outputting expressions used in initializers.
7854
7855 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7856 with a constant address even if that address is not normally legitimate.
7857 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7858
7859 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7860 a call parameter. Such targets require special care as we haven't yet
7861 marked TARGET so that it's safe from being trashed by libcalls. We
7862 don't want to use TARGET for anything but the final result;
7863 Intermediate values must go elsewhere. Additionally, calls to
7864 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7865
7866 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7867 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7868 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7869 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7870 recursively. */
7871
7872 rtx
7873 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7874 enum expand_modifier modifier, rtx *alt_rtl)
7875 {
7876 rtx ret;
7877
7878 /* Handle ERROR_MARK before anybody tries to access its type. */
7879 if (TREE_CODE (exp) == ERROR_MARK
7880 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7881 {
7882 ret = CONST0_RTX (tmode);
7883 return ret ? ret : const0_rtx;
7884 }
7885
7886 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7887 return ret;
7888 }
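
/* Illustrative sketch (hypothetical, on a 32-bit target with 4-byte
   array elements): when the address arithmetic for a[i] is expanded
   with EXPAND_SUM, the expander may legitimately return an unreduced
   form such as

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (symbol_ref:SI "a"))

   which the caller can feed straight into address legitimization
   rather than forcing the sum into a fresh pseudo register first.  */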
7889
7890 /* Try to expand the conditional expression which is represented by
7891 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7892 return the rtl reg which represents the result. Otherwise return
7893 NULL_RTX. */
7894
7895 static rtx
7896 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7897 tree treeop1 ATTRIBUTE_UNUSED,
7898 tree treeop2 ATTRIBUTE_UNUSED)
7899 {
7900 #ifdef HAVE_conditional_move
7901 rtx insn;
7902 rtx op00, op01, op1, op2;
7903 enum rtx_code comparison_code;
7904 enum machine_mode comparison_mode;
7905 gimple srcstmt;
7906 rtx temp;
7907 tree type = TREE_TYPE (treeop1);
7908 int unsignedp = TYPE_UNSIGNED (type);
7909 enum machine_mode mode = TYPE_MODE (type);
7910 enum machine_mode orig_mode = mode;
7911
7912 /* If we cannot do a conditional move on the mode, try doing it
7913 with the promoted mode. */
7914 if (!can_conditionally_move_p (mode))
7915 {
7916 mode = promote_mode (type, mode, &unsignedp);
7917 if (!can_conditionally_move_p (mode))
7918 return NULL_RTX;
7919 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7920 }
7921 else
7922 temp = assign_temp (type, 0, 1);
7923
7924 start_sequence ();
7925 expand_operands (treeop1, treeop2,
7926 temp, &op1, &op2, EXPAND_NORMAL);
7927
7928 if (TREE_CODE (treeop0) == SSA_NAME
7929 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7930 {
7931 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7932 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7933 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7934 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7935 comparison_mode = TYPE_MODE (type);
7936 unsignedp = TYPE_UNSIGNED (type);
7937 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7938 }
7939 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7940 {
7941 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7942 enum tree_code cmpcode = TREE_CODE (treeop0);
7943 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7944 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7945 unsignedp = TYPE_UNSIGNED (type);
7946 comparison_mode = TYPE_MODE (type);
7947 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7948 }
7949 else
7950 {
7951 op00 = expand_normal (treeop0);
7952 op01 = const0_rtx;
7953 comparison_code = NE;
7954 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7955 }
7956
7957 if (GET_MODE (op1) != mode)
7958 op1 = gen_lowpart (mode, op1);
7959
7960 if (GET_MODE (op2) != mode)
7961 op2 = gen_lowpart (mode, op2);
7962
7963 /* Try to emit the conditional move. */
7964 insn = emit_conditional_move (temp, comparison_code,
7965 op00, op01, comparison_mode,
7966 op1, op2, mode,
7967 unsignedp);
7968
7969 /* If we could do the conditional move, emit the sequence,
7970 and return. */
7971 if (insn)
7972 {
7973 rtx seq = get_insns ();
7974 end_sequence ();
7975 emit_insn (seq);
7976 return convert_modes (orig_mode, mode, temp, 0);
7977 }
7978
7979 /* Otherwise discard the sequence and fall back to code with
7980 branches. */
7981 end_sequence ();
7982 #endif
7983 return NULL_RTX;
7984 }
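
/* Illustrative sketch (hypothetical): for

     x = a < b ? c : d;

   where TREEOP0 is the SSA_NAME defined by the comparison, the
   function above expands C and D, converts LT_EXPR to LT (or LTU for
   unsigned operands) and asks emit_conditional_move for a branch-free
   sequence; if the target has no usable conditional move, the partial
   sequence is discarded and NULL_RTX is returned so the caller falls
   back to compare-and-jump code.  */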
7985
7986 rtx
7987 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7988 enum expand_modifier modifier)
7989 {
7990 rtx op0, op1, op2, temp;
7991 tree type;
7992 int unsignedp;
7993 enum machine_mode mode;
7994 enum tree_code code = ops->code;
7995 optab this_optab;
7996 rtx subtarget, original_target;
7997 int ignore;
7998 bool reduce_bit_field;
7999 location_t loc = ops->location;
8000 tree treeop0, treeop1, treeop2;
8001 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8002 ? reduce_to_bit_field_precision ((expr), \
8003 target, \
8004 type) \
8005 : (expr))
8006
8007 type = ops->type;
8008 mode = TYPE_MODE (type);
8009 unsignedp = TYPE_UNSIGNED (type);
8010
8011 treeop0 = ops->op0;
8012 treeop1 = ops->op1;
8013 treeop2 = ops->op2;
8014
8015 /* We should be called only on simple (binary or unary) expressions,
8016 exactly those that are valid in gimple expressions that aren't
8017 GIMPLE_SINGLE_RHS (or invalid). */
8018 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8019 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8020 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8021
8022 ignore = (target == const0_rtx
8023 || ((CONVERT_EXPR_CODE_P (code)
8024 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8025 && TREE_CODE (type) == VOID_TYPE));
8026
8027 /* We should be called only if we need the result. */
8028 gcc_assert (!ignore);
8029
8030 /* An operation in what may be a bit-field type needs the
8031 result to be reduced to the precision of the bit-field type,
8032 which is narrower than that of the type's mode. */
8033 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8034 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
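
  /* Illustrative sketch (assumption): for a bit-field type such as

       struct { unsigned v : 3; } s;

     arithmetic on s.v is carried out in the wider machine mode of the
     type, so reduce_bit_field is true and REDUCE_BIT_FIELD masks the
     result back down to 3 bits (or sign-extends it for a signed
     field) before it is returned.  */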
8035
8036 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8037 target = 0;
8038
8039 /* Use subtarget as the target for operand 0 of a binary operation. */
8040 subtarget = get_subtarget (target);
8041 original_target = target;
8042
8043 switch (code)
8044 {
8045 case NON_LVALUE_EXPR:
8046 case PAREN_EXPR:
8047 CASE_CONVERT:
8048 if (treeop0 == error_mark_node)
8049 return const0_rtx;
8050
8051 if (TREE_CODE (type) == UNION_TYPE)
8052 {
8053 tree valtype = TREE_TYPE (treeop0);
8054
8055 /* If both input and output are BLKmode, this conversion isn't doing
8056 anything except possibly changing memory attributes. */
8057 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8058 {
8059 rtx result = expand_expr (treeop0, target, tmode,
8060 modifier);
8061
8062 result = copy_rtx (result);
8063 set_mem_attributes (result, type, 0);
8064 return result;
8065 }
8066
8067 if (target == 0)
8068 {
8069 if (TYPE_MODE (type) != BLKmode)
8070 target = gen_reg_rtx (TYPE_MODE (type));
8071 else
8072 target = assign_temp (type, 1, 1);
8073 }
8074
8075 if (MEM_P (target))
8076 /* Store data into beginning of memory target. */
8077 store_expr (treeop0,
8078 adjust_address (target, TYPE_MODE (valtype), 0),
8079 modifier == EXPAND_STACK_PARM,
8080 false);
8081
8082 else
8083 {
8084 gcc_assert (REG_P (target));
8085
8086 /* Store this field into a union of the proper type. */
8087 store_field (target,
8088 MIN ((int_size_in_bytes (TREE_TYPE
8089 (treeop0))
8090 * BITS_PER_UNIT),
8091 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8092 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8093 }
8094
8095 /* Return the entire union. */
8096 return target;
8097 }
8098
8099 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8100 {
8101 op0 = expand_expr (treeop0, target, VOIDmode,
8102 modifier);
8103
8104 /* If the signedness of the conversion differs and OP0 is
8105 a promoted SUBREG, clear that indication since we now
8106 have to do the proper extension. */
8107 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8108 && GET_CODE (op0) == SUBREG)
8109 SUBREG_PROMOTED_VAR_P (op0) = 0;
8110
8111 return REDUCE_BIT_FIELD (op0);
8112 }
8113
8114 op0 = expand_expr (treeop0, NULL_RTX, mode,
8115 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8116 if (GET_MODE (op0) == mode)
8117 ;
8118
8119 /* If OP0 is a constant, just convert it into the proper mode. */
8120 else if (CONSTANT_P (op0))
8121 {
8122 tree inner_type = TREE_TYPE (treeop0);
8123 enum machine_mode inner_mode = GET_MODE (op0);
8124
8125 if (inner_mode == VOIDmode)
8126 inner_mode = TYPE_MODE (inner_type);
8127
8128 if (modifier == EXPAND_INITIALIZER)
8129 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8130 subreg_lowpart_offset (mode,
8131 inner_mode));
8132 else
8133 op0 = convert_modes (mode, inner_mode, op0,
8134 TYPE_UNSIGNED (inner_type));
8135 }
8136
8137 else if (modifier == EXPAND_INITIALIZER)
8138 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8139
8140 else if (target == 0)
8141 op0 = convert_to_mode (mode, op0,
8142 TYPE_UNSIGNED (TREE_TYPE
8143 (treeop0)));
8144 else
8145 {
8146 convert_move (target, op0,
8147 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8148 op0 = target;
8149 }
8150
8151 return REDUCE_BIT_FIELD (op0);
8152
8153 case ADDR_SPACE_CONVERT_EXPR:
8154 {
8155 tree treeop0_type = TREE_TYPE (treeop0);
8156 addr_space_t as_to;
8157 addr_space_t as_from;
8158
8159 gcc_assert (POINTER_TYPE_P (type));
8160 gcc_assert (POINTER_TYPE_P (treeop0_type));
8161
8162 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8163 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8164
8165 /* Conversions between pointers to the same address space should
8166 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8167 gcc_assert (as_to != as_from);
8168
8169 /* Ask target code to handle conversion between pointers
8170 to overlapping address spaces. */
8171 if (targetm.addr_space.subset_p (as_to, as_from)
8172 || targetm.addr_space.subset_p (as_from, as_to))
8173 {
8174 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8175 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8176 gcc_assert (op0);
8177 return op0;
8178 }
8179
8180 /* For disjoint address spaces, converting anything but
8181 a null pointer invokes undefined behaviour. We simply
8182 always return a null pointer here. */
8183 return CONST0_RTX (mode);
8184 }
8185
8186 case POINTER_PLUS_EXPR:
8187 /* Even though the sizetype mode and the pointer's mode can be different,
8188 expand is able to handle this correctly and get the correct result out
8189 of the PLUS_EXPR code. */
8190 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8191 if sizetype precision is smaller than pointer precision. */
8192 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8193 treeop1 = fold_convert_loc (loc, type,
8194 fold_convert_loc (loc, ssizetype,
8195 treeop1));
8196 /* If sizetype precision is larger than pointer precision, truncate the
8197 offset to have matching modes. */
8198 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8199 treeop1 = fold_convert_loc (loc, type, treeop1);
8200
8201 case PLUS_EXPR:
8202 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8203 something else, make sure we add the register to the constant and
8204 then to the other thing. This case can occur during strength
8205 reduction and doing it this way will produce better code if the
8206 frame pointer or argument pointer is eliminated.
8207
8208 fold-const.c will ensure that the constant is always in the inner
8209 PLUS_EXPR, so the only case we need to do anything about is if
8210 sp, ap, or fp is our second argument, in which case we must swap
8211 the innermost first argument and our second argument. */
8212
8213 if (TREE_CODE (treeop0) == PLUS_EXPR
8214 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8215 && TREE_CODE (treeop1) == VAR_DECL
8216 && (DECL_RTL (treeop1) == frame_pointer_rtx
8217 || DECL_RTL (treeop1) == stack_pointer_rtx
8218 || DECL_RTL (treeop1) == arg_pointer_rtx))
8219 {
8220 gcc_unreachable ();
8221 }
8222
8223 /* If the result is to be ptr_mode and we are adding an integer to
8224 something, we might be forming a constant. So try to use
8225 plus_constant. If it produces a sum and we can't accept it,
8226 use force_operand. This allows P = &ARR[const] to generate
8227 efficient code on machines where a SYMBOL_REF is not a valid
8228 address.
8229
8230 If this is an EXPAND_SUM call, always return the sum. */
8231 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8232 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8233 {
8234 if (modifier == EXPAND_STACK_PARM)
8235 target = 0;
8236 if (TREE_CODE (treeop0) == INTEGER_CST
8237 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8238 && TREE_CONSTANT (treeop1))
8239 {
8240 rtx constant_part;
8241
8242 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8243 EXPAND_SUM);
8244 /* Use immed_double_const to ensure that the constant is
8245 truncated according to the mode of OP1, then sign extended
8246 to a HOST_WIDE_INT. Using the constant directly can result
8247 in non-canonical RTL in a 64x32 cross compile. */
8248 constant_part
8249 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8250 (HOST_WIDE_INT) 0,
8251 TYPE_MODE (TREE_TYPE (treeop1)));
8252 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8253 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8254 op1 = force_operand (op1, target);
8255 return REDUCE_BIT_FIELD (op1);
8256 }
8257
8258 else if (TREE_CODE (treeop1) == INTEGER_CST
8259 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8260 && TREE_CONSTANT (treeop0))
8261 {
8262 rtx constant_part;
8263
8264 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8265 (modifier == EXPAND_INITIALIZER
8266 ? EXPAND_INITIALIZER : EXPAND_SUM));
8267 if (! CONSTANT_P (op0))
8268 {
8269 op1 = expand_expr (treeop1, NULL_RTX,
8270 VOIDmode, modifier);
8271 /* Return a PLUS if modifier says it's OK. */
8272 if (modifier == EXPAND_SUM
8273 || modifier == EXPAND_INITIALIZER)
8274 return simplify_gen_binary (PLUS, mode, op0, op1);
8275 goto binop2;
8276 }
8277 /* Use immed_double_const to ensure that the constant is
8278 truncated according to the mode of OP1, then sign extended
8279 to a HOST_WIDE_INT. Using the constant directly can result
8280 in non-canonical RTL in a 64x32 cross compile. */
8281 constant_part
8282 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8283 (HOST_WIDE_INT) 0,
8284 TYPE_MODE (TREE_TYPE (treeop0)));
8285 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8286 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8287 op0 = force_operand (op0, target);
8288 return REDUCE_BIT_FIELD (op0);
8289 }
8290 }
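
      /* Illustrative sketch (hypothetical, echoing the P = &ARR[const]
         example above): with TREEOP0 the constant &ARR and TREEOP1 the
         INTEGER_CST 80, OP0 expands to (symbol_ref "ARR") and
         plus_constant folds the two into roughly

           (const (plus (symbol_ref "ARR") (const_int 80)))

         which is returned as-is under EXPAND_SUM or passed through
         force_operand when the caller needs a legitimate operand.  */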
8291
8292 /* Use TER to expand pointer addition of a negated value
8293 as pointer subtraction. */
8294 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8295 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8296 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8297 && TREE_CODE (treeop1) == SSA_NAME
8298 && TYPE_MODE (TREE_TYPE (treeop0))
8299 == TYPE_MODE (TREE_TYPE (treeop1)))
8300 {
8301 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8302 if (def)
8303 {
8304 treeop1 = gimple_assign_rhs1 (def);
8305 code = MINUS_EXPR;
8306 goto do_minus;
8307 }
8308 }
8309
8310 /* No sense saving up arithmetic to be done
8311 if it's all in the wrong mode to form part of an address.
8312 And force_operand won't know whether to sign-extend or
8313 zero-extend. */
8314 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8315 || mode != ptr_mode)
8316 {
8317 expand_operands (treeop0, treeop1,
8318 subtarget, &op0, &op1, EXPAND_NORMAL);
8319 if (op0 == const0_rtx)
8320 return op1;
8321 if (op1 == const0_rtx)
8322 return op0;
8323 goto binop2;
8324 }
8325
8326 expand_operands (treeop0, treeop1,
8327 subtarget, &op0, &op1, modifier);
8328 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8329
8330 case MINUS_EXPR:
8331 do_minus:
8332 /* For initializers, we are allowed to return a MINUS of two
8333 symbolic constants. Here we handle all cases when both operands
8334 are constant. */
8335 /* Handle difference of two symbolic constants,
8336 for the sake of an initializer. */
8337 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8338 && really_constant_p (treeop0)
8339 && really_constant_p (treeop1))
8340 {
8341 expand_operands (treeop0, treeop1,
8342 NULL_RTX, &op0, &op1, modifier);
8343
8344 /* If the last operand is a CONST_INT, use plus_constant of
8345 the negated constant. Else make the MINUS. */
8346 if (CONST_INT_P (op1))
8347 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8348 -INTVAL (op1)));
8349 else
8350 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8351 }
8352
8353 /* No sense saving up arithmetic to be done
8354 if it's all in the wrong mode to form part of an address.
8355 And force_operand won't know whether to sign-extend or
8356 zero-extend. */
8357 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8358 || mode != ptr_mode)
8359 goto binop;
8360
8361 expand_operands (treeop0, treeop1,
8362 subtarget, &op0, &op1, modifier);
8363
8364 /* Convert A - const to A + (-const). */
8365 if (CONST_INT_P (op1))
8366 {
8367 op1 = negate_rtx (mode, op1);
8368 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8369 }
8370
8371 goto binop2;
8372
8373 case WIDEN_MULT_PLUS_EXPR:
8374 case WIDEN_MULT_MINUS_EXPR:
8375 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8376 op2 = expand_normal (treeop2);
8377 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8378 target, unsignedp);
8379 return target;
8380
8381 case WIDEN_MULT_EXPR:
8382 /* If first operand is constant, swap them.
8383 Thus the following special case checks need only
8384 check the second operand. */
8385 if (TREE_CODE (treeop0) == INTEGER_CST)
8386 {
8387 tree t1 = treeop0;
8388 treeop0 = treeop1;
8389 treeop1 = t1;
8390 }
8391
8392 /* First, check if we have a multiplication of one signed and one
8393 unsigned operand. */
8394 if (TREE_CODE (treeop1) != INTEGER_CST
8395 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8396 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8397 {
8398 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8399 this_optab = usmul_widen_optab;
8400 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8401 != CODE_FOR_nothing)
8402 {
8403 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8404 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8405 EXPAND_NORMAL);
8406 else
8407 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8408 EXPAND_NORMAL);
8409 /* op0 and op1 might still be constant, despite the above
8410 != INTEGER_CST check. Handle it. */
8411 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8412 {
8413 op0 = convert_modes (innermode, mode, op0, true);
8414 op1 = convert_modes (innermode, mode, op1, false);
8415 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8416 target, unsignedp));
8417 }
8418 goto binop3;
8419 }
8420 }
8421 /* Check for a multiplication with matching signedness. */
8422 else if ((TREE_CODE (treeop1) == INTEGER_CST
8423 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8424 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8425 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8426 {
8427 tree op0type = TREE_TYPE (treeop0);
8428 enum machine_mode innermode = TYPE_MODE (op0type);
8429 bool zextend_p = TYPE_UNSIGNED (op0type);
8430 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8431 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8432
8433 if (TREE_CODE (treeop0) != INTEGER_CST)
8434 {
8435 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8436 != CODE_FOR_nothing)
8437 {
8438 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8439 EXPAND_NORMAL);
8440 /* op0 and op1 might still be constant, despite the above
8441 != INTEGER_CST check. Handle it. */
8442 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8443 {
8444 widen_mult_const:
8445 op0 = convert_modes (innermode, mode, op0, zextend_p);
8446 op1
8447 = convert_modes (innermode, mode, op1,
8448 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8449 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8450 target,
8451 unsignedp));
8452 }
8453 temp = expand_widening_mult (mode, op0, op1, target,
8454 unsignedp, this_optab);
8455 return REDUCE_BIT_FIELD (temp);
8456 }
8457 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8458 != CODE_FOR_nothing
8459 && innermode == word_mode)
8460 {
8461 rtx htem, hipart;
8462 op0 = expand_normal (treeop0);
8463 if (TREE_CODE (treeop1) == INTEGER_CST)
8464 op1 = convert_modes (innermode, mode,
8465 expand_normal (treeop1),
8466 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8467 else
8468 op1 = expand_normal (treeop1);
8469 /* op0 and op1 might still be constant, despite the above
8470 != INTEGER_CST check. Handle it. */
8471 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8472 goto widen_mult_const;
8473 temp = expand_binop (mode, other_optab, op0, op1, target,
8474 unsignedp, OPTAB_LIB_WIDEN);
8475 hipart = gen_highpart (innermode, temp);
8476 htem = expand_mult_highpart_adjust (innermode, hipart,
8477 op0, op1, hipart,
8478 zextend_p);
8479 if (htem != hipart)
8480 emit_move_insn (hipart, htem);
8481 return REDUCE_BIT_FIELD (temp);
8482 }
8483 }
8484 }
8485 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8486 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8487 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8488 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8489
8490 case FMA_EXPR:
8491 {
8492 optab opt = fma_optab;
8493 gimple def0, def2;
8494
8495 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8496 call. */
8497 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8498 {
8499 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8500 tree call_expr;
8501
8502 gcc_assert (fn != NULL_TREE);
8503 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8504 return expand_builtin (call_expr, target, subtarget, mode, false);
8505 }
8506
8507 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8508 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8509
8510 op0 = op2 = NULL;
8511
8512 if (def0 && def2
8513 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8514 {
8515 opt = fnms_optab;
8516 op0 = expand_normal (gimple_assign_rhs1 (def0));
8517 op2 = expand_normal (gimple_assign_rhs1 (def2));
8518 }
8519 else if (def0
8520 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8521 {
8522 opt = fnma_optab;
8523 op0 = expand_normal (gimple_assign_rhs1 (def0));
8524 }
8525 else if (def2
8526 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8527 {
8528 opt = fms_optab;
8529 op2 = expand_normal (gimple_assign_rhs1 (def2));
8530 }
8531
8532 if (op0 == NULL)
8533 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8534 if (op2 == NULL)
8535 op2 = expand_normal (treeop2);
8536 op1 = expand_normal (treeop1);
8537
8538 return expand_ternary_op (TYPE_MODE (type), opt,
8539 op0, op1, op2, target, 0);
8540 }
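
      /* Illustrative sketch (hypothetical): for the GIMPLE sequence

           t = -a;
           r = FMA (t, b, c);

         get_def_for_expr recognizes the NEGATE_EXPR feeding operand 0,
         so the block above selects fnma_optab and expands c - a * b as
         a single fused operation when the target provides one, instead
         of negating A separately.  */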
8541
8542 case MULT_EXPR:
8543 /* If this is a fixed-point operation, then we cannot use the code
8544 below because "expand_mult" doesn't support sat/no-sat fixed-point
8545 multiplications. */
8546 if (ALL_FIXED_POINT_MODE_P (mode))
8547 goto binop;
8548
8549 /* If first operand is constant, swap them.
8550 Thus the following special case checks need only
8551 check the second operand. */
8552 if (TREE_CODE (treeop0) == INTEGER_CST)
8553 {
8554 tree t1 = treeop0;
8555 treeop0 = treeop1;
8556 treeop1 = t1;
8557 }
8558
8559 /* Attempt to return something suitable for generating an
8560 indexed address, for machines that support that. */
8561
8562 if (modifier == EXPAND_SUM && mode == ptr_mode
8563 && tree_fits_shwi_p (treeop1))
8564 {
8565 tree exp1 = treeop1;
8566
8567 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8568 EXPAND_SUM);
8569
8570 if (!REG_P (op0))
8571 op0 = force_operand (op0, NULL_RTX);
8572 if (!REG_P (op0))
8573 op0 = copy_to_mode_reg (mode, op0);
8574
8575 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8576 gen_int_mode (tree_to_shwi (exp1),
8577 TYPE_MODE (TREE_TYPE (exp1)))));
8578 }
8579
8580 if (modifier == EXPAND_STACK_PARM)
8581 target = 0;
8582
8583 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8584 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8585
8586 case TRUNC_DIV_EXPR:
8587 case FLOOR_DIV_EXPR:
8588 case CEIL_DIV_EXPR:
8589 case ROUND_DIV_EXPR:
8590 case EXACT_DIV_EXPR:
8591 /* If this is a fixed-point operation, then we cannot use the code
8592 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8593 divisions. */
8594 if (ALL_FIXED_POINT_MODE_P (mode))
8595 goto binop;
8596
8597 if (modifier == EXPAND_STACK_PARM)
8598 target = 0;
8599 /* Possible optimization: compute the dividend with EXPAND_SUM;
8600 then, if the divisor is constant, we can optimize the case
8601 where some terms of the dividend have coefficients divisible by it. */
8602 expand_operands (treeop0, treeop1,
8603 subtarget, &op0, &op1, EXPAND_NORMAL);
8604 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8605
8606 case RDIV_EXPR:
8607 goto binop;
8608
8609 case MULT_HIGHPART_EXPR:
8610 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8611 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8612 gcc_assert (temp);
8613 return temp;
8614
8615 case TRUNC_MOD_EXPR:
8616 case FLOOR_MOD_EXPR:
8617 case CEIL_MOD_EXPR:
8618 case ROUND_MOD_EXPR:
8619 if (modifier == EXPAND_STACK_PARM)
8620 target = 0;
8621 expand_operands (treeop0, treeop1,
8622 subtarget, &op0, &op1, EXPAND_NORMAL);
8623 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8624
8625 case FIXED_CONVERT_EXPR:
8626 op0 = expand_normal (treeop0);
8627 if (target == 0 || modifier == EXPAND_STACK_PARM)
8628 target = gen_reg_rtx (mode);
8629
8630 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8631 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8632 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8633 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8634 else
8635 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8636 return target;
8637
8638 case FIX_TRUNC_EXPR:
8639 op0 = expand_normal (treeop0);
8640 if (target == 0 || modifier == EXPAND_STACK_PARM)
8641 target = gen_reg_rtx (mode);
8642 expand_fix (target, op0, unsignedp);
8643 return target;
8644
8645 case FLOAT_EXPR:
8646 op0 = expand_normal (treeop0);
8647 if (target == 0 || modifier == EXPAND_STACK_PARM)
8648 target = gen_reg_rtx (mode);
8649 /* expand_float can't figure out what to do if FROM has VOIDmode.
8650 So give it the correct mode. With -O, cse will optimize this. */
8651 if (GET_MODE (op0) == VOIDmode)
8652 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8653 op0);
8654 expand_float (target, op0,
8655 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8656 return target;
8657
8658 case NEGATE_EXPR:
8659 op0 = expand_expr (treeop0, subtarget,
8660 VOIDmode, EXPAND_NORMAL);
8661 if (modifier == EXPAND_STACK_PARM)
8662 target = 0;
8663 temp = expand_unop (mode,
8664 optab_for_tree_code (NEGATE_EXPR, type,
8665 optab_default),
8666 op0, target, 0);
8667 gcc_assert (temp);
8668 return REDUCE_BIT_FIELD (temp);
8669
8670 case ABS_EXPR:
8671 op0 = expand_expr (treeop0, subtarget,
8672 VOIDmode, EXPAND_NORMAL);
8673 if (modifier == EXPAND_STACK_PARM)
8674 target = 0;
8675
8676 /* ABS_EXPR is not valid for complex arguments. */
8677 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8678 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8679
8680 /* Unsigned abs is simply the operand. Testing here means we don't
8681 risk generating incorrect code below. */
8682 if (TYPE_UNSIGNED (type))
8683 return op0;
8684
8685 return expand_abs (mode, op0, target, unsignedp,
8686 safe_from_p (target, treeop0, 1));
8687
8688 case MAX_EXPR:
8689 case MIN_EXPR:
8690 target = original_target;
8691 if (target == 0
8692 || modifier == EXPAND_STACK_PARM
8693 || (MEM_P (target) && MEM_VOLATILE_P (target))
8694 || GET_MODE (target) != mode
8695 || (REG_P (target)
8696 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8697 target = gen_reg_rtx (mode);
8698 expand_operands (treeop0, treeop1,
8699 target, &op0, &op1, EXPAND_NORMAL);
8700
8701 /* First try to do it with a special MIN or MAX instruction.
8702 If that does not win, use a conditional jump to select the proper
8703 value. */
8704 this_optab = optab_for_tree_code (code, type, optab_default);
8705 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8706 OPTAB_WIDEN);
8707 if (temp != 0)
8708 return temp;
8709
8710 /* At this point, a MEM target is no longer useful; we will get better
8711 code without it. */
8712
8713 if (! REG_P (target))
8714 target = gen_reg_rtx (mode);
8715
8716 /* If op1 was placed in target, swap op0 and op1. */
8717 if (target != op0 && target == op1)
8718 {
8719 temp = op0;
8720 op0 = op1;
8721 op1 = temp;
8722 }
8723
8724 /* We generate better code and avoid problems with op1 mentioning
8725 target by forcing op1 into a pseudo if it isn't a constant. */
8726 if (! CONSTANT_P (op1))
8727 op1 = force_reg (mode, op1);
8728
8729 {
8730 enum rtx_code comparison_code;
8731 rtx cmpop1 = op1;
8732
8733 if (code == MAX_EXPR)
8734 comparison_code = unsignedp ? GEU : GE;
8735 else
8736 comparison_code = unsignedp ? LEU : LE;
8737
8738 /* Canonicalize to comparisons against 0. */
8739 if (op1 == const1_rtx)
8740 {
8741 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8742 or (a != 0 ? a : 1) for unsigned.
8743 For MIN we are safe converting (a <= 1 ? a : 1)
8744 into (a <= 0 ? a : 1) */
8745 cmpop1 = const0_rtx;
8746 if (code == MAX_EXPR)
8747 comparison_code = unsignedp ? NE : GT;
8748 }
8749 if (op1 == constm1_rtx && !unsignedp)
8750 {
8751 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8752 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8753 cmpop1 = const0_rtx;
8754 if (code == MIN_EXPR)
8755 comparison_code = LT;
8756 }
8757 #ifdef HAVE_conditional_move
8758 /* Use a conditional move if possible. */
8759 if (can_conditionally_move_p (mode))
8760 {
8761 rtx insn;
8762
8763 /* ??? Same problem as in expmed.c: emit_conditional_move
8764 forces a stack adjustment via compare_from_rtx, and we
8765 lose the stack adjustment if the sequence we are about
8766 to create is discarded. */
8767 do_pending_stack_adjust ();
8768
8769 start_sequence ();
8770
8771 /* Try to emit the conditional move. */
8772 insn = emit_conditional_move (target, comparison_code,
8773 op0, cmpop1, mode,
8774 op0, op1, mode,
8775 unsignedp);
8776
8777 /* If we could do the conditional move, emit the sequence,
8778 and return. */
8779 if (insn)
8780 {
8781 rtx seq = get_insns ();
8782 end_sequence ();
8783 emit_insn (seq);
8784 return target;
8785 }
8786
8787 /* Otherwise discard the sequence and fall back to code with
8788 branches. */
8789 end_sequence ();
8790 }
8791 #endif
8792 if (target != op0)
8793 emit_move_insn (target, op0);
8794
8795 temp = gen_label_rtx ();
8796 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8797 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8798 -1);
8799 }
8800 emit_move_insn (target, op1);
8801 emit_label (temp);
8802 return target;
8803
8804 case BIT_NOT_EXPR:
8805 op0 = expand_expr (treeop0, subtarget,
8806 VOIDmode, EXPAND_NORMAL);
8807 if (modifier == EXPAND_STACK_PARM)
8808 target = 0;
8809 /* In case we have to reduce the result to bitfield precision
8810 for an unsigned bitfield, expand this as XOR with a proper constant
8811 instead. */
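	/* E.g. for a 3-bit unsigned bit-field the mask is 0x7, so the XOR
	   both complements the value and keeps it within the bit-field's
	   precision.  */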
8812 if (reduce_bit_field && TYPE_UNSIGNED (type))
8813 temp = expand_binop (mode, xor_optab, op0,
8814 immed_double_int_const
8815 (double_int::mask (TYPE_PRECISION (type)), mode),
8816 target, 1, OPTAB_LIB_WIDEN);
8817 else
8818 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8819 gcc_assert (temp);
8820 return temp;
8821
8822 /* ??? Can optimize bitwise operations with one arg constant.
8823 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8824 and (a bitwise1 b) bitwise2 b (etc)
8825 but that is probably not worth while. */
8826
8827 case BIT_AND_EXPR:
8828 case BIT_IOR_EXPR:
8829 case BIT_XOR_EXPR:
8830 goto binop;
8831
8832 case LROTATE_EXPR:
8833 case RROTATE_EXPR:
8834 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8835 || (GET_MODE_PRECISION (TYPE_MODE (type))
8836 == TYPE_PRECISION (type)));
8837 /* fall through */
8838
8839 case LSHIFT_EXPR:
8840 case RSHIFT_EXPR:
8841 /* If this is a fixed-point operation, then we cannot use the code
8842 below because "expand_shift" doesn't support sat/no-sat fixed-point
8843 shifts. */
8844 if (ALL_FIXED_POINT_MODE_P (mode))
8845 goto binop;
8846
8847 if (! safe_from_p (subtarget, treeop1, 1))
8848 subtarget = 0;
8849 if (modifier == EXPAND_STACK_PARM)
8850 target = 0;
8851 op0 = expand_expr (treeop0, subtarget,
8852 VOIDmode, EXPAND_NORMAL);
8853 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8854 unsignedp);
8855 if (code == LSHIFT_EXPR)
8856 temp = REDUCE_BIT_FIELD (temp);
8857 return temp;
8858
8859 /* Could determine the answer when only additive constants differ. Also,
8860 the addition of one can be handled by changing the condition. */
8861 case LT_EXPR:
8862 case LE_EXPR:
8863 case GT_EXPR:
8864 case GE_EXPR:
8865 case EQ_EXPR:
8866 case NE_EXPR:
8867 case UNORDERED_EXPR:
8868 case ORDERED_EXPR:
8869 case UNLT_EXPR:
8870 case UNLE_EXPR:
8871 case UNGT_EXPR:
8872 case UNGE_EXPR:
8873 case UNEQ_EXPR:
8874 case LTGT_EXPR:
8875 temp = do_store_flag (ops,
8876 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8877 tmode != VOIDmode ? tmode : mode);
8878 if (temp)
8879 return temp;
8880
8881 /* Use a compare and a jump for BLKmode comparisons, or for function
8882 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8883
8884 if ((target == 0
8885 || modifier == EXPAND_STACK_PARM
8886 || ! safe_from_p (target, treeop0, 1)
8887 || ! safe_from_p (target, treeop1, 1)
8888 /* Make sure we don't have a hard reg (such as function's return
8889 value) live across basic blocks, if not optimizing. */
8890 || (!optimize && REG_P (target)
8891 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8892 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8893
8894 emit_move_insn (target, const0_rtx);
8895
8896 op1 = gen_label_rtx ();
8897 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8898
8899 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8900 emit_move_insn (target, constm1_rtx);
8901 else
8902 emit_move_insn (target, const1_rtx);
8903
8904 emit_label (op1);
8905 return target;
8906
8907 case COMPLEX_EXPR:
8908 /* Get the rtx code of the operands. */
8909 op0 = expand_normal (treeop0);
8910 op1 = expand_normal (treeop1);
8911
8912 if (!target)
8913 target = gen_reg_rtx (TYPE_MODE (type));
8914 else
8915 /* If target overlaps with op1, then either we need to force
8916 op1 into a pseudo (if target also overlaps with op0),
8917 or write the complex parts in reverse order. */
8918 switch (GET_CODE (target))
8919 {
8920 case CONCAT:
8921 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8922 {
8923 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8924 {
8925 complex_expr_force_op1:
8926 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8927 emit_move_insn (temp, op1);
8928 op1 = temp;
8929 break;
8930 }
8931 complex_expr_swap_order:
8932 /* Move the imaginary (op1) and real (op0) parts to their
8933 location. */
8934 write_complex_part (target, op1, true);
8935 write_complex_part (target, op0, false);
8936
8937 return target;
8938 }
8939 break;
8940 case MEM:
8941 temp = adjust_address_nv (target,
8942 GET_MODE_INNER (GET_MODE (target)), 0);
8943 if (reg_overlap_mentioned_p (temp, op1))
8944 {
8945 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8946 temp = adjust_address_nv (target, imode,
8947 GET_MODE_SIZE (imode));
8948 if (reg_overlap_mentioned_p (temp, op0))
8949 goto complex_expr_force_op1;
8950 goto complex_expr_swap_order;
8951 }
8952 break;
8953 default:
8954 if (reg_overlap_mentioned_p (target, op1))
8955 {
8956 if (reg_overlap_mentioned_p (target, op0))
8957 goto complex_expr_force_op1;
8958 goto complex_expr_swap_order;
8959 }
8960 break;
8961 }
8962
8963 /* Move the real (op0) and imaginary (op1) parts to their location. */
8964 write_complex_part (target, op0, false);
8965 write_complex_part (target, op1, true);
8966
8967 return target;
8968
8969 case WIDEN_SUM_EXPR:
8970 {
8971 tree oprnd0 = treeop0;
8972 tree oprnd1 = treeop1;
8973
8974 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8975 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8976 target, unsignedp);
8977 return target;
8978 }
8979
8980 case REDUC_MAX_EXPR:
8981 case REDUC_MIN_EXPR:
8982 case REDUC_PLUS_EXPR:
8983 {
8984 op0 = expand_normal (treeop0);
8985 this_optab = optab_for_tree_code (code, type, optab_default);
8986 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8987 gcc_assert (temp);
8988 return temp;
8989 }
8990
8991 case VEC_LSHIFT_EXPR:
8992 case VEC_RSHIFT_EXPR:
8993 {
8994 target = expand_vec_shift_expr (ops, target);
8995 return target;
8996 }
8997
8998 case VEC_UNPACK_HI_EXPR:
8999 case VEC_UNPACK_LO_EXPR:
9000 {
9001 op0 = expand_normal (treeop0);
9002 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9003 target, unsignedp);
9004 gcc_assert (temp);
9005 return temp;
9006 }
9007
9008 case VEC_UNPACK_FLOAT_HI_EXPR:
9009 case VEC_UNPACK_FLOAT_LO_EXPR:
9010 {
9011 op0 = expand_normal (treeop0);
9012 /* The signedness is determined from the input operand. */
9013 temp = expand_widen_pattern_expr
9014 (ops, op0, NULL_RTX, NULL_RTX,
9015 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9016
9017 gcc_assert (temp);
9018 return temp;
9019 }
9020
9021 case VEC_WIDEN_MULT_HI_EXPR:
9022 case VEC_WIDEN_MULT_LO_EXPR:
9023 case VEC_WIDEN_MULT_EVEN_EXPR:
9024 case VEC_WIDEN_MULT_ODD_EXPR:
9025 case VEC_WIDEN_LSHIFT_HI_EXPR:
9026 case VEC_WIDEN_LSHIFT_LO_EXPR:
9027 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9028 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9029 target, unsignedp);
9030 gcc_assert (target);
9031 return target;
9032
9033 case VEC_PACK_TRUNC_EXPR:
9034 case VEC_PACK_SAT_EXPR:
9035 case VEC_PACK_FIX_TRUNC_EXPR:
9036 mode = TYPE_MODE (TREE_TYPE (treeop0));
9037 goto binop;
9038
9039 case VEC_PERM_EXPR:
9040 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9041 op2 = expand_normal (treeop2);
9042
9043 /* Careful here: if the target doesn't support integral vector modes,
9044 a constant selection vector could wind up smooshed into a normal
9045 integral constant. */
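	/* In that case the selector, e.g. a four-element QImode vector, may
	   have been folded into a single integer constant; simplify_subreg
	   below recovers the CONST_VECTOR form that expand_vec_perm
	   expects.  */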
9046 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9047 {
9048 tree sel_type = TREE_TYPE (treeop2);
9049 enum machine_mode vmode
9050 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9051 TYPE_VECTOR_SUBPARTS (sel_type));
9052 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9053 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9054 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9055 }
9056 else
9057 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9058
9059 temp = expand_vec_perm (mode, op0, op1, op2, target);
9060 gcc_assert (temp);
9061 return temp;
9062
9063 case DOT_PROD_EXPR:
9064 {
9065 tree oprnd0 = treeop0;
9066 tree oprnd1 = treeop1;
9067 tree oprnd2 = treeop2;
9068 rtx op2;
9069
9070 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9071 op2 = expand_normal (oprnd2);
9072 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9073 target, unsignedp);
9074 return target;
9075 }
9076
9077 case REALIGN_LOAD_EXPR:
9078 {
9079 tree oprnd0 = treeop0;
9080 tree oprnd1 = treeop1;
9081 tree oprnd2 = treeop2;
9082 rtx op2;
9083
9084 this_optab = optab_for_tree_code (code, type, optab_default);
9085 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9086 op2 = expand_normal (oprnd2);
9087 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9088 target, unsignedp);
9089 gcc_assert (temp);
9090 return temp;
9091 }
9092
9093 case COND_EXPR:
9094 /* A COND_EXPR with its type being VOID_TYPE represents a
9095 conditional jump and is handled in
9096 expand_gimple_cond_expr. */
9097 gcc_assert (!VOID_TYPE_P (type));
9098
9099 /* Note that COND_EXPRs whose type is a structure or union
9100 are required to be constructed to contain assignments of
9101 a temporary variable, so that we can evaluate them here
9102 for side effect only. If type is void, we must do likewise. */
9103
9104 gcc_assert (!TREE_ADDRESSABLE (type)
9105 && !ignore
9106 && TREE_TYPE (treeop1) != void_type_node
9107 && TREE_TYPE (treeop2) != void_type_node);
9108
9109 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9110 if (temp)
9111 return temp;
9112
9113 /* If we are not to produce a result, we have no target. Otherwise,
9114 if a target was specified use it; it will not be used as an
9115 intermediate target unless it is safe. If no target, use a
9116 temporary. */
9117
9118 if (modifier != EXPAND_STACK_PARM
9119 && original_target
9120 && safe_from_p (original_target, treeop0, 1)
9121 && GET_MODE (original_target) == mode
9122 && !MEM_P (original_target))
9123 temp = original_target;
9124 else
9125 temp = assign_temp (type, 0, 1);
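      /* The branch sequence emitted below has the shape
	     if (!cond) goto op0;
	     temp = treeop1;  goto op1;
	   op0:
	     temp = treeop2;
	   op1:  */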
9126
9127 do_pending_stack_adjust ();
9128 NO_DEFER_POP;
9129 op0 = gen_label_rtx ();
9130 op1 = gen_label_rtx ();
9131 jumpifnot (treeop0, op0, -1);
9132 store_expr (treeop1, temp,
9133 modifier == EXPAND_STACK_PARM,
9134 false);
9135
9136 emit_jump_insn (gen_jump (op1));
9137 emit_barrier ();
9138 emit_label (op0);
9139 store_expr (treeop2, temp,
9140 modifier == EXPAND_STACK_PARM,
9141 false);
9142
9143 emit_label (op1);
9144 OK_DEFER_POP;
9145 return temp;
9146
9147 case VEC_COND_EXPR:
9148 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9149 return target;
9150
9151 default:
9152 gcc_unreachable ();
9153 }
9154
9155 /* Here to do an ordinary binary operator. */
9156 binop:
9157 expand_operands (treeop0, treeop1,
9158 subtarget, &op0, &op1, EXPAND_NORMAL);
9159 binop2:
9160 this_optab = optab_for_tree_code (code, type, optab_default);
9161 binop3:
9162 if (modifier == EXPAND_STACK_PARM)
9163 target = 0;
9164 temp = expand_binop (mode, this_optab, op0, op1, target,
9165 unsignedp, OPTAB_LIB_WIDEN);
9166 gcc_assert (temp);
9167 /* Bitwise operations do not need bitfield reduction as we expect their
9168 operands being properly truncated. */
9169 if (code == BIT_XOR_EXPR
9170 || code == BIT_AND_EXPR
9171 || code == BIT_IOR_EXPR)
9172 return temp;
9173 return REDUCE_BIT_FIELD (temp);
9174 }
9175 #undef REDUCE_BIT_FIELD
9176
9177
9178 /* Return TRUE if expression STMT is suitable for replacement.
9179 Never consider memory loads as replaceable, because those don't ever lead
9180 into constant expressions. */
9181
9182 static bool
9183 stmt_is_replaceable_p (gimple stmt)
9184 {
9185 if (ssa_is_replaceable_p (stmt))
9186 {
9187 /* Don't move around loads. */
9188 if (!gimple_assign_single_p (stmt)
9189 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9190 return true;
9191 }
9192 return false;
9193 }
9194
9195 rtx
9196 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9197 enum expand_modifier modifier, rtx *alt_rtl)
9198 {
9199 rtx op0, op1, temp, decl_rtl;
9200 tree type;
9201 int unsignedp;
9202 enum machine_mode mode;
9203 enum tree_code code = TREE_CODE (exp);
9204 rtx subtarget, original_target;
9205 int ignore;
9206 tree context;
9207 bool reduce_bit_field;
9208 location_t loc = EXPR_LOCATION (exp);
9209 struct separate_ops ops;
9210 tree treeop0, treeop1, treeop2;
9211 tree ssa_name = NULL_TREE;
9212 gimple g;
9213
9214 type = TREE_TYPE (exp);
9215 mode = TYPE_MODE (type);
9216 unsignedp = TYPE_UNSIGNED (type);
9217
9218 treeop0 = treeop1 = treeop2 = NULL_TREE;
9219 if (!VL_EXP_CLASS_P (exp))
9220 switch (TREE_CODE_LENGTH (code))
9221 {
9222 default:
9223 case 3: treeop2 = TREE_OPERAND (exp, 2);
9224 case 2: treeop1 = TREE_OPERAND (exp, 1);
9225 case 1: treeop0 = TREE_OPERAND (exp, 0);
9226 case 0: break;
9227 }
9228 ops.code = code;
9229 ops.type = type;
9230 ops.op0 = treeop0;
9231 ops.op1 = treeop1;
9232 ops.op2 = treeop2;
9233 ops.location = loc;
9234
9235 ignore = (target == const0_rtx
9236 || ((CONVERT_EXPR_CODE_P (code)
9237 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9238 && TREE_CODE (type) == VOID_TYPE));
9239
9240 /* An operation in what may be a bit-field type needs the
9241 result to be reduced to the precision of the bit-field type,
9242 which is narrower than that of the type's mode. */
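  /* E.g. a 3-bit bit-field whose TYPE_MODE is QImode: the mode precision
     is 8 while TYPE_PRECISION is 3.  */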
9243 reduce_bit_field = (!ignore
9244 && INTEGRAL_TYPE_P (type)
9245 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9246
9247 /* If we are going to ignore this result, we need only do something
9248 if there is a side-effect somewhere in the expression. If there
9249 is, short-circuit the most common cases here. Note that we must
9250 not call expand_expr with anything but const0_rtx in case this
9251 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9252
9253 if (ignore)
9254 {
9255 if (! TREE_SIDE_EFFECTS (exp))
9256 return const0_rtx;
9257
9258 /* Ensure we reference a volatile object even if value is ignored, but
9259 don't do this if all we are doing is taking its address. */
9260 if (TREE_THIS_VOLATILE (exp)
9261 && TREE_CODE (exp) != FUNCTION_DECL
9262 && mode != VOIDmode && mode != BLKmode
9263 && modifier != EXPAND_CONST_ADDRESS)
9264 {
9265 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9266 if (MEM_P (temp))
9267 copy_to_reg (temp);
9268 return const0_rtx;
9269 }
9270
9271 if (TREE_CODE_CLASS (code) == tcc_unary
9272 || code == BIT_FIELD_REF
9273 || code == COMPONENT_REF
9274 || code == INDIRECT_REF)
9275 return expand_expr (treeop0, const0_rtx, VOIDmode,
9276 modifier);
9277
9278 else if (TREE_CODE_CLASS (code) == tcc_binary
9279 || TREE_CODE_CLASS (code) == tcc_comparison
9280 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9281 {
9282 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9283 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9284 return const0_rtx;
9285 }
9286
9287 target = 0;
9288 }
9289
9290 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9291 target = 0;
9292
9293 /* Use subtarget as the target for operand 0 of a binary operation. */
9294 subtarget = get_subtarget (target);
9295 original_target = target;
9296
9297 switch (code)
9298 {
9299 case LABEL_DECL:
9300 {
9301 tree function = decl_function_context (exp);
9302
9303 temp = label_rtx (exp);
9304 temp = gen_rtx_LABEL_REF (Pmode, temp);
9305
9306 if (function != current_function_decl
9307 && function != 0)
9308 LABEL_REF_NONLOCAL_P (temp) = 1;
9309
9310 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9311 return temp;
9312 }
9313
9314 case SSA_NAME:
9315 /* ??? ivopts calls expander, without any preparation from
9316 out-of-ssa. So fake instructions as if this was an access to the
9317 base variable. This unnecessarily allocates a pseudo, see how we can
9318 reuse it, if partition base vars have it set already. */
9319 if (!currently_expanding_to_rtl)
9320 {
9321 tree var = SSA_NAME_VAR (exp);
9322 if (var && DECL_RTL_SET_P (var))
9323 return DECL_RTL (var);
9324 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9325 LAST_VIRTUAL_REGISTER + 1);
9326 }
9327
9328 g = get_gimple_for_ssa_name (exp);
9329 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9330 if (g == NULL
9331 && modifier == EXPAND_INITIALIZER
9332 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9333 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9334 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9335 g = SSA_NAME_DEF_STMT (exp);
9336 if (g)
9337 {
9338 rtx r;
9339 location_t saved_loc = curr_insn_location ();
9340
9341 set_curr_insn_location (gimple_location (g));
9342 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9343 tmode, modifier, NULL);
9344 set_curr_insn_location (saved_loc);
9345 if (REG_P (r) && !REG_EXPR (r))
9346 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9347 return r;
9348 }
9349
9350 ssa_name = exp;
9351 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9352 exp = SSA_NAME_VAR (ssa_name);
9353 goto expand_decl_rtl;
9354
9355 case PARM_DECL:
9356 case VAR_DECL:
9357 /* If a static var's type was incomplete when the decl was written,
9358 but the type is complete now, lay out the decl now. */
9359 if (DECL_SIZE (exp) == 0
9360 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9361 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9362 layout_decl (exp, 0);
9363
9364 /* ... fall through ... */
9365
9366 case FUNCTION_DECL:
9367 case RESULT_DECL:
9368 decl_rtl = DECL_RTL (exp);
9369 expand_decl_rtl:
9370 gcc_assert (decl_rtl);
9371 decl_rtl = copy_rtx (decl_rtl);
9372 /* Record writes to register variables. */
9373 if (modifier == EXPAND_WRITE
9374 && REG_P (decl_rtl)
9375 && HARD_REGISTER_P (decl_rtl))
9376 add_to_hard_reg_set (&crtl->asm_clobbers,
9377 GET_MODE (decl_rtl), REGNO (decl_rtl));
9378
9379 /* Ensure variable marked as used even if it doesn't go through
9380 a parser. If it hasn't been used yet, write out an external
9381 definition. */
9382 TREE_USED (exp) = 1;
9383
9384 /* Show we haven't gotten RTL for this yet. */
9385 temp = 0;
9386
9387 /* Variables inherited from containing functions should have
9388 been lowered by this point. */
9389 context = decl_function_context (exp);
9390 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9391 || context == current_function_decl
9392 || TREE_STATIC (exp)
9393 || DECL_EXTERNAL (exp)
9394 /* ??? C++ creates functions that are not TREE_STATIC. */
9395 || TREE_CODE (exp) == FUNCTION_DECL);
9396
9397 /* This is the case of an array whose size is to be determined
9398 from its initializer, while the initializer is still being parsed.
9399 ??? We aren't parsing while expanding anymore. */
9400
9401 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9402 temp = validize_mem (decl_rtl);
9403
9404 /* If DECL_RTL is memory, we are in the normal case and the
9405 address is not valid, get the address into a register. */
9406
9407 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9408 {
9409 if (alt_rtl)
9410 *alt_rtl = decl_rtl;
9411 decl_rtl = use_anchored_address (decl_rtl);
9412 if (modifier != EXPAND_CONST_ADDRESS
9413 && modifier != EXPAND_SUM
9414 && !memory_address_addr_space_p (DECL_MODE (exp),
9415 XEXP (decl_rtl, 0),
9416 MEM_ADDR_SPACE (decl_rtl)))
9417 temp = replace_equiv_address (decl_rtl,
9418 copy_rtx (XEXP (decl_rtl, 0)));
9419 }
9420
9421 /* If we got something, return it. But first, set the alignment
9422 if the address is a register. */
9423 if (temp != 0)
9424 {
9425 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9426 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9427
9428 return temp;
9429 }
9430
9431 /* If the mode of DECL_RTL does not match that of the decl,
9432 there are two cases: we are dealing with a BLKmode value
9433 that is returned in a register, or we are dealing with
9434 a promoted value. In the latter case, return a SUBREG
9435 of the wanted mode, but mark it so that we know that it
9436 was already extended. */
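    /* For example, a signed char variable may be promoted to an SImode
       register on targets that promote modes; we then return a SUBREG:QI
       of that register with SUBREG_PROMOTED_VAR_P set so callers can
       avoid redundant extensions.  */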
9437 if (REG_P (decl_rtl)
9438 && DECL_MODE (exp) != BLKmode
9439 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9440 {
9441 enum machine_mode pmode;
9442
9443 /* Get the signedness to be used for this variable. Ensure we get
9444 the same mode we got when the variable was declared. */
9445 if (code == SSA_NAME
9446 && (g = SSA_NAME_DEF_STMT (ssa_name))
9447 && gimple_code (g) == GIMPLE_CALL)
9448 {
9449 gcc_assert (!gimple_call_internal_p (g));
9450 pmode = promote_function_mode (type, mode, &unsignedp,
9451 gimple_call_fntype (g),
9452 2);
9453 }
9454 else
9455 pmode = promote_decl_mode (exp, &unsignedp);
9456 gcc_assert (GET_MODE (decl_rtl) == pmode);
9457
9458 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9459 SUBREG_PROMOTED_VAR_P (temp) = 1;
9460 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9461 return temp;
9462 }
9463
9464 return decl_rtl;
9465
9466 case INTEGER_CST:
9467 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9468 TREE_INT_CST_HIGH (exp), mode);
9469
9470 return temp;
9471
9472 case VECTOR_CST:
9473 {
9474 tree tmp = NULL_TREE;
9475 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9476 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9477 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9478 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9479 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9480 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9481 return const_vector_from_tree (exp);
9482 if (GET_MODE_CLASS (mode) == MODE_INT)
9483 {
9484 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9485 if (type_for_mode)
9486 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9487 }
9488 if (!tmp)
9489 {
9490 vec<constructor_elt, va_gc> *v;
9491 unsigned i;
9492 vec_alloc (v, VECTOR_CST_NELTS (exp));
9493 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9494 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9495 tmp = build_constructor (type, v);
9496 }
9497 return expand_expr (tmp, ignore ? const0_rtx : target,
9498 tmode, modifier);
9499 }
9500
9501 case CONST_DECL:
9502 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9503
9504 case REAL_CST:
9505 /* If optimized, generate immediate CONST_DOUBLE
9506 which will be turned into memory by reload if necessary.
9507
9508 We used to force a register so that loop.c could see it. But
9509 this does not allow gen_* patterns to perform optimizations with
9510 the constants. It also produces two insns in cases like "x = 1.0;".
9511 On most machines, floating-point constants are not permitted in
9512 many insns, so we'd end up copying it to a register in any case.
9513
9514 Now, we do the copying in expand_binop, if appropriate. */
9515 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9516 TYPE_MODE (TREE_TYPE (exp)));
9517
9518 case FIXED_CST:
9519 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9520 TYPE_MODE (TREE_TYPE (exp)));
9521
9522 case COMPLEX_CST:
9523 /* Handle evaluating a complex constant in a CONCAT target. */
9524 if (original_target && GET_CODE (original_target) == CONCAT)
9525 {
9526 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9527 rtx rtarg, itarg;
9528
9529 rtarg = XEXP (original_target, 0);
9530 itarg = XEXP (original_target, 1);
9531
9532 /* Move the real and imaginary parts separately. */
9533 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9534 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9535
9536 if (op0 != rtarg)
9537 emit_move_insn (rtarg, op0);
9538 if (op1 != itarg)
9539 emit_move_insn (itarg, op1);
9540
9541 return original_target;
9542 }
9543
9544 /* ... fall through ... */
9545
9546 case STRING_CST:
9547 temp = expand_expr_constant (exp, 1, modifier);
9548
9549 /* temp contains a constant address.
9550 On RISC machines where a constant address isn't valid,
9551 make some insns to get that address into a register. */
9552 if (modifier != EXPAND_CONST_ADDRESS
9553 && modifier != EXPAND_INITIALIZER
9554 && modifier != EXPAND_SUM
9555 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9556 MEM_ADDR_SPACE (temp)))
9557 return replace_equiv_address (temp,
9558 copy_rtx (XEXP (temp, 0)));
9559 return temp;
9560
9561 case SAVE_EXPR:
9562 {
9563 tree val = treeop0;
9564 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9565
9566 if (!SAVE_EXPR_RESOLVED_P (exp))
9567 {
9568 /* We can indeed still hit this case, typically via builtin
9569 expanders calling save_expr immediately before expanding
9570 something. Assume this means that we only have to deal
9571 with non-BLKmode values. */
9572 gcc_assert (GET_MODE (ret) != BLKmode);
9573
9574 val = build_decl (curr_insn_location (),
9575 VAR_DECL, NULL, TREE_TYPE (exp));
9576 DECL_ARTIFICIAL (val) = 1;
9577 DECL_IGNORED_P (val) = 1;
9578 treeop0 = val;
9579 TREE_OPERAND (exp, 0) = treeop0;
9580 SAVE_EXPR_RESOLVED_P (exp) = 1;
9581
9582 if (!CONSTANT_P (ret))
9583 ret = copy_to_reg (ret);
9584 SET_DECL_RTL (val, ret);
9585 }
9586
9587 return ret;
9588 }
9589
9590
9591 case CONSTRUCTOR:
9592 /* If we don't need the result, just ensure we evaluate any
9593 subexpressions. */
9594 if (ignore)
9595 {
9596 unsigned HOST_WIDE_INT idx;
9597 tree value;
9598
9599 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9600 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9601
9602 return const0_rtx;
9603 }
9604
9605 return expand_constructor (exp, target, modifier, false);
9606
9607 case TARGET_MEM_REF:
9608 {
9609 addr_space_t as
9610 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9611 enum insn_code icode;
9612 unsigned int align;
9613
9614 op0 = addr_for_mem_ref (exp, as, true);
9615 op0 = memory_address_addr_space (mode, op0, as);
9616 temp = gen_rtx_MEM (mode, op0);
9617 set_mem_attributes (temp, exp, 0);
9618 set_mem_addr_space (temp, as);
9619 align = get_object_alignment (exp);
9620 if (modifier != EXPAND_WRITE
9621 && modifier != EXPAND_MEMORY
9622 && mode != BLKmode
9623 && align < GET_MODE_ALIGNMENT (mode)
9624 /* If the target does not have special handling for unaligned
9625 loads of mode then it can use regular moves for them. */
9626 && ((icode = optab_handler (movmisalign_optab, mode))
9627 != CODE_FOR_nothing))
9628 {
9629 struct expand_operand ops[2];
9630
9631 /* We've already validated the memory, and we're creating a
9632 new pseudo destination. The predicates really can't fail,
9633 nor can the generator. */
9634 create_output_operand (&ops[0], NULL_RTX, mode);
9635 create_fixed_operand (&ops[1], temp);
9636 expand_insn (icode, 2, ops);
9637 temp = ops[0].value;
9638 }
9639 return temp;
9640 }
9641
9642 case MEM_REF:
9643 {
9644 addr_space_t as
9645 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9646 enum machine_mode address_mode;
9647 tree base = TREE_OPERAND (exp, 0);
9648 gimple def_stmt;
9649 enum insn_code icode;
9650 unsigned align;
9651 /* Handle expansion of non-aliased memory with non-BLKmode. That
9652 might end up in a register. */
9653 if (mem_ref_refers_to_non_mem_p (exp))
9654 {
9655 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9656 base = TREE_OPERAND (base, 0);
9657 if (offset == 0
9658 && tree_fits_uhwi_p (TYPE_SIZE (type))
9659 && (GET_MODE_BITSIZE (DECL_MODE (base))
9660 == tree_to_uhwi (TYPE_SIZE (type))))
9661 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9662 target, tmode, modifier);
9663 if (TYPE_MODE (type) == BLKmode)
9664 {
9665 temp = assign_stack_temp (DECL_MODE (base),
9666 GET_MODE_SIZE (DECL_MODE (base)));
9667 store_expr (base, temp, 0, false);
9668 temp = adjust_address (temp, BLKmode, offset);
9669 set_mem_size (temp, int_size_in_bytes (type));
9670 return temp;
9671 }
9672 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9673 bitsize_int (offset * BITS_PER_UNIT));
9674 return expand_expr (exp, target, tmode, modifier);
9675 }
9676 address_mode = targetm.addr_space.address_mode (as);
9677 base = TREE_OPERAND (exp, 0);
9678 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9679 {
9680 tree mask = gimple_assign_rhs2 (def_stmt);
9681 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9682 gimple_assign_rhs1 (def_stmt), mask);
9683 TREE_OPERAND (exp, 0) = base;
9684 }
9685 align = get_object_alignment (exp);
9686 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9687 op0 = memory_address_addr_space (mode, op0, as);
9688 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9689 {
9690 rtx off
9691 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9692 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9693 op0 = memory_address_addr_space (mode, op0, as);
9694 }
9695 temp = gen_rtx_MEM (mode, op0);
9696 set_mem_attributes (temp, exp, 0);
9697 set_mem_addr_space (temp, as);
9698 if (TREE_THIS_VOLATILE (exp))
9699 MEM_VOLATILE_P (temp) = 1;
9700 if (modifier != EXPAND_WRITE
9701 && modifier != EXPAND_MEMORY
9702 && mode != BLKmode
9703 && align < GET_MODE_ALIGNMENT (mode))
9704 {
9705 if ((icode = optab_handler (movmisalign_optab, mode))
9706 != CODE_FOR_nothing)
9707 {
9708 struct expand_operand ops[2];
9709
9710 /* We've already validated the memory, and we're creating a
9711 new pseudo destination. The predicates really can't fail,
9712 nor can the generator. */
9713 create_output_operand (&ops[0], NULL_RTX, mode);
9714 create_fixed_operand (&ops[1], temp);
9715 expand_insn (icode, 2, ops);
9716 temp = ops[0].value;
9717 }
9718 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9719 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9720 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9721 (modifier == EXPAND_STACK_PARM
9722 ? NULL_RTX : target),
9723 mode, mode);
9724 }
9725 return temp;
9726 }
9727
9728 case ARRAY_REF:
9729
9730 {
9731 tree array = treeop0;
9732 tree index = treeop1;
9733 tree init;
9734
9735 /* Fold an expression like: "foo"[2].
9736 This is not done in fold so it won't happen inside &.
9737 Don't fold if this is for wide characters since it's too
9738 difficult to do correctly and this is a very rare case. */
9739
9740 if (modifier != EXPAND_CONST_ADDRESS
9741 && modifier != EXPAND_INITIALIZER
9742 && modifier != EXPAND_MEMORY)
9743 {
9744 tree t = fold_read_from_constant_string (exp);
9745
9746 if (t)
9747 return expand_expr (t, target, tmode, modifier);
9748 }
9749
9750 /* If this is a constant index into a constant array,
9751 just get the value from the array. Handle both the cases when
9752 we have an explicit constructor and when our operand is a variable
9753 that was declared const. */
9754
9755 if (modifier != EXPAND_CONST_ADDRESS
9756 && modifier != EXPAND_INITIALIZER
9757 && modifier != EXPAND_MEMORY
9758 && TREE_CODE (array) == CONSTRUCTOR
9759 && ! TREE_SIDE_EFFECTS (array)
9760 && TREE_CODE (index) == INTEGER_CST)
9761 {
9762 unsigned HOST_WIDE_INT ix;
9763 tree field, value;
9764
9765 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9766 field, value)
9767 if (tree_int_cst_equal (field, index))
9768 {
9769 if (!TREE_SIDE_EFFECTS (value))
9770 return expand_expr (fold (value), target, tmode, modifier);
9771 break;
9772 }
9773 }
9774
9775 else if (optimize >= 1
9776 && modifier != EXPAND_CONST_ADDRESS
9777 && modifier != EXPAND_INITIALIZER
9778 && modifier != EXPAND_MEMORY
9779 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9780 && TREE_CODE (index) == INTEGER_CST
9781 && (TREE_CODE (array) == VAR_DECL
9782 || TREE_CODE (array) == CONST_DECL)
9783 && (init = ctor_for_folding (array)) != error_mark_node)
9784 {
9785 if (TREE_CODE (init) == CONSTRUCTOR)
9786 {
9787 unsigned HOST_WIDE_INT ix;
9788 tree field, value;
9789
9790 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9791 field, value)
9792 if (tree_int_cst_equal (field, index))
9793 {
9794 if (TREE_SIDE_EFFECTS (value))
9795 break;
9796
9797 if (TREE_CODE (value) == CONSTRUCTOR)
9798 {
9799 /* If VALUE is a CONSTRUCTOR, this
9800 optimization is only useful if
9801 this doesn't store the CONSTRUCTOR
9802 into memory. If it does, it is more
9803 efficient to just load the data from
9804 the array directly. */
9805 rtx ret = expand_constructor (value, target,
9806 modifier, true);
9807 if (ret == NULL_RTX)
9808 break;
9809 }
9810
9811 return
9812 expand_expr (fold (value), target, tmode, modifier);
9813 }
9814 }
9815 else if (TREE_CODE (init) == STRING_CST)
9816 {
9817 tree low_bound = array_ref_low_bound (exp);
9818 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9819
9820 /* Optimize the special case of a zero lower bound.
9821
9822 We convert the lower bound to sizetype to avoid problems
9823 with constant folding. E.g. suppose the lower bound is
9824 1 and its mode is QI. Without the conversion
9825 (ARRAY + (INDEX - (unsigned char)1))
9826 becomes
9827 (ARRAY + (-(unsigned char)1) + INDEX)
9828 which becomes
9829 (ARRAY + 255 + INDEX). Oops! */
9830 if (!integer_zerop (low_bound))
9831 index1 = size_diffop_loc (loc, index1,
9832 fold_convert_loc (loc, sizetype,
9833 low_bound));
9834
9835 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9836 {
9837 tree type = TREE_TYPE (TREE_TYPE (init));
9838 enum machine_mode mode = TYPE_MODE (type);
9839
9840 if (GET_MODE_CLASS (mode) == MODE_INT
9841 && GET_MODE_SIZE (mode) == 1)
9842 return gen_int_mode (TREE_STRING_POINTER (init)
9843 [TREE_INT_CST_LOW (index1)],
9844 mode);
9845 }
9846 }
9847 }
9848 }
9849 goto normal_inner_ref;
9850
9851 case COMPONENT_REF:
9852 /* If the operand is a CONSTRUCTOR, we can just extract the
9853 appropriate field if it is present. */
9854 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9855 {
9856 unsigned HOST_WIDE_INT idx;
9857 tree field, value;
9858
9859 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9860 idx, field, value)
9861 if (field == treeop1
9862 /* We can normally use the value of the field in the
9863 CONSTRUCTOR. However, if this is a bitfield in
9864 an integral mode that we can fit in a HOST_WIDE_INT,
9865 we must mask only the number of bits in the bitfield,
9866 since this is done implicitly by the constructor. If
9867 the bitfield does not meet either of those conditions,
9868 we can't do this optimization. */
9869 && (! DECL_BIT_FIELD (field)
9870 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9871 && (GET_MODE_PRECISION (DECL_MODE (field))
9872 <= HOST_BITS_PER_WIDE_INT))))
9873 {
9874 if (DECL_BIT_FIELD (field)
9875 && modifier == EXPAND_STACK_PARM)
9876 target = 0;
9877 op0 = expand_expr (value, target, tmode, modifier);
9878 if (DECL_BIT_FIELD (field))
9879 {
9880 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9881 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9882
9883 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9884 {
9885 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9886 imode);
9887 op0 = expand_and (imode, op0, op1, target);
9888 }
9889 else
9890 {
9891 int count = GET_MODE_PRECISION (imode) - bitsize;
9892
9893 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9894 target, 0);
9895 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9896 target, 0);
9897 }
9898 }
9899
9900 return op0;
9901 }
9902 }
9903 goto normal_inner_ref;
9904
9905 case BIT_FIELD_REF:
9906 case ARRAY_RANGE_REF:
9907 normal_inner_ref:
9908 {
9909 enum machine_mode mode1, mode2;
9910 HOST_WIDE_INT bitsize, bitpos;
9911 tree offset;
9912 int volatilep = 0, must_force_mem;
9913 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9914 &mode1, &unsignedp, &volatilep);
9915 rtx orig_op0, memloc;
9916 bool mem_attrs_from_type = false;
9917
9918 /* If we got back the original object, something is wrong. Perhaps
9919 we are evaluating an expression too early. In any event, don't
9920 infinitely recurse. */
9921 gcc_assert (tem != exp);
9922
9923 /* If TEM's type is a union of variable size, pass TARGET to the inner
9924 computation, since it will need a temporary and TARGET is known
9925 to be available for that. This occurs in unchecked conversion in Ada. */
9926 orig_op0 = op0
9927 = expand_expr (tem,
9928 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9929 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9930 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9931 != INTEGER_CST)
9932 && modifier != EXPAND_STACK_PARM
9933 ? target : NULL_RTX),
9934 VOIDmode,
9935 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
9936
9937 /* If the bitfield is volatile, we want to access it in the
9938 field's mode, not the computed mode.
9939 If a MEM has VOIDmode (external with incomplete type),
9940 use BLKmode for it instead. */
9941 if (MEM_P (op0))
9942 {
9943 if (volatilep && flag_strict_volatile_bitfields > 0)
9944 op0 = adjust_address (op0, mode1, 0);
9945 else if (GET_MODE (op0) == VOIDmode)
9946 op0 = adjust_address (op0, BLKmode, 0);
9947 }
9948
9949 mode2
9950 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9951
9952 /* If we have either an offset, a BLKmode result, or a reference
9953 outside the underlying object, we must force it to memory.
9954 Such a case can occur in Ada if we have unchecked conversion
9955 of an expression from a scalar type to an aggregate type or
9956 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9957 passed a partially uninitialized object or a view-conversion
9958 to a larger size. */
9959 must_force_mem = (offset
9960 || mode1 == BLKmode
9961 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9962
9963 /* Handle CONCAT first. */
9964 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9965 {
9966 if (bitpos == 0
9967 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9968 return op0;
9969 if (bitpos == 0
9970 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9971 && bitsize)
9972 {
9973 op0 = XEXP (op0, 0);
9974 mode2 = GET_MODE (op0);
9975 }
9976 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9977 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9978 && bitpos
9979 && bitsize)
9980 {
9981 op0 = XEXP (op0, 1);
9982 bitpos = 0;
9983 mode2 = GET_MODE (op0);
9984 }
9985 else
9986 /* Otherwise force into memory. */
9987 must_force_mem = 1;
9988 }
9989
9990 /* If this is a constant, put it in a register if it is a legitimate
9991 constant and we don't need a memory reference. */
9992 if (CONSTANT_P (op0)
9993 && mode2 != BLKmode
9994 && targetm.legitimate_constant_p (mode2, op0)
9995 && !must_force_mem)
9996 op0 = force_reg (mode2, op0);
9997
9998 /* Otherwise, if this is a constant, try to force it to the constant
9999 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10000 is a legitimate constant. */
10001 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10002 op0 = validize_mem (memloc);
10003
10004 /* Otherwise, if this is a constant or the object is not in memory
10005 and need be, put it there. */
10006 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10007 {
10008 tree nt = build_qualified_type (TREE_TYPE (tem),
10009 (TYPE_QUALS (TREE_TYPE (tem))
10010 | TYPE_QUAL_CONST));
10011 memloc = assign_temp (nt, 1, 1);
10012 emit_move_insn (memloc, op0);
10013 op0 = memloc;
10014 mem_attrs_from_type = true;
10015 }
10016
10017 if (offset)
10018 {
10019 enum machine_mode address_mode;
10020 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10021 EXPAND_SUM);
10022
10023 gcc_assert (MEM_P (op0));
10024
10025 address_mode = get_address_mode (op0);
10026 if (GET_MODE (offset_rtx) != address_mode)
10027 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10028
10029 if (GET_MODE (op0) == BLKmode
10030 /* A constant address in OP0 can have VOIDmode, so we must
10031 not try to call force_reg in that case. */
10032 && GET_MODE (XEXP (op0, 0)) != VOIDmode
10033 && bitsize != 0
10034 && (bitpos % bitsize) == 0
10035 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10036 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10037 {
10038 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10039 bitpos = 0;
10040 }
10041
10042 op0 = offset_address (op0, offset_rtx,
10043 highest_pow2_factor (offset));
10044 }
10045
10046 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10047 record its alignment as BIGGEST_ALIGNMENT. */
10048 if (MEM_P (op0) && bitpos == 0 && offset != 0
10049 && is_aligning_offset (offset, tem))
10050 set_mem_align (op0, BIGGEST_ALIGNMENT);
10051
10052 /* Don't forget about volatility even if this is a bitfield. */
10053 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10054 {
10055 if (op0 == orig_op0)
10056 op0 = copy_rtx (op0);
10057
10058 MEM_VOLATILE_P (op0) = 1;
10059 }
10060
10061 /* In cases where an aligned union has an unaligned object
10062 as a field, we might be extracting a BLKmode value from
10063 an integer-mode (e.g., SImode) object. Handle this case
10064 by doing the extract into an object as wide as the field
10065 (which we know to be the width of a basic mode), then
10066 storing into memory, and changing the mode to BLKmode. */
10067 if (mode1 == VOIDmode
10068 || REG_P (op0) || GET_CODE (op0) == SUBREG
10069 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10070 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10071 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10072 && modifier != EXPAND_CONST_ADDRESS
10073 && modifier != EXPAND_INITIALIZER
10074 && modifier != EXPAND_MEMORY)
10075 /* If the field is volatile, we always want an aligned
10076 access. Do this in the following two situations:
10077 1. the access is not already naturally
10078 aligned, otherwise "normal" (non-bitfield) volatile fields
10079 become non-addressable.
10080 2. the bitsize is narrower than the access size. Need
10081 to extract bitfields from the access. */
10082 || (volatilep && flag_strict_volatile_bitfields > 0
10083 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
10084 || (mode1 != BLKmode
10085 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
10086 /* If the field isn't aligned enough to fetch as a memref,
10087 fetch it as a bit field. */
10088 || (mode1 != BLKmode
10089 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10090 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10091 || (MEM_P (op0)
10092 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10093 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10094 && modifier != EXPAND_MEMORY
10095 && ((modifier == EXPAND_CONST_ADDRESS
10096 || modifier == EXPAND_INITIALIZER)
10097 ? STRICT_ALIGNMENT
10098 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10099 || (bitpos % BITS_PER_UNIT != 0)))
10100 /* If the type and the field are a constant size and the
10101 size of the type isn't the same size as the bitfield,
10102 we must use bitfield operations. */
10103 || (bitsize >= 0
10104 && TYPE_SIZE (TREE_TYPE (exp))
10105 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10106 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10107 bitsize)))
10108 {
10109 enum machine_mode ext_mode = mode;
10110
10111 if (ext_mode == BLKmode
10112 && ! (target != 0 && MEM_P (op0)
10113 && MEM_P (target)
10114 && bitpos % BITS_PER_UNIT == 0))
10115 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10116
10117 if (ext_mode == BLKmode)
10118 {
10119 if (target == 0)
10120 target = assign_temp (type, 1, 1);
10121
10122 if (bitsize == 0)
10123 return target;
10124
10125 /* In this case, BITPOS must start at a byte boundary and
10126 TARGET, if specified, must be a MEM. */
10127 gcc_assert (MEM_P (op0)
10128 && (!target || MEM_P (target))
10129 && !(bitpos % BITS_PER_UNIT));
10130
10131 emit_block_move (target,
10132 adjust_address (op0, VOIDmode,
10133 bitpos / BITS_PER_UNIT),
10134 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10135 / BITS_PER_UNIT),
10136 (modifier == EXPAND_STACK_PARM
10137 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10138
10139 return target;
10140 }
10141
10142 op0 = validize_mem (op0);
10143
10144 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10145 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10146
10147 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10148 (modifier == EXPAND_STACK_PARM
10149 ? NULL_RTX : target),
10150 ext_mode, ext_mode);
10151
10152 /* If the result is a record type and BITSIZE is narrower than
10153 the mode of OP0, an integral mode, and this is a big endian
10154 machine, we must put the field into the high-order bits. */
10155 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10156 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10157 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10158 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10159 GET_MODE_BITSIZE (GET_MODE (op0))
10160 - bitsize, op0, 1);
10161
10162 /* If the result type is BLKmode, store the data into a temporary
10163 of the appropriate type, but with the mode corresponding to the
10164 mode for the data we have (op0's mode). It's tempting to make
10165 this a constant type, since we know it's only being stored once,
10166 but that can cause problems if we are taking the address of this
10167 COMPONENT_REF because the MEM of any reference via that address
10168 will have flags corresponding to the type, which will not
10169 necessarily be constant. */
10170 if (mode == BLKmode)
10171 {
10172 rtx new_rtx;
10173
10174 new_rtx = assign_stack_temp_for_type (ext_mode,
10175 GET_MODE_BITSIZE (ext_mode),
10176 type);
10177 emit_move_insn (new_rtx, op0);
10178 op0 = copy_rtx (new_rtx);
10179 PUT_MODE (op0, BLKmode);
10180 }
10181
10182 return op0;
10183 }
10184
10185 /* If the result is BLKmode, use that to access the object
10186 now as well. */
10187 if (mode == BLKmode)
10188 mode1 = BLKmode;
10189
10190 /* Get a reference to just this component. */
10191 if (modifier == EXPAND_CONST_ADDRESS
10192 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10193 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10194 else
10195 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10196
10197 if (op0 == orig_op0)
10198 op0 = copy_rtx (op0);
10199
10200 /* If op0 is a temporary because of forcing to memory, pass only the
10201 type to set_mem_attributes so that the original expression is never
10202 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10203 if (mem_attrs_from_type)
10204 set_mem_attributes (op0, type, 0);
10205 else
10206 set_mem_attributes (op0, exp, 0);
10207
10208 if (REG_P (XEXP (op0, 0)))
10209 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10210
10211 MEM_VOLATILE_P (op0) |= volatilep;
10212 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10213 || modifier == EXPAND_CONST_ADDRESS
10214 || modifier == EXPAND_INITIALIZER)
10215 return op0;
10216
10217 if (target == 0)
10218 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10219
10220 convert_move (target, op0, unsignedp);
10221 return target;
10222 }
10223
10224 case OBJ_TYPE_REF:
10225 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10226
10227 case CALL_EXPR:
10228 /* All valid uses of __builtin_va_arg_pack () are removed during
10229 inlining. */
10230 if (CALL_EXPR_VA_ARG_PACK (exp))
10231 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10232 {
10233 tree fndecl = get_callee_fndecl (exp), attr;
10234
10235 if (fndecl
10236 && (attr = lookup_attribute ("error",
10237 DECL_ATTRIBUTES (fndecl))) != NULL)
10238 error ("%Kcall to %qs declared with attribute error: %s",
10239 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10240 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10241 if (fndecl
10242 && (attr = lookup_attribute ("warning",
10243 DECL_ATTRIBUTES (fndecl))) != NULL)
10244 warning_at (tree_nonartificial_location (exp),
10245 0, "%Kcall to %qs declared with attribute warning: %s",
10246 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10247 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10248
10249 /* Check for a built-in function. */
10250 if (fndecl && DECL_BUILT_IN (fndecl))
10251 {
10252 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10253 return expand_builtin (exp, target, subtarget, tmode, ignore);
10254 }
10255 }
10256 return expand_call (exp, target, ignore);
10257
10258 case VIEW_CONVERT_EXPR:
10259 op0 = NULL_RTX;
10260
10261 /* If we are converting to BLKmode, try to avoid an intermediate
10262 temporary by fetching an inner memory reference. */
10263 if (mode == BLKmode
10264 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10265 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10266 && handled_component_p (treeop0))
10267 {
10268 enum machine_mode mode1;
10269 HOST_WIDE_INT bitsize, bitpos;
10270 tree offset;
10271 int unsignedp;
10272 int volatilep = 0;
10273 tree tem
10274 = get_inner_reference (treeop0, &bitsize, &bitpos,
10275 &offset, &mode1, &unsignedp, &volatilep);
10276 rtx orig_op0;
10277
10278 /* ??? We should work harder and deal with non-zero offsets. */
10279 if (!offset
10280 && (bitpos % BITS_PER_UNIT) == 0
10281 && bitsize >= 0
10282 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10283 {
10284 /* See the normal_inner_ref case for the rationale. */
10285 orig_op0
10286 = expand_expr (tem,
10287 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10288 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10289 != INTEGER_CST)
10290 && modifier != EXPAND_STACK_PARM
10291 ? target : NULL_RTX),
10292 VOIDmode,
10293 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
10294
10295 if (MEM_P (orig_op0))
10296 {
10297 op0 = orig_op0;
10298
10299 /* Get a reference to just this component. */
10300 if (modifier == EXPAND_CONST_ADDRESS
10301 || modifier == EXPAND_SUM
10302 || modifier == EXPAND_INITIALIZER)
10303 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10304 else
10305 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10306
10307 if (op0 == orig_op0)
10308 op0 = copy_rtx (op0);
10309
10310 set_mem_attributes (op0, treeop0, 0);
10311 if (REG_P (XEXP (op0, 0)))
10312 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10313
10314 MEM_VOLATILE_P (op0) |= volatilep;
10315 }
10316 }
10317 }
10318
10319 if (!op0)
10320 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
10321
10322 /* If the input and output modes are both the same, we are done. */
10323 if (mode == GET_MODE (op0))
10324 ;
10325 /* If neither mode is BLKmode, and both modes are the same size
10326 then we can use gen_lowpart. */
10327 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10328 && (GET_MODE_PRECISION (mode)
10329 == GET_MODE_PRECISION (GET_MODE (op0)))
10330 && !COMPLEX_MODE_P (GET_MODE (op0)))
10331 {
10332 if (GET_CODE (op0) == SUBREG)
10333 op0 = force_reg (GET_MODE (op0), op0);
10334 temp = gen_lowpart_common (mode, op0);
10335 if (temp)
10336 op0 = temp;
10337 else
10338 {
10339 if (!REG_P (op0) && !MEM_P (op0))
10340 op0 = force_reg (GET_MODE (op0), op0);
10341 op0 = gen_lowpart (mode, op0);
10342 }
10343 }
10344 /* If both types are integral, convert from one mode to the other. */
10345 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10346 op0 = convert_modes (mode, GET_MODE (op0), op0,
10347 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10348 /* As a last resort, spill op0 to memory, and reload it in a
10349 different mode. */
10350 else if (!MEM_P (op0))
10351 {
10352 /* If the operand is not a MEM, force it into memory. Since we
10353 are going to be changing the mode of the MEM, don't call
10354 force_const_mem for constants because we don't allow pool
10355 constants to change mode. */
10356 tree inner_type = TREE_TYPE (treeop0);
10357
10358 gcc_assert (!TREE_ADDRESSABLE (exp));
10359
10360 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10361 target
10362 = assign_stack_temp_for_type
10363 (TYPE_MODE (inner_type),
10364 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10365
10366 emit_move_insn (target, op0);
10367 op0 = target;
10368 }
10369
10370 /* At this point, OP0 is in the correct mode. If the output type is
10371 such that the operand is known to be aligned, indicate that it is.
10372 Otherwise, we need only be concerned about alignment for non-BLKmode
10373 results. */
10374 if (MEM_P (op0))
10375 {
10376 enum insn_code icode;
10377
10378 if (TYPE_ALIGN_OK (type))
10379 {
10380 /* ??? Copying the MEM without substantially changing it might
10381 run afoul of the code handling volatile memory references in
10382 store_expr, which assumes that TARGET is returned unmodified
10383 if it has been used. */
10384 op0 = copy_rtx (op0);
10385 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10386 }
10387 else if (mode != BLKmode
10388 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10389 /* If the target does have special handling for unaligned
10390 loads of mode then use them. */
10391 && ((icode = optab_handler (movmisalign_optab, mode))
10392 != CODE_FOR_nothing))
10393 {
10394 rtx reg, insn;
10395
10396 op0 = adjust_address (op0, mode, 0);
10397 /* We've already validated the memory, and we're creating a
10398 new pseudo destination. The predicates really can't
10399 fail. */
10400 reg = gen_reg_rtx (mode);
10401
10402 /* Nor can the insn generator. */
10403 insn = GEN_FCN (icode) (reg, op0);
10404 emit_insn (insn);
10405 return reg;
10406 }
10407 else if (STRICT_ALIGNMENT
10408 && mode != BLKmode
10409 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10410 {
10411 tree inner_type = TREE_TYPE (treeop0);
10412 HOST_WIDE_INT temp_size
10413 = MAX (int_size_in_bytes (inner_type),
10414 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10415 rtx new_rtx
10416 = assign_stack_temp_for_type (mode, temp_size, type);
10417 rtx new_with_op0_mode
10418 = adjust_address (new_rtx, GET_MODE (op0), 0);
10419
10420 gcc_assert (!TREE_ADDRESSABLE (exp));
10421
10422 if (GET_MODE (op0) == BLKmode)
10423 emit_block_move (new_with_op0_mode, op0,
10424 GEN_INT (GET_MODE_SIZE (mode)),
10425 (modifier == EXPAND_STACK_PARM
10426 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10427 else
10428 emit_move_insn (new_with_op0_mode, op0);
10429
10430 op0 = new_rtx;
10431 }
10432
10433 op0 = adjust_address (op0, mode, 0);
10434 }
10435
10436 return op0;
10437
10438 case MODIFY_EXPR:
10439 {
10440 tree lhs = treeop0;
10441 tree rhs = treeop1;
10442 gcc_assert (ignore);
10443
10444 /* Check for |= or &= of a bitfield of size one into another bitfield
10445 of size 1. In this case, (unless we need the result of the
10446 assignment) we can do this more efficiently with a
10447 test followed by an assignment, if necessary.
10448
10449 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10450 things change so we do, this code should be enhanced to
10451 support it. */
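	/* E.g. given one-bit fields x and y, s.x |= s.y is emitted as
	     if (s.y) s.x = 1;
	   and s.x &= s.y as
	     if (!s.y) s.x = 0;  */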
10452 if (TREE_CODE (lhs) == COMPONENT_REF
10453 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10454 || TREE_CODE (rhs) == BIT_AND_EXPR)
10455 && TREE_OPERAND (rhs, 0) == lhs
10456 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10457 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10458 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10459 {
10460 rtx label = gen_label_rtx ();
10461 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10462 do_jump (TREE_OPERAND (rhs, 1),
10463 value ? label : 0,
10464 value ? 0 : label, -1);
10465 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10466 false);
10467 do_pending_stack_adjust ();
10468 emit_label (label);
10469 return const0_rtx;
10470 }
10471
10472 expand_assignment (lhs, rhs, false);
10473 return const0_rtx;
10474 }
10475
10476 case ADDR_EXPR:
10477 return expand_expr_addr_expr (exp, target, tmode, modifier);
10478
10479 case REALPART_EXPR:
10480 op0 = expand_normal (treeop0);
10481 return read_complex_part (op0, false);
10482
10483 case IMAGPART_EXPR:
10484 op0 = expand_normal (treeop0);
10485 return read_complex_part (op0, true);
10486
10487 case RETURN_EXPR:
10488 case LABEL_EXPR:
10489 case GOTO_EXPR:
10490 case SWITCH_EXPR:
10491 case ASM_EXPR:
10492 /* Expanded in cfgexpand.c. */
10493 gcc_unreachable ();
10494
10495 case TRY_CATCH_EXPR:
10496 case CATCH_EXPR:
10497 case EH_FILTER_EXPR:
10498 case TRY_FINALLY_EXPR:
10499 /* Lowered by tree-eh.c. */
10500 gcc_unreachable ();
10501
10502 case WITH_CLEANUP_EXPR:
10503 case CLEANUP_POINT_EXPR:
10504 case TARGET_EXPR:
10505 case CASE_LABEL_EXPR:
10506 case VA_ARG_EXPR:
10507 case BIND_EXPR:
10508 case INIT_EXPR:
10509 case CONJ_EXPR:
10510 case COMPOUND_EXPR:
10511 case PREINCREMENT_EXPR:
10512 case PREDECREMENT_EXPR:
10513 case POSTINCREMENT_EXPR:
10514 case POSTDECREMENT_EXPR:
10515 case LOOP_EXPR:
10516 case EXIT_EXPR:
10517 case COMPOUND_LITERAL_EXPR:
10518 /* Lowered by gimplify.c. */
10519 gcc_unreachable ();
10520
10521 case FDESC_EXPR:
10522 /* Function descriptors are not valid except as
10523 initialization constants, and should not be expanded. */
10524 gcc_unreachable ();
10525
10526 case WITH_SIZE_EXPR:
10527 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10528 have pulled out the size to use in whatever context it needed. */
10529 return expand_expr_real (treeop0, original_target, tmode,
10530 modifier, alt_rtl);
10531
10532 default:
10533 return expand_expr_real_2 (&ops, target, tmode, modifier);
10534 }
10535 }
10536 \f
10537 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10538 signedness of TYPE), possibly returning the result in TARGET. */
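/* For example (a rough illustration): reducing an SImode value to a
   signed 5-bit precision shifts it left by 27 and then arithmetic-shifts
   it right by 27, while an unsigned reduction simply masks with 0x1f. */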
10539 static rtx
10540 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10541 {
10542 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10543 if (target && GET_MODE (target) != GET_MODE (exp))
10544 target = 0;
10545 /* For constant values, reduce using build_int_cst_type. */
10546 if (CONST_INT_P (exp))
10547 {
10548 HOST_WIDE_INT value = INTVAL (exp);
10549 tree t = build_int_cst_type (type, value);
10550 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10551 }
10552 else if (TYPE_UNSIGNED (type))
10553 {
10554 rtx mask = immed_double_int_const (double_int::mask (prec),
10555 GET_MODE (exp));
10556 return expand_and (GET_MODE (exp), exp, mask, target);
10557 }
10558 else
10559 {
10560 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10561 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10562 exp, count, target, 0);
10563 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10564 exp, count, target, 0);
10565 }
10566 }
10567 \f
10568 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10569 when applied to the address of EXP produces an address known to be
10570 aligned more than BIGGEST_ALIGNMENT. */
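/* In other words, the pattern matched below is roughly
   OFFSET == (- (char *) &EXP) & C, where C + 1 is a power of two
   larger than BIGGEST_ALIGNMENT in bytes; adding such an offset to the
   address of EXP rounds it up to a C + 1 boundary. */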
10571
10572 static int
10573 is_aligning_offset (const_tree offset, const_tree exp)
10574 {
10575 /* Strip off any conversions. */
10576 while (CONVERT_EXPR_P (offset))
10577 offset = TREE_OPERAND (offset, 0);
10578
10579 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10580 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10581 if (TREE_CODE (offset) != BIT_AND_EXPR
10582 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10583 || compare_tree_int (TREE_OPERAND (offset, 1),
10584 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10585 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10586 return 0;
10587
10588 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10589 It must be NEGATE_EXPR. Then strip any more conversions. */
10590 offset = TREE_OPERAND (offset, 0);
10591 while (CONVERT_EXPR_P (offset))
10592 offset = TREE_OPERAND (offset, 0);
10593
10594 if (TREE_CODE (offset) != NEGATE_EXPR)
10595 return 0;
10596
10597 offset = TREE_OPERAND (offset, 0);
10598 while (CONVERT_EXPR_P (offset))
10599 offset = TREE_OPERAND (offset, 0);
10600
10601 /* This must now be the address of EXP. */
10602 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10603 }
10604 \f
10605 /* Return the tree node if ARG corresponds to a string constant, or zero
10606 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10607 in bytes within the string that ARG is accessing. The type of the
10608 offset will be `sizetype'. */
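/* For instance, for ARG == &"hello"[2] this returns the STRING_CST
   "hello" with *PTR_OFFSET set to 2; the VAR_DECL cases below handle
   variables whose initializer is a string literal, returning that
   initializer instead. */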
10609
10610 tree
10611 string_constant (tree arg, tree *ptr_offset)
10612 {
10613 tree array, offset, lower_bound;
10614 STRIP_NOPS (arg);
10615
10616 if (TREE_CODE (arg) == ADDR_EXPR)
10617 {
10618 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10619 {
10620 *ptr_offset = size_zero_node;
10621 return TREE_OPERAND (arg, 0);
10622 }
10623 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10624 {
10625 array = TREE_OPERAND (arg, 0);
10626 offset = size_zero_node;
10627 }
10628 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10629 {
10630 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10631 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10632 if (TREE_CODE (array) != STRING_CST
10633 && TREE_CODE (array) != VAR_DECL)
10634 return 0;
10635
10636 /* Check if the array has a nonzero lower bound. */
10637 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10638 if (!integer_zerop (lower_bound))
10639 {
10640 /* If the offset and base aren't both constants, return 0. */
10641 if (TREE_CODE (lower_bound) != INTEGER_CST)
10642 return 0;
10643 if (TREE_CODE (offset) != INTEGER_CST)
10644 return 0;
10645 /* Adjust offset by the lower bound. */
10646 offset = size_diffop (fold_convert (sizetype, offset),
10647 fold_convert (sizetype, lower_bound));
10648 }
10649 }
10650 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10651 {
10652 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10653 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10654 if (TREE_CODE (array) != ADDR_EXPR)
10655 return 0;
10656 array = TREE_OPERAND (array, 0);
10657 if (TREE_CODE (array) != STRING_CST
10658 && TREE_CODE (array) != VAR_DECL)
10659 return 0;
10660 }
10661 else
10662 return 0;
10663 }
10664 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10665 {
10666 tree arg0 = TREE_OPERAND (arg, 0);
10667 tree arg1 = TREE_OPERAND (arg, 1);
10668
10669 STRIP_NOPS (arg0);
10670 STRIP_NOPS (arg1);
10671
10672 if (TREE_CODE (arg0) == ADDR_EXPR
10673 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10674 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10675 {
10676 array = TREE_OPERAND (arg0, 0);
10677 offset = arg1;
10678 }
10679 else if (TREE_CODE (arg1) == ADDR_EXPR
10680 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10681 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10682 {
10683 array = TREE_OPERAND (arg1, 0);
10684 offset = arg0;
10685 }
10686 else
10687 return 0;
10688 }
10689 else
10690 return 0;
10691
10692 if (TREE_CODE (array) == STRING_CST)
10693 {
10694 *ptr_offset = fold_convert (sizetype, offset);
10695 return array;
10696 }
10697 else if (TREE_CODE (array) == VAR_DECL
10698 || TREE_CODE (array) == CONST_DECL)
10699 {
10700 int length;
10701 tree init = ctor_for_folding (array);
10702
10703 /* Variables initialized to string literals can be handled too. */
10704 if (init == error_mark_node
10705 || !init
10706 || TREE_CODE (init) != STRING_CST)
10707 return 0;
10708
10709 /* Avoid const char foo[4] = "abcde"; */
10710 if (DECL_SIZE_UNIT (array) == NULL_TREE
10711 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10712 || (length = TREE_STRING_LENGTH (init)) <= 0
10713 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10714 return 0;
10715
10716 /* If the variable is bigger than the string literal, OFFSET must be
10717 a constant and within the bounds of the string literal. */
10718 offset = fold_convert (sizetype, offset);
10719 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10720 && (! tree_fits_uhwi_p (offset)
10721 || compare_tree_int (offset, length) >= 0))
10722 return 0;
10723
10724 *ptr_offset = offset;
10725 return init;
10726 }
10727
10728 return 0;
10729 }
10730 \f
10731 /* Generate code to calculate OPS, an exploded expression,
10732 using a store-flag instruction and return an rtx for the result.
10733 OPS reflects a comparison.
10734
10735 If TARGET is nonzero, store the result there if convenient.
10736
10737 Return zero if there is no suitable set-flag instruction
10738 available on this machine.
10739
10740 Once expand_expr has been called on the arguments of the comparison,
10741 we are committed to doing the store flag, since it is not safe to
10742 re-evaluate the expression. We emit the store-flag insn by calling
10743 emit_store_flag, but only expand the arguments if we have a reason
10744 to believe that emit_store_flag will be successful. If we think that
10745 it will, but it isn't, we have to simulate the store-flag with a
10746 set/jump/set sequence. */
10747
10748 static rtx
10749 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10750 {
10751 enum rtx_code code;
10752 tree arg0, arg1, type;
10753 tree tem;
10754 enum machine_mode operand_mode;
10755 int unsignedp;
10756 rtx op0, op1;
10757 rtx subtarget = target;
10758 location_t loc = ops->location;
10759
10760 arg0 = ops->op0;
10761 arg1 = ops->op1;
10762
10763 /* Don't crash if the comparison was erroneous. */
10764 if (arg0 == error_mark_node || arg1 == error_mark_node)
10765 return const0_rtx;
10766
10767 type = TREE_TYPE (arg0);
10768 operand_mode = TYPE_MODE (type);
10769 unsignedp = TYPE_UNSIGNED (type);
10770
10771 /* We won't bother with BLKmode store-flag operations because it would mean
10772 passing a lot of information to emit_store_flag. */
10773 if (operand_mode == BLKmode)
10774 return 0;
10775
10776 /* We won't bother with store-flag operations involving function pointers
10777 when function pointers must be canonicalized before comparisons. */
10778 #ifdef HAVE_canonicalize_funcptr_for_compare
10779 if (HAVE_canonicalize_funcptr_for_compare
10780 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10781 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10782 == FUNCTION_TYPE))
10783 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10784 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10785 == FUNCTION_TYPE))))
10786 return 0;
10787 #endif
10788
10789 STRIP_NOPS (arg0);
10790 STRIP_NOPS (arg1);
10791
10792 /* For vector typed comparisons emit code to generate the desired
10793 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10794 expander for this. */
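/* In effect, a vector comparison A < B is expanded here as
   VEC_COND_EXPR <A < B, {-1, ...}, {0, ...}>, yielding the usual
   all-ones / all-zeros element mask. */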
10795 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10796 {
10797 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10798 tree if_true = constant_boolean_node (true, ops->type);
10799 tree if_false = constant_boolean_node (false, ops->type);
10800 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10801 }
10802
10803 /* Get the rtx comparison code to use. We know that EXP is a comparison
10804 operation of some type. Some comparisons against 1 and -1 can be
10805 converted to comparisons with zero. Do so here so that the tests
10806 below will be aware that we have a comparison with zero. These
10807 tests will not catch constants in the first operand, but constants
10808 are rarely passed as the first operand. */
10809
10810 switch (ops->code)
10811 {
10812 case EQ_EXPR:
10813 code = EQ;
10814 break;
10815 case NE_EXPR:
10816 code = NE;
10817 break;
10818 case LT_EXPR:
10819 if (integer_onep (arg1))
10820 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10821 else
10822 code = unsignedp ? LTU : LT;
10823 break;
10824 case LE_EXPR:
10825 if (! unsignedp && integer_all_onesp (arg1))
10826 arg1 = integer_zero_node, code = LT;
10827 else
10828 code = unsignedp ? LEU : LE;
10829 break;
10830 case GT_EXPR:
10831 if (! unsignedp && integer_all_onesp (arg1))
10832 arg1 = integer_zero_node, code = GE;
10833 else
10834 code = unsignedp ? GTU : GT;
10835 break;
10836 case GE_EXPR:
10837 if (integer_onep (arg1))
10838 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10839 else
10840 code = unsignedp ? GEU : GE;
10841 break;
10842
10843 case UNORDERED_EXPR:
10844 code = UNORDERED;
10845 break;
10846 case ORDERED_EXPR:
10847 code = ORDERED;
10848 break;
10849 case UNLT_EXPR:
10850 code = UNLT;
10851 break;
10852 case UNLE_EXPR:
10853 code = UNLE;
10854 break;
10855 case UNGT_EXPR:
10856 code = UNGT;
10857 break;
10858 case UNGE_EXPR:
10859 code = UNGE;
10860 break;
10861 case UNEQ_EXPR:
10862 code = UNEQ;
10863 break;
10864 case LTGT_EXPR:
10865 code = LTGT;
10866 break;
10867
10868 default:
10869 gcc_unreachable ();
10870 }
10871
10872 /* Put a constant second. */
10873 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10874 || TREE_CODE (arg0) == FIXED_CST)
10875 {
10876 tem = arg0; arg0 = arg1; arg1 = tem;
10877 code = swap_condition (code);
10878 }
10879
10880 /* If this is an equality or inequality test of a single bit, we can
10881 do this by shifting the bit being tested to the low-order bit and
10882 masking the result with the constant 1. If the condition was EQ,
10883 we xor it with 1. This does not require an scc insn and is faster
10884 than an scc insn even if we have it.
10885
10886 The code to make this transformation was moved into fold_single_bit_test,
10887 so we just call into the folder and expand its result. */
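/* For instance, (x & 8) != 0 can be expanded as (x >> 3) & 1, and
   (x & 8) == 0 as ((x >> 3) & 1) ^ 1, avoiding a conditional branch
   entirely. */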
10888
10889 if ((code == NE || code == EQ)
10890 && integer_zerop (arg1)
10891 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10892 {
10893 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10894 if (srcstmt
10895 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10896 {
10897 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10898 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10899 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10900 gimple_assign_rhs1 (srcstmt),
10901 gimple_assign_rhs2 (srcstmt));
10902 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10903 if (temp)
10904 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10905 }
10906 }
10907
10908 if (! get_subtarget (target)
10909 || GET_MODE (subtarget) != operand_mode)
10910 subtarget = 0;
10911
10912 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10913
10914 if (target == 0)
10915 target = gen_reg_rtx (mode);
10916
10917 /* Try a cstore if possible. */
10918 return emit_store_flag_force (target, code, op0, op1,
10919 operand_mode, unsignedp,
10920 (TYPE_PRECISION (ops->type) == 1
10921 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10922 }
10923 \f
10924
10925 /* Stubs in case we haven't got a casesi insn. */
10926 #ifndef HAVE_casesi
10927 # define HAVE_casesi 0
10928 # define gen_casesi(a, b, c, d, e) (0)
10929 # define CODE_FOR_casesi CODE_FOR_nothing
10930 #endif
10931
10932 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10933 0 otherwise (i.e. if there is no casesi instruction).
10934
10935 DEFAULT_PROBABILITY is the probability of jumping to the default
10936 label. */
10937 int
10938 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10939 rtx table_label, rtx default_label, rtx fallback_label,
10940 int default_probability)
10941 {
10942 struct expand_operand ops[5];
10943 enum machine_mode index_mode = SImode;
10944 rtx op1, op2, index;
10945
10946 if (! HAVE_casesi)
10947 return 0;
10948
10949 /* Convert the index to SImode. */
10950 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10951 {
10952 enum machine_mode omode = TYPE_MODE (index_type);
10953 rtx rangertx = expand_normal (range);
10954
10955 /* We must handle the endpoints in the original mode. */
10956 index_expr = build2 (MINUS_EXPR, index_type,
10957 index_expr, minval);
10958 minval = integer_zero_node;
10959 index = expand_normal (index_expr);
10960 if (default_label)
10961 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10962 omode, 1, default_label,
10963 default_probability);
10964 /* Now we can safely truncate. */
10965 index = convert_to_mode (index_mode, index, 0);
10966 }
10967 else
10968 {
10969 if (TYPE_MODE (index_type) != index_mode)
10970 {
10971 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10972 index_expr = fold_convert (index_type, index_expr);
10973 }
10974
10975 index = expand_normal (index_expr);
10976 }
10977
10978 do_pending_stack_adjust ();
10979
10980 op1 = expand_normal (minval);
10981 op2 = expand_normal (range);
10982
10983 create_input_operand (&ops[0], index, index_mode);
10984 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10985 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10986 create_fixed_operand (&ops[3], table_label);
10987 create_fixed_operand (&ops[4], (default_label
10988 ? default_label
10989 : fallback_label));
10990 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10991 return 1;
10992 }
10993
10994 /* Attempt to generate a tablejump instruction; same concept. */
10995 #ifndef HAVE_tablejump
10996 #define HAVE_tablejump 0
10997 #define gen_tablejump(x, y) (0)
10998 #endif
10999
11000 /* Subroutine of the next function.
11001
11002 INDEX is the value being switched on, with the lowest value
11003 in the table already subtracted.
11004 MODE is its expected mode (needed if INDEX is constant).
11005 RANGE is the length of the jump table.
11006 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11007
11008 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11009 index value is out of range.
11010 DEFAULT_PROBABILITY is the probability of jumping to
11011 the default label. */
11012
11013 static void
11014 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11015 rtx default_label, int default_probability)
11016 {
11017 rtx temp, vector;
11018
11019 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11020 cfun->cfg->max_jumptable_ents = INTVAL (range);
11021
11022 /* Do an unsigned comparison (in the proper mode) between the index
11023 expression and the value which represents the length of the range.
11024 Since we just finished subtracting the lower bound of the range
11025 from the index expression, this comparison allows us to simultaneously
11026 check that the original index expression value is both greater than
11027 or equal to the minimum value of the range and less than or equal to
11028 the maximum value of the range. */
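/* E.g. for a switch over case values 5..10, INDEX already has 5
   subtracted, so a single unsigned INDEX > 5 test catches both original
   values below 5 (which wrapped around to very large unsigned values)
   and values above 10. */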
11029
11030 if (default_label)
11031 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11032 default_label, default_probability);
11033
11034
11035 /* If index is in range, it must fit in Pmode.
11036 Convert to Pmode so we can index with it. */
11037 if (mode != Pmode)
11038 index = convert_to_mode (Pmode, index, 1);
11039
11040 /* Don't let a MEM slip through, because then INDEX that comes
11041 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11042 and break_out_memory_refs will go to work on it and mess it up. */
11043 #ifdef PIC_CASE_VECTOR_ADDRESS
11044 if (flag_pic && !REG_P (index))
11045 index = copy_to_mode_reg (Pmode, index);
11046 #endif
11047
11048 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11049 GET_MODE_SIZE, because this indicates how large insns are. The other
11050 uses should all be Pmode, because they are addresses. This code
11051 could fail if addresses and insns are not the same size. */
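/* The address computed below is
   TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
   i.e. a plain scaled index into the dispatch table (further wrapped
   by PIC_CASE_VECTOR_ADDRESS when generating PIC code). */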
11052 index = gen_rtx_PLUS
11053 (Pmode,
11054 gen_rtx_MULT (Pmode, index,
11055 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
11056 gen_rtx_LABEL_REF (Pmode, table_label));
11057 #ifdef PIC_CASE_VECTOR_ADDRESS
11058 if (flag_pic)
11059 index = PIC_CASE_VECTOR_ADDRESS (index);
11060 else
11061 #endif
11062 index = memory_address (CASE_VECTOR_MODE, index);
11063 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11064 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11065 convert_move (temp, vector, 0);
11066
11067 emit_jump_insn (gen_tablejump (temp, table_label));
11068
11069 /* If we are generating PIC code or if the table is PC-relative, the
11070 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11071 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11072 emit_barrier ();
11073 }
11074
11075 int
11076 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11077 rtx table_label, rtx default_label, int default_probability)
11078 {
11079 rtx index;
11080
11081 if (! HAVE_tablejump)
11082 return 0;
11083
11084 index_expr = fold_build2 (MINUS_EXPR, index_type,
11085 fold_convert (index_type, index_expr),
11086 fold_convert (index_type, minval));
11087 index = expand_normal (index_expr);
11088 do_pending_stack_adjust ();
11089
11090 do_tablejump (index, TYPE_MODE (index_type),
11091 convert_modes (TYPE_MODE (index_type),
11092 TYPE_MODE (TREE_TYPE (range)),
11093 expand_normal (range),
11094 TYPE_UNSIGNED (TREE_TYPE (range))),
11095 table_label, default_label, default_probability);
11096 return 1;
11097 }
11098
11099 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11100 static rtx
11101 const_vector_from_tree (tree exp)
11102 {
11103 rtvec v;
11104 unsigned i;
11105 int units;
11106 tree elt;
11107 enum machine_mode inner, mode;
11108
11109 mode = TYPE_MODE (TREE_TYPE (exp));
11110
11111 if (initializer_zerop (exp))
11112 return CONST0_RTX (mode);
11113
11114 units = GET_MODE_NUNITS (mode);
11115 inner = GET_MODE_INNER (mode);
11116
11117 v = rtvec_alloc (units);
11118
11119 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11120 {
11121 elt = VECTOR_CST_ELT (exp, i);
11122
11123 if (TREE_CODE (elt) == REAL_CST)
11124 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11125 inner);
11126 else if (TREE_CODE (elt) == FIXED_CST)
11127 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11128 inner);
11129 else
11130 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11131 inner);
11132 }
11133
11134 return gen_rtx_CONST_VECTOR (mode, v);
11135 }
11136
11137 /* Build a decl for a personality function given a language prefix. */
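/* For example, with LANG "gxx" and DWARF2 unwind info the resulting
   decl is named "__gxx_personality_v0"; with SJLJ unwinding it would be
   "__gxx_personality_sj0". */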
11138
11139 tree
11140 build_personality_function (const char *lang)
11141 {
11142 const char *unwind_and_version;
11143 tree decl, type;
11144 char *name;
11145
11146 switch (targetm_common.except_unwind_info (&global_options))
11147 {
11148 case UI_NONE:
11149 return NULL;
11150 case UI_SJLJ:
11151 unwind_and_version = "_sj0";
11152 break;
11153 case UI_DWARF2:
11154 case UI_TARGET:
11155 unwind_and_version = "_v0";
11156 break;
11157 case UI_SEH:
11158 unwind_and_version = "_seh0";
11159 break;
11160 default:
11161 gcc_unreachable ();
11162 }
11163
11164 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11165
11166 type = build_function_type_list (integer_type_node, integer_type_node,
11167 long_long_unsigned_type_node,
11168 ptr_type_node, ptr_type_node, NULL_TREE);
11169 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11170 get_identifier (name), type);
11171 DECL_ARTIFICIAL (decl) = 1;
11172 DECL_EXTERNAL (decl) = 1;
11173 TREE_PUBLIC (decl) = 1;
11174
11175 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11176 are the flags assigned by targetm.encode_section_info. */
11177 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11178
11179 return decl;
11180 }
11181
11182 /* Extracts the personality function of DECL and returns the corresponding
11183 libfunc. */
11184
11185 rtx
11186 get_personality_function (tree decl)
11187 {
11188 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11189 enum eh_personality_kind pk;
11190
11191 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11192 if (pk == eh_personality_none)
11193 return NULL;
11194
11195 if (!personality
11196 && pk == eh_personality_any)
11197 personality = lang_hooks.eh_personality ();
11198
11199 if (pk == eh_personality_lang)
11200 gcc_assert (personality != NULL_TREE);
11201
11202 return XEXP (DECL_RTL (personality), 0);
11203 }
11204
11205 #include "gt-expr.h"