/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "common/common-target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
#include "params.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        enum machine_mode,
                        tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
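
/* Illustrative note (editor's sketch, not original code): these
   predicates gate inline expansion.  For instance, emit_block_move_hints
   later in this file decides

     if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   before falling back to a movmem pattern or a library call.  */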

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag (gen_reg_rtx (word_mode),
                                      LT, lowfrom, const0_rtx,
                                      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
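
/* Usage sketch (editor's illustration; SI_REG and QI_REG are hypothetical
   pseudos): zero-extending a QImode value into an existing SImode
   register is simply

     convert_move (si_reg, qi_reg, 1);

   and the cases above pick a direct extend insn, a conversion via an
   intermediate mode, or a shift pair as available.  */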

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
        val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION_MODES_P (mode,
                                                            GET_MODE (x))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
          && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= GET_MODE_MASK (oldmode);
          if (! unsignedp
              && val_signbit_known_set_p (oldmode, val))
            val |= ~GET_MODE_MASK (oldmode);

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
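
/* Worked example (editor's illustration): constants are folded rather
   than copied through a register, so

     convert_modes (HImode, QImode, constm1_rtx, 1)

   masks with GET_MODE_MASK (QImode) in the CONST_INT path above and
   returns (const_int 255).  */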

/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}

/* Return the widest integer mode narrower than SIZE (SIZE is treated as
   an exclusive upper bound, which is why callers pass the maximum piece
   size plus one).  If no such mode can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
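
/* Example (editor's note): with the standard QI/HI/SI/DI modes,
   widest_int_mode_for_size (5) is SImode and
   widest_int_mode_for_size (4) is HImode; the bound is exclusive,
   hence the "MOVE_MAX_PIECES + 1" at the call sites.  */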

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode;
  enum machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is the maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
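
/* Worked example (editor's illustration): on a target with
   MOVE_MAX_PIECES == 4 and cheap aligned moves in every integer mode,
   a fully aligned 7-byte copy decomposes greedily into one SImode, one
   HImode and one QImode move, so
   move_by_pieces_ninsns (7, 32, 5) == 3.  */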

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }

  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
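
/* Usage sketch (editor's illustration; DST_MEM, SRC_MEM and LEN stand
   for the caller's values): a typical block copy is emitted with

     emit_block_move (dst_mem, src_mem, GEN_INT (len), BLOCK_OP_NORMAL);

   while the _hints variant above lets the caller pass an expected
   alignment and size through to the movmem expander.  */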

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
        {
          struct expand_operand ops[6];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops == 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode = get_address_mode (x);
  enum machine_mode y_addr_mode = get_address_mode (y);
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
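
/* Editor's note: the RTL emitted above is the bottom-tested byte loop

     iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter++;
   cmp:
     if (iter < size) goto top;

   which is why the ??? comment above wishes for hunks larger than
   QImode.  */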

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
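
/* Editor's note: a group built this way has the shape (register numbers
   hypothetical)

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each EXPR_LIST pairs a register with the byte offset of the
   piece of the value it carries.  */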
1597
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1601
1602 static void
1603 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1604 {
1605 rtx src;
1606 int start, i;
1607 enum machine_mode m = GET_MODE (orig_src);
1608
1609 gcc_assert (GET_CODE (dst) == PARALLEL);
1610
1611 if (m != VOIDmode
1612 && !SCALAR_INT_MODE_P (m)
1613 && !MEM_P (orig_src)
1614 && GET_CODE (orig_src) != CONCAT)
1615 {
1616 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1617 if (imode == BLKmode)
1618 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1619 else
1620 src = gen_reg_rtx (imode);
1621 if (imode != BLKmode)
1622 src = gen_lowpart (GET_MODE (orig_src), src);
1623 emit_move_insn (src, orig_src);
1624 /* ...and back again. */
1625 if (imode != BLKmode)
1626 src = gen_lowpart (imode, src);
1627 emit_group_load_1 (tmps, dst, src, type, ssize);
1628 return;
1629 }
1630
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst, 0, 0), 0))
1634 start = 0;
1635 else
1636 start = 1;
1637
1638 /* Process the pieces. */
1639 for (i = start; i < XVECLEN (dst, 0); i++)
1640 {
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1642 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1643 unsigned int bytelen = GET_MODE_SIZE (mode);
1644 int shift = 0;
1645
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1648 {
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1651 if (
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1654 == (BYTES_BIG_ENDIAN ? upward : downward)
1655 #else
1656 BYTES_BIG_ENDIAN
1657 #endif
1658 )
1659 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1660 bytelen = ssize - bytepos;
1661 gcc_assert (bytelen > 0);
1662 }
1663
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1667 src = orig_src;
1668 if (!MEM_P (orig_src)
1669 && (!CONSTANT_P (orig_src)
1670 || (GET_MODE (orig_src) != mode
1671 && GET_MODE (orig_src) != VOIDmode)))
1672 {
1673 if (GET_MODE (orig_src) == VOIDmode)
1674 src = gen_reg_rtx (mode);
1675 else
1676 src = gen_reg_rtx (GET_MODE (orig_src));
1677
1678 emit_move_insn (src, orig_src);
1679 }
1680
1681 /* Optimize the access just a bit. */
1682 if (MEM_P (src)
1683 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1684 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1685 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1686 && bytelen == GET_MODE_SIZE (mode))
1687 {
1688 tmps[i] = gen_reg_rtx (mode);
1689 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1690 }
1691 else if (COMPLEX_MODE_P (mode)
1692 && GET_MODE (src) == mode
1693 && bytelen == GET_MODE_SIZE (mode))
1694 /* Let emit_move_complex do the bulk of the work. */
1695 tmps[i] = src;
1696 else if (GET_CODE (src) == CONCAT)
1697 {
1698 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1699 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1700
1701 if ((bytepos == 0 && bytelen == slen0)
1702 || (bytepos != 0 && bytepos + bytelen <= slen))
1703 {
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1707 to be extracted. */
1708 tmps[i] = XEXP (src, bytepos / slen0);
1709 if (! CONSTANT_P (tmps[i])
1710 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1711 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1712 (bytepos % slen0) * BITS_PER_UNIT,
1713 1, false, NULL_RTX, mode, mode);
1714 }
1715 else
1716 {
1717 rtx mem;
1718
1719 gcc_assert (!bytepos);
1720 mem = assign_stack_temp (GET_MODE (src), slen);
1721 emit_move_insn (mem, src);
1722 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1723 0, 1, false, NULL_RTX, mode, mode);
1724 }
1725 }
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. While we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst))
1730 && REG_P (src))
1731 {
1732 int slen = GET_MODE_SIZE (GET_MODE (src));
1733 rtx mem;
1734
1735 mem = assign_stack_temp (GET_MODE (src), slen);
1736 emit_move_insn (mem, src);
1737 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1738 }
1739 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1740 && XVECLEN (dst, 0) > 1)
1741 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1742 else if (CONSTANT_P (src))
1743 {
1744 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1745
1746 if (len == ssize)
1747 tmps[i] = src;
1748 else
1749 {
1750 rtx first, second;
1751
1752 gcc_assert (2 * len == ssize);
1753 split_double (src, &first, &second);
1754 if (i)
1755 tmps[i] = second;
1756 else
1757 tmps[i] = first;
1758 }
1759 }
1760 else if (REG_P (src) && GET_MODE (src) == mode)
1761 tmps[i] = src;
1762 else
1763 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1764 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1765 mode, mode);
1766
1767 if (shift)
1768 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1769 shift, tmps[i], 0);
1770 }
1771 }
1772
1773 /* Emit code to move a block SRC of type TYPE to a block DST,
1774 where DST is non-consecutive registers represented by a PARALLEL.
1775 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1776 if not known. */
1777
1778 void
1779 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1780 {
1781 rtx *tmps;
1782 int i;
1783
1784 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1785 emit_group_load_1 (tmps, dst, src, type, ssize);
1786
1787 /* Copy the extracted pieces into the proper (probable) hard regs. */
1788 for (i = 0; i < XVECLEN (dst, 0); i++)
1789 {
1790 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1791 if (d == NULL)
1792 continue;
1793 emit_move_insn (d, tmps[i]);
1794 }
1795 }
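
/* Editor's sketch, not part of the original source: a typical DST for
   emit_group_load is built by a target's function_arg/function_value
   hook.  For a 16-byte struct split across two DImode registers
   (register numbers 0 and 1 are placeholders for ABI registers):

     rtx dst = gen_rtx_PARALLEL
       (BLKmode,
        gen_rtvec (2,
                   gen_rtx_EXPR_LIST (VOIDmode,
                                      gen_rtx_REG (DImode, 0), const0_rtx),
                   gen_rtx_EXPR_LIST (VOIDmode,
                                      gen_rtx_REG (DImode, 1), GEN_INT (8))));
     emit_group_load (dst, src_mem, type, 16);  */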
1796
1797 /* Similar, but load SRC into new pseudos in a format that looks like
1798 PARALLEL. This can later be fed to emit_group_move to get things
1799 in the right place. */
1800
1801 rtx
1802 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1803 {
1804 rtvec vec;
1805 int i;
1806
1807 vec = rtvec_alloc (XVECLEN (parallel, 0));
1808 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1809
1810 /* Convert the vector to look just like the original PARALLEL, except
1811 with the computed values. */
1812 for (i = 0; i < XVECLEN (parallel, 0); i++)
1813 {
1814 rtx e = XVECEXP (parallel, 0, i);
1815 rtx d = XEXP (e, 0);
1816
1817 if (d)
1818 {
1819 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1820 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1821 }
1822 RTVEC_ELT (vec, i) = e;
1823 }
1824
1825 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1826 }
1827
1828 /* Emit code to move a block SRC to block DST, where SRC and DST are
1829 non-consecutive groups of registers, each represented by a PARALLEL. */
1830
1831 void
1832 emit_group_move (rtx dst, rtx src)
1833 {
1834 int i;
1835
1836 gcc_assert (GET_CODE (src) == PARALLEL
1837 && GET_CODE (dst) == PARALLEL
1838 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1839
1840 /* Skip first entry if NULL. */
1841 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1842 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1843 XEXP (XVECEXP (src, 0, i), 0));
1844 }
1845
1846 /* Move a group of registers represented by a PARALLEL into pseudos. */
1847
1848 rtx
1849 emit_group_move_into_temps (rtx src)
1850 {
1851 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1852 int i;
1853
1854 for (i = 0; i < XVECLEN (src, 0); i++)
1855 {
1856 rtx e = XVECEXP (src, 0, i);
1857 rtx d = XEXP (e, 0);
1858
1859 if (d)
1860 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1862 }
1863
1864 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1865 }
1866
1867 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1868 where SRC is non-consecutive registers represented by a PARALLEL.
1869 SSIZE represents the total size of block ORIG_DST, or -1 if not
1870 known. */
1871
1872 void
1873 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1874 {
1875 rtx *tmps, dst;
1876 int start, finish, i;
1877 enum machine_mode m = GET_MODE (orig_dst);
1878
1879 gcc_assert (GET_CODE (src) == PARALLEL);
1880
1881 if (!SCALAR_INT_MODE_P (m)
1882 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1883 {
1884 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1885 if (imode == BLKmode)
1886 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1887 else
1888 dst = gen_reg_rtx (imode);
1889 emit_group_store (dst, src, type, ssize);
1890 if (imode != BLKmode)
1891 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1892 emit_move_insn (orig_dst, dst);
1893 return;
1894 }
1895
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (src, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902 finish = XVECLEN (src, 0);
1903
1904 tmps = XALLOCAVEC (rtx, finish);
1905
1906 /* Copy the (probable) hard regs into pseudos. */
1907 for (i = start; i < finish; i++)
1908 {
1909 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1910 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1911 {
1912 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1913 emit_move_insn (tmps[i], reg);
1914 }
1915 else
1916 tmps[i] = reg;
1917 }
1918
1919 /* If we won't be storing directly into memory, protect the real destination
1920 from strange tricks we might play. */
1921 dst = orig_dst;
1922 if (GET_CODE (dst) == PARALLEL)
1923 {
1924 rtx temp;
1925
1926 /* We can get a PARALLEL dst if there is a conditional expression in
1927 a return statement. In that case, the dst and src are the same,
1928 so no action is necessary. */
1929 if (rtx_equal_p (dst, src))
1930 return;
1931
1932 /* It is unclear if we can ever reach here, but we may as well handle
1933 it. Allocate a temporary, and split this into a store/load to/from
1934 the temporary. */
1935
1936 temp = assign_stack_temp (GET_MODE (dst), ssize);
1937 emit_group_store (temp, src, type, ssize);
1938 emit_group_load (dst, temp, type, ssize);
1939 return;
1940 }
1941 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1942 {
1943 enum machine_mode outer = GET_MODE (dst);
1944 enum machine_mode inner;
1945 HOST_WIDE_INT bytepos;
1946 bool done = false;
1947 rtx temp;
1948
1949 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1950 dst = gen_reg_rtx (outer);
1951
1952 /* Make life a bit easier for combine. */
1953 /* If the first element of the vector is the low part
1954 of the destination mode, use a paradoxical subreg to
1955 initialize the destination. */
1956 if (start < finish)
1957 {
1958 inner = GET_MODE (tmps[start]);
1959 bytepos = subreg_lowpart_offset (inner, outer);
1960 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1961 {
1962 temp = simplify_gen_subreg (outer, tmps[start],
1963 inner, 0);
1964 if (temp)
1965 {
1966 emit_move_insn (dst, temp);
1967 done = true;
1968 start++;
1969 }
1970 }
1971 }
1972
1973 /* If the first element wasn't the low part, try the last. */
1974 if (!done
1975 && start < finish - 1)
1976 {
1977 inner = GET_MODE (tmps[finish - 1]);
1978 bytepos = subreg_lowpart_offset (inner, outer);
1979 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1980 {
1981 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1982 inner, 0);
1983 if (temp)
1984 {
1985 emit_move_insn (dst, temp);
1986 done = true;
1987 finish--;
1988 }
1989 }
1990 }
1991
1992 /* Otherwise, simply initialize the result to zero. */
1993 if (!done)
1994 emit_move_insn (dst, CONST0_RTX (outer));
1995 }
1996
1997 /* Process the pieces. */
1998 for (i = start; i < finish; i++)
1999 {
2000 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2001 enum machine_mode mode = GET_MODE (tmps[i]);
2002 unsigned int bytelen = GET_MODE_SIZE (mode);
2003 unsigned int adj_bytelen = bytelen;
2004 rtx dest = dst;
2005
2006 /* Handle trailing fragments that run over the size of the struct. */
2007 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2008 adj_bytelen = ssize - bytepos;
2009
2010 if (GET_CODE (dst) == CONCAT)
2011 {
2012 if (bytepos + adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 {
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2019 }
2020 else
2021 {
2022 enum machine_mode dest_mode = GET_MODE (dest);
2023 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2024
2025 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2026
2027 if (GET_MODE_ALIGNMENT (dest_mode)
2028 >= GET_MODE_ALIGNMENT (tmp_mode))
2029 {
2030 dest = assign_stack_temp (dest_mode,
2031 GET_MODE_SIZE (dest_mode));
2032 emit_move_insn (adjust_address (dest,
2033 tmp_mode,
2034 bytepos),
2035 tmps[i]);
2036 dst = dest;
2037 }
2038 else
2039 {
2040 dest = assign_stack_temp (tmp_mode,
2041 GET_MODE_SIZE (tmp_mode));
2042 emit_move_insn (dest, tmps[i]);
2043 dst = adjust_address (dest, dest_mode, bytepos);
2044 }
2045 break;
2046 }
2047 }
2048
2049 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2050 {
2051 /* store_bit_field always takes its value from the lsb.
2052 Move the fragment to the lsb if it's not already there. */
2053 if (
2054 #ifdef BLOCK_REG_PADDING
2055 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2056 == (BYTES_BIG_ENDIAN ? upward : downward)
2057 #else
2058 BYTES_BIG_ENDIAN
2059 #endif
2060 )
2061 {
2062 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2063 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2064 shift, tmps[i], 0);
2065 }
2066 bytelen = adj_bytelen;
2067 }
2068
2069 /* Optimize the access just a bit. */
2070 if (MEM_P (dest)
2071 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2072 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2073 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2074 && bytelen == GET_MODE_SIZE (mode))
2075 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2076 else
2077 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2078 0, 0, mode, tmps[i]);
2079 }
2080
2081 /* Copy from the pseudo into the (probable) hard reg. */
2082 if (orig_dst != dst)
2083 emit_move_insn (orig_dst, dst);
2084 }
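
/* Editor's sketch, under the same assumptions as the emit_group_load
   example above: spilling a two-register PARALLEL value into a 16-byte
   stack temporary, where PARALLEL_SRC names the PARALLEL built by the
   target hook:

     rtx mem = assign_stack_temp (BLKmode, 16);
     emit_group_store (mem, parallel_src, type, 16);  */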
2085
2086 /* Generate code to copy a BLKmode object of TYPE out of a
2087 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2088 is null, a stack temporary is created. TGTBLK is returned.
2089
2090 The purpose of this routine is to handle functions that return
2091 BLKmode structures in registers. Some machines (the PA for example)
2092 want to return all small structures in registers regardless of the
2093 structure's alignment. */
2094
2095 rtx
2096 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2097 {
2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2102 enum machine_mode copy_mode;
2103
2104 if (tgtblk == 0)
2105 {
2106 tgtblk = assign_temp (build_qualified_type (type,
2107 (TYPE_QUALS (type)
2108 | TYPE_QUAL_CONST)),
2109 1, 1);
2110 preserve_temp_slots (tgtblk);
2111 }
2112
2113 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2114 into a new pseudo which is a full word. */
2115
2116 if (GET_MODE (srcreg) != BLKmode
2117 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2118 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2119
2120 /* If the structure doesn't take up a whole number of words, see whether
2121 SRCREG is padded on the left or on the right. If it's on the left,
2122 set PADDING_CORRECTION to the number of bits to skip.
2123
2124 In most ABIs, the structure will be returned at the least significant end of
2125 the register, which translates to right padding on little-endian
2126 targets and left padding on big-endian targets. The opposite
2127 holds if the structure is returned at the most significant
2128 end of the register. */
2129 if (bytes % UNITS_PER_WORD != 0
2130 && (targetm.calls.return_in_msb (type)
2131 ? !BYTES_BIG_ENDIAN
2132 : BYTES_BIG_ENDIAN))
2133 padding_correction
2134 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2135
2136 /* Copy the structure BITSIZE bits at a time. If the target lives in
2137 memory, take care of not reading/writing past its end by selecting
2138 a copy mode suited to BITSIZE. This should always be possible given
2139 how it is computed.
2140
2141 We could probably emit more efficient code for machines which do not use
2142 strict alignment, but it doesn't seem worth the effort at the current
2143 time. */
2144
2145 copy_mode = word_mode;
2146 if (MEM_P (tgtblk))
2147 {
2148 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2149 if (mem_mode != BLKmode)
2150 copy_mode = mem_mode;
2151 }
2152
2153 for (bitpos = 0, xbitpos = padding_correction;
2154 bitpos < bytes * BITS_PER_UNIT;
2155 bitpos += bitsize, xbitpos += bitsize)
2156 {
2157 /* We need a new source operand each time xbitpos is on a
2158 word boundary and when xbitpos == padding_correction
2159 (the first time through). */
2160 if (xbitpos % BITS_PER_WORD == 0
2161 || xbitpos == padding_correction)
2162 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2163 GET_MODE (srcreg));
2164
2165 /* We need a new destination operand each time bitpos is on
2166 a word boundary. */
2167 if (bitpos % BITS_PER_WORD == 0)
2168 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2169
2170 /* Use xbitpos for the source extraction (right justified) and
2171 bitpos for the destination store (left justified). */
2172 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2173 extract_bit_field (src, bitsize,
2174 xbitpos % BITS_PER_WORD, 1, false,
2175 NULL_RTX, copy_mode, copy_mode));
2176 }
2177
2178 return tgtblk;
2179 }
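
/* Editor's sketch: a caller expanding a call whose BLKmode result
   arrives in hard register SRCREG would typically write

     rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, type);

   where passing a null TGTBLK lets the routine allocate the stack
   temporary itself.  */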
2180
2181 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2182 register if it contains any data, otherwise return null.
2183
2184 This is used on targets that return BLKmode values in registers. */
2185
2186 rtx
2187 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2188 {
2189 int i, n_regs;
2190 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2191 unsigned int bitsize;
2192 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2193 enum machine_mode dst_mode;
2194
2195 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2196
2197 x = expand_normal (src);
2198
2199 bytes = int_size_in_bytes (TREE_TYPE (src));
2200 if (bytes == 0)
2201 return NULL_RTX;
2202
2203 /* If the structure doesn't take up a whole number of words, see
2204 whether the register value should be padded on the left or on
2205 the right. Set PADDING_CORRECTION to the number of padding
2206 bits needed on the left side.
2207
2208 In most ABIs, the structure will be returned at the least significant end of
2209 the register, which translates to right padding on little-endian
2210 targets and left padding on big-endian targets. The opposite
2211 holds if the structure is returned at the most significant
2212 end of the register. */
2213 if (bytes % UNITS_PER_WORD != 0
2214 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2215 ? !BYTES_BIG_ENDIAN
2216 : BYTES_BIG_ENDIAN))
2217 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2218 * BITS_PER_UNIT));
2219
2220 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2221 dst_words = XALLOCAVEC (rtx, n_regs);
2222 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2223
2224 /* Copy the structure BITSIZE bits at a time. */
2225 for (bitpos = 0, xbitpos = padding_correction;
2226 bitpos < bytes * BITS_PER_UNIT;
2227 bitpos += bitsize, xbitpos += bitsize)
2228 {
2229 /* We need a new destination pseudo each time xbitpos is
2230 on a word boundary and when xbitpos == padding_correction
2231 (the first time through). */
2232 if (xbitpos % BITS_PER_WORD == 0
2233 || xbitpos == padding_correction)
2234 {
2235 /* Generate an appropriate register. */
2236 dst_word = gen_reg_rtx (word_mode);
2237 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2238
2239 /* Clear the destination before we move anything into it. */
2240 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2241 }
2242
2243 /* We need a new source operand each time bitpos is on a word
2244 boundary. */
2245 if (bitpos % BITS_PER_WORD == 0)
2246 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2247
2248 /* Use bitpos for the source extraction (left justified) and
2249 xbitpos for the destination store (right justified). */
2250 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2251 0, 0, word_mode,
2252 extract_bit_field (src_word, bitsize,
2253 bitpos % BITS_PER_WORD, 1, false,
2254 NULL_RTX, word_mode, word_mode));
2255 }
2256
2257 if (mode == BLKmode)
2258 {
2259 /* Find the smallest integer mode large enough to hold the
2260 entire structure. */
2261 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2262 mode != VOIDmode;
2263 mode = GET_MODE_WIDER_MODE (mode))
2264 /* Have we found a large enough mode? */
2265 if (GET_MODE_SIZE (mode) >= bytes)
2266 break;
2267
2268 /* A suitable mode should have been found. */
2269 gcc_assert (mode != VOIDmode);
2270 }
2271
2272 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2273 dst_mode = word_mode;
2274 else
2275 dst_mode = mode;
2276 dst = gen_reg_rtx (dst_mode);
2277
2278 for (i = 0; i < n_regs; i++)
2279 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2280
2281 if (mode != dst_mode)
2282 dst = gen_lowpart (mode, dst);
2283
2284 return dst;
2285 }
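
/* Editor's sketch of how return expansion might use the routine above
   to place a BLKmode RESULT_DECL into the ABI return register;
   RESULT_RTL and RETVAL are placeholders for the caller's locals:

     rtx val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval);
     if (val)
       emit_move_insn (result_rtl, val);  */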
2286
2287 /* Add a USE expression for REG to the (possibly empty) list pointed
2288 to by CALL_FUSAGE. REG must denote a hard register. */
2289
2290 void
2291 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2292 {
2293 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2294
2295 *call_fusage
2296 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2297 }
2298
2299 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2300 starting at REGNO. All of these registers must be hard registers. */
2301
2302 void
2303 use_regs (rtx *call_fusage, int regno, int nregs)
2304 {
2305 int i;
2306
2307 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2308
2309 for (i = 0; i < nregs; i++)
2310 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2311 }
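
/* Editor's sketch: recording that a call reads two consecutive
   argument registers, assuming hard regs 0 and 1 are the ABI argument
   registers:

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 0, 2);

   CALL_FUSAGE is then attached to the call insn through
   CALL_INSN_FUNCTION_USAGE.  */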
2312
2313 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2314 PARALLEL REGS. This is for calls that pass values in multiple
2315 non-contiguous locations. The Irix 6 ABI has examples of this. */
2316
2317 void
2318 use_group_regs (rtx *call_fusage, rtx regs)
2319 {
2320 int i;
2321
2322 for (i = 0; i < XVECLEN (regs, 0); i++)
2323 {
2324 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2325
2326 /* A NULL entry means the parameter goes both on the stack and in
2327 registers. This can also be a MEM for targets that pass values
2328 partially on the stack and partially in registers. */
2329 if (reg != 0 && REG_P (reg))
2330 use_reg (call_fusage, reg);
2331 }
2332 }
2333
2334 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2335 assignment and the code of the expression on the RHS is CODE.  Return
2336 NULL otherwise. */
2337
2338 static gimple
2339 get_def_for_expr (tree name, enum tree_code code)
2340 {
2341 gimple def_stmt;
2342
2343 if (TREE_CODE (name) != SSA_NAME)
2344 return NULL;
2345
2346 def_stmt = get_gimple_for_ssa_name (name);
2347 if (!def_stmt
2348 || gimple_assign_rhs_code (def_stmt) != code)
2349 return NULL;
2350
2351 return def_stmt;
2352 }
2353
2354 #ifdef HAVE_conditional_move
2355 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2356 assignment and the class of the expression on the RHS is TCLASS.  Return
2357 NULL otherwise. */
2358
2359 static gimple
2360 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2361 {
2362 gimple def_stmt;
2363
2364 if (TREE_CODE (name) != SSA_NAME)
2365 return NULL;
2366
2367 def_stmt = get_gimple_for_ssa_name (name);
2368 if (!def_stmt
2369 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2370 return NULL;
2371
2372 return def_stmt;
2373 }
2374 #endif
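
/* Editor's sketch: expanders use get_def_for_expr to look through an
   SSA name for a known RHS code, e.g. to spot a multiplication feeding
   the statement being expanded.  TREEOP0 stands for whatever operand
   the caller is expanding:

     gimple def = get_def_for_expr (treeop0, MULT_EXPR);
     if (def)
       {
         tree op0 = gimple_assign_rhs1 (def);
         tree op1 = gimple_assign_rhs2 (def);
         ...
       }  */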
2375 \f
2376
2377 /* Determine whether the LEN bytes generated by CONSTFUN can be
2378 stored to memory using several move instructions. CONSTFUNDATA is
2379 a pointer which will be passed as argument in every CONSTFUN call.
2380 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2381 a memset operation and false if it's a copy of a constant string.
2382 Return nonzero if a call to store_by_pieces should succeed. */
2383
2384 int
2385 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2386 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2387 void *constfundata, unsigned int align, bool memsetp)
2388 {
2389 unsigned HOST_WIDE_INT l;
2390 unsigned int max_size;
2391 HOST_WIDE_INT offset = 0;
2392 enum machine_mode mode;
2393 enum insn_code icode;
2394 int reverse;
2395 /* CST is set but not used if targetm.legitimate_constant_p doesn't use it.  */
2396 rtx cst ATTRIBUTE_UNUSED;
2397
2398 if (len == 0)
2399 return 1;
2400
2401 if (! (memsetp
2402 ? SET_BY_PIECES_P (len, align)
2403 : STORE_BY_PIECES_P (len, align)))
2404 return 0;
2405
2406 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2407
2408 /* We would first store what we can in the largest integer mode, then go to
2409 successively smaller modes. */
2410
2411 for (reverse = 0;
2412 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2413 reverse++)
2414 {
2415 l = len;
2416 max_size = STORE_MAX_PIECES + 1;
2417 while (max_size > 1)
2418 {
2419 mode = widest_int_mode_for_size (max_size);
2420
2421 if (mode == VOIDmode)
2422 break;
2423
2424 icode = optab_handler (mov_optab, mode);
2425 if (icode != CODE_FOR_nothing
2426 && align >= GET_MODE_ALIGNMENT (mode))
2427 {
2428 unsigned int size = GET_MODE_SIZE (mode);
2429
2430 while (l >= size)
2431 {
2432 if (reverse)
2433 offset -= size;
2434
2435 cst = (*constfun) (constfundata, offset, mode);
2436 if (!targetm.legitimate_constant_p (mode, cst))
2437 return 0;
2438
2439 if (!reverse)
2440 offset += size;
2441
2442 l -= size;
2443 }
2444 }
2445
2446 max_size = GET_MODE_SIZE (mode);
2447 }
2448
2449 /* The code above should have handled everything. */
2450 gcc_assert (!l);
2451 }
2452
2453 return 1;
2454 }
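
/* Editor's sketch of a CONSTFUN callback for the routine above.
   builtin_memset_read_str in builtins.c plays this role for memset; a
   minimal constant-string reader could look like this, where the name
   example_read_str is ours and c_readstr is the existing helper in
   builtins.c:

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset,
                       enum machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }  */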
2455
2456 /* Generate several move instructions to store LEN bytes generated by
2457 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2458 pointer which will be passed as argument in every CONSTFUN call.
2459 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2460 a memset operation and false if it's a copy of a constant string.
2461 If ENDP is 0, return TO; if ENDP is 1, return memory at the end,
2462 a la mempcpy; and if ENDP is 2, return memory at the end minus one
2463 byte, a la stpcpy.  */
2464
2465 rtx
2466 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2467 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2468 void *constfundata, unsigned int align, bool memsetp, int endp)
2469 {
2470 enum machine_mode to_addr_mode = get_address_mode (to);
2471 struct store_by_pieces_d data;
2472
2473 if (len == 0)
2474 {
2475 gcc_assert (endp != 2);
2476 return to;
2477 }
2478
2479 gcc_assert (memsetp
2480 ? SET_BY_PIECES_P (len, align)
2481 : STORE_BY_PIECES_P (len, align));
2482 data.constfun = constfun;
2483 data.constfundata = constfundata;
2484 data.len = len;
2485 data.to = to;
2486 store_by_pieces_1 (&data, align);
2487 if (endp)
2488 {
2489 rtx to1;
2490
2491 gcc_assert (!data.reverse);
2492 if (data.autinc_to)
2493 {
2494 if (endp == 2)
2495 {
2496 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2497 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2498 else
2499 data.to_addr = copy_to_mode_reg (to_addr_mode,
2500 plus_constant (to_addr_mode,
2501 data.to_addr,
2502 -1));
2503 }
2504 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2505 data.offset);
2506 }
2507 else
2508 {
2509 if (endp == 2)
2510 --data.offset;
2511 to1 = adjust_address (data.to, QImode, data.offset);
2512 }
2513 return to1;
2514 }
2515 else
2516 return data.to;
2517 }
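
/* Editor's sketch, continuing the callback example above: a guarded
   piecewise copy of a constant string STR of length LEN into DEST_MEM,
   mirroring the pattern the string builtins use:

     if (can_store_by_pieces (len, example_read_str, (void *) str,
                              dest_align, false))
       store_by_pieces (dest_mem, len, example_read_str, (void *) str,
                        dest_align, false, 0);  */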
2518
2519 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2520 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2521
2522 static void
2523 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2524 {
2525 struct store_by_pieces_d data;
2526
2527 if (len == 0)
2528 return;
2529
2530 data.constfun = clear_by_pieces_1;
2531 data.constfundata = NULL;
2532 data.len = len;
2533 data.to = to;
2534 store_by_pieces_1 (&data, align);
2535 }
2536
2537 /* Callback routine for clear_by_pieces.
2538 Return const0_rtx unconditionally. */
2539
2540 static rtx
2541 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2542 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2543 enum machine_mode mode ATTRIBUTE_UNUSED)
2544 {
2545 return const0_rtx;
2546 }
2547
2548 /* Subroutine of clear_by_pieces and store_by_pieces.
2549 Generate several move instructions to store LEN bytes of block TO. (A MEM
2550 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2551
2552 static void
2553 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2554 unsigned int align ATTRIBUTE_UNUSED)
2555 {
2556 enum machine_mode to_addr_mode = get_address_mode (data->to);
2557 rtx to_addr = XEXP (data->to, 0);
2558 unsigned int max_size = STORE_MAX_PIECES + 1;
2559 enum insn_code icode;
2560
2561 data->offset = 0;
2562 data->to_addr = to_addr;
2563 data->autinc_to
2564 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2565 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2566
2567 data->explicit_inc_to = 0;
2568 data->reverse
2569 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2570 if (data->reverse)
2571 data->offset = data->len;
2572
2573 /* If storing requires more than two move insns,
2574 copy addresses to registers (to make displacements shorter)
2575 and use post-increment if available. */
2576 if (!data->autinc_to
2577 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2578 {
2579 /* Determine the main mode we'll be using.
2580 MODE might not be used depending on the definitions of the
2581 USE_* macros below. */
2582 enum machine_mode mode ATTRIBUTE_UNUSED
2583 = widest_int_mode_for_size (max_size);
2584
2585 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2586 {
2587 data->to_addr = copy_to_mode_reg (to_addr_mode,
2588 plus_constant (to_addr_mode,
2589 to_addr,
2590 data->len));
2591 data->autinc_to = 1;
2592 data->explicit_inc_to = -1;
2593 }
2594
2595 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2596 && ! data->autinc_to)
2597 {
2598 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2599 data->autinc_to = 1;
2600 data->explicit_inc_to = 1;
2601 }
2602
2603 if ( !data->autinc_to && CONSTANT_P (to_addr))
2604 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2605 }
2606
2607 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2608
2609 /* First store what we can in the largest integer mode, then go to
2610 successively smaller modes. */
2611
2612 while (max_size > 1)
2613 {
2614 enum machine_mode mode = widest_int_mode_for_size (max_size);
2615
2616 if (mode == VOIDmode)
2617 break;
2618
2619 icode = optab_handler (mov_optab, mode);
2620 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2621 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2622
2623 max_size = GET_MODE_SIZE (mode);
2624 }
2625
2626 /* The code above should have handled everything. */
2627 gcc_assert (!data->len);
2628 }
2629
2630 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2631 with move instructions for mode MODE. GENFUN is the gen_... function
2632 to make a move insn for that mode. DATA has all the other info. */
2633
2634 static void
2635 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2636 struct store_by_pieces_d *data)
2637 {
2638 unsigned int size = GET_MODE_SIZE (mode);
2639 rtx to1, cst;
2640
2641 while (data->len >= size)
2642 {
2643 if (data->reverse)
2644 data->offset -= size;
2645
2646 if (data->autinc_to)
2647 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2648 data->offset);
2649 else
2650 to1 = adjust_address (data->to, mode, data->offset);
2651
2652 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2653 emit_insn (gen_add2_insn (data->to_addr,
2654 GEN_INT (-(HOST_WIDE_INT) size)));
2655
2656 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2657 emit_insn ((*genfun) (to1, cst));
2658
2659 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2660 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2661
2662 if (! data->reverse)
2663 data->offset += size;
2664
2665 data->len -= size;
2666 }
2667 }
2668 \f
2669 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2670 its length in bytes. */
2671
2672 rtx
2673 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2674 unsigned int expected_align, HOST_WIDE_INT expected_size)
2675 {
2676 enum machine_mode mode = GET_MODE (object);
2677 unsigned int align;
2678
2679 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2680
2681 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2682 just move a zero. Otherwise, do this a piece at a time. */
2683 if (mode != BLKmode
2684 && CONST_INT_P (size)
2685 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2686 {
2687 rtx zero = CONST0_RTX (mode);
2688 if (zero != NULL)
2689 {
2690 emit_move_insn (object, zero);
2691 return NULL;
2692 }
2693
2694 if (COMPLEX_MODE_P (mode))
2695 {
2696 zero = CONST0_RTX (GET_MODE_INNER (mode));
2697 if (zero != NULL)
2698 {
2699 write_complex_part (object, zero, 0);
2700 write_complex_part (object, zero, 1);
2701 return NULL;
2702 }
2703 }
2704 }
2705
2706 if (size == const0_rtx)
2707 return NULL;
2708
2709 align = MEM_ALIGN (object);
2710
2711 if (CONST_INT_P (size)
2712 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2713 clear_by_pieces (object, INTVAL (size), align);
2714 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2715 expected_align, expected_size))
2716 ;
2717 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2718 return set_storage_via_libcall (object, size, const0_rtx,
2719 method == BLOCK_OP_TAILCALL);
2720 else
2721 gcc_unreachable ();
2722
2723 return NULL;
2724 }
2725
2726 rtx
2727 clear_storage (rtx object, rtx size, enum block_op_methods method)
2728 {
2729 return clear_storage_hints (object, size, method, 0, -1);
2730 }
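
/* Editor's sketch: zeroing a 32-byte BLKmode stack slot through the
   entry point above:

     rtx slot = assign_stack_temp (BLKmode, 32);
     clear_storage (slot, GEN_INT (32), BLOCK_OP_NORMAL);  */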
2731
2732
2733 /* A subroutine of clear_storage. Expand a call to memset.
2734 Return the return value of memset, 0 otherwise. */
2735
2736 rtx
2737 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2738 {
2739 tree call_expr, fn, object_tree, size_tree, val_tree;
2740 enum machine_mode size_mode;
2741 rtx retval;
2742
2743 /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2744 place those new pseudos into a VAR_DECL and use them later.  */
2745
2746 object = copy_addr_to_reg (XEXP (object, 0));
2747
2748 size_mode = TYPE_MODE (sizetype);
2749 size = convert_to_mode (size_mode, size, 1);
2750 size = copy_to_mode_reg (size_mode, size);
2751
2752 /* It is incorrect to use the libcall calling conventions to call
2753 memset in this context. This could be a user call to memset and
2754 the user may wish to examine the return value from memset. For
2755 targets where libcalls and normal calls have different conventions
2756 for returning pointers, we could end up generating incorrect code. */
2757
2758 object_tree = make_tree (ptr_type_node, object);
2759 if (!CONST_INT_P (val))
2760 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2761 size_tree = make_tree (sizetype, size);
2762 val_tree = make_tree (integer_type_node, val);
2763
2764 fn = clear_storage_libcall_fn (true);
2765 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2766 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2767
2768 retval = expand_normal (call_expr);
2769
2770 return retval;
2771 }
2772
2773 /* A subroutine of set_storage_via_libcall. Create the tree node
2774 for the function we use for block clears. */
2775
2776 tree block_clear_fn;
2777
2778 void
2779 init_block_clear_fn (const char *asmspec)
2780 {
2781 if (!block_clear_fn)
2782 {
2783 tree fn, args;
2784
2785 fn = get_identifier ("memset");
2786 args = build_function_type_list (ptr_type_node, ptr_type_node,
2787 integer_type_node, sizetype,
2788 NULL_TREE);
2789
2790 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2791 DECL_EXTERNAL (fn) = 1;
2792 TREE_PUBLIC (fn) = 1;
2793 DECL_ARTIFICIAL (fn) = 1;
2794 TREE_NOTHROW (fn) = 1;
2795 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2796 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2797
2798 block_clear_fn = fn;
2799 }
2800
2801 if (asmspec)
2802 set_user_assembler_name (block_clear_fn, asmspec);
2803 }
2804
2805 static tree
2806 clear_storage_libcall_fn (int for_call)
2807 {
2808 static bool emitted_extern;
2809
2810 if (!block_clear_fn)
2811 init_block_clear_fn (NULL);
2812
2813 if (for_call && !emitted_extern)
2814 {
2815 emitted_extern = true;
2816 make_decl_rtl (block_clear_fn);
2817 }
2818
2819 return block_clear_fn;
2820 }
2821 \f
2822 /* Expand a setmem pattern; return true if successful. */
2823
2824 bool
2825 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2826 unsigned int expected_align, HOST_WIDE_INT expected_size)
2827 {
2828 /* Try the most limited insn first, because there's no point
2829 including more than one in the machine description unless
2830 the more limited one has some advantage. */
2831
2832 enum machine_mode mode;
2833
2834 if (expected_align < align)
2835 expected_align = align;
2836
2837 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2838 mode = GET_MODE_WIDER_MODE (mode))
2839 {
2840 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2841
2842 if (code != CODE_FOR_nothing
2843 /* We don't need MODE to be narrower than
2844 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2845 the mode mask, as it is returned by the macro, it will
2846 definitely be less than the actual mode mask. */
2847 && ((CONST_INT_P (size)
2848 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2849 <= (GET_MODE_MASK (mode) >> 1)))
2850 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2851 {
2852 struct expand_operand ops[6];
2853 unsigned int nops;
2854
2855 nops = insn_data[(int) code].n_generator_args;
2856 gcc_assert (nops == 4 || nops == 6);
2857
2858 create_fixed_operand (&ops[0], object);
2859 /* The check above guarantees that this size conversion is valid. */
2860 create_convert_operand_to (&ops[1], size, mode, true);
2861 create_convert_operand_from (&ops[2], val, byte_mode, true);
2862 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2863 if (nops == 6)
2864 {
2865 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2866 create_integer_operand (&ops[5], expected_size);
2867 }
2868 if (maybe_expand_insn (code, nops, ops))
2869 return true;
2870 }
2871 }
2872
2873 return false;
2874 }
2875
2876 \f
2877 /* Write to one of the components of the complex value CPLX. Write VAL to
2878 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2879
2880 static void
2881 write_complex_part (rtx cplx, rtx val, bool imag_p)
2882 {
2883 enum machine_mode cmode;
2884 enum machine_mode imode;
2885 unsigned ibitsize;
2886
2887 if (GET_CODE (cplx) == CONCAT)
2888 {
2889 emit_move_insn (XEXP (cplx, imag_p), val);
2890 return;
2891 }
2892
2893 cmode = GET_MODE (cplx);
2894 imode = GET_MODE_INNER (cmode);
2895 ibitsize = GET_MODE_BITSIZE (imode);
2896
2897 /* For MEMs simplify_gen_subreg may generate an invalid new address
2898 because, e.g., the original address is considered mode-dependent
2899 by the target, which prevents simplify_subreg from invoking
2900 adjust_address_nv. Instead of preparing fallback support for an
2901 invalid address, we call adjust_address_nv directly. */
2902 if (MEM_P (cplx))
2903 {
2904 emit_move_insn (adjust_address_nv (cplx, imode,
2905 imag_p ? GET_MODE_SIZE (imode) : 0),
2906 val);
2907 return;
2908 }
2909
2910 /* If the sub-object is at least word sized, then we know that subregging
2911 will work. This special case is important, since store_bit_field
2912 wants to operate on integer modes, and there's rarely an OImode to
2913 correspond to TCmode. */
2914 if (ibitsize >= BITS_PER_WORD
2915 /* For hard regs we have exact predicates. Assume we can split
2916 the original object if it spans an even number of hard regs.
2917 This special case is important for SCmode on 64-bit platforms
2918 where the natural size of floating-point regs is 32-bit. */
2919 || (REG_P (cplx)
2920 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2921 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2922 {
2923 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2924 imag_p ? GET_MODE_SIZE (imode) : 0);
2925 if (part)
2926 {
2927 emit_move_insn (part, val);
2928 return;
2929 }
2930 else
2931 /* simplify_gen_subreg may fail for sub-word MEMs. */
2932 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2933 }
2934
2935 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2936 }
2937
2938 /* Extract one of the components of the complex value CPLX. Extract the
2939 real part if IMAG_P is false, and the imaginary part if it's true. */
2940
2941 static rtx
2942 read_complex_part (rtx cplx, bool imag_p)
2943 {
2944 enum machine_mode cmode, imode;
2945 unsigned ibitsize;
2946
2947 if (GET_CODE (cplx) == CONCAT)
2948 return XEXP (cplx, imag_p);
2949
2950 cmode = GET_MODE (cplx);
2951 imode = GET_MODE_INNER (cmode);
2952 ibitsize = GET_MODE_BITSIZE (imode);
2953
2954 /* Special case reads from complex constants that got spilled to memory. */
2955 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2956 {
2957 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2958 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2959 {
2960 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2961 if (CONSTANT_CLASS_P (part))
2962 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2963 }
2964 }
2965
2966 /* For MEMs simplify_gen_subreg may generate an invalid new address
2967 because, e.g., the original address is considered mode-dependent
2968 by the target, which prevents simplify_subreg from invoking
2969 adjust_address_nv. Instead of preparing fallback support for an
2970 invalid address, we call adjust_address_nv directly. */
2971 if (MEM_P (cplx))
2972 return adjust_address_nv (cplx, imode,
2973 imag_p ? GET_MODE_SIZE (imode) : 0);
2974
2975 /* If the sub-object is at least word sized, then we know that subregging
2976 will work. This special case is important, since extract_bit_field
2977 wants to operate on integer modes, and there's rarely an OImode to
2978 correspond to TCmode. */
2979 if (ibitsize >= BITS_PER_WORD
2980 /* For hard regs we have exact predicates. Assume we can split
2981 the original object if it spans an even number of hard regs.
2982 This special case is important for SCmode on 64-bit platforms
2983 where the natural size of floating-point regs is 32-bit. */
2984 || (REG_P (cplx)
2985 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2986 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2987 {
2988 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2989 imag_p ? GET_MODE_SIZE (imode) : 0);
2990 if (ret)
2991 return ret;
2992 else
2993 /* simplify_gen_subreg may fail for sub-word MEMs. */
2994 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2995 }
2996
2997 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2998 true, false, NULL_RTX, imode, imode);
2999 }
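
/* Editor's sketch: swapping the two halves of a complex value with the
   pair of helpers above:

     rtx re = read_complex_part (y, false);
     rtx im = read_complex_part (y, true);
     write_complex_part (x, im, false);
     write_complex_part (x, re, true);  */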
3000 \f
3001 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3002 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3003 represented in NEW_MODE. If FORCE is true, this will never happen, as
3004 we'll force-create a SUBREG if needed. */
3005
3006 static rtx
3007 emit_move_change_mode (enum machine_mode new_mode,
3008 enum machine_mode old_mode, rtx x, bool force)
3009 {
3010 rtx ret;
3011
3012 if (push_operand (x, GET_MODE (x)))
3013 {
3014 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3015 MEM_COPY_ATTRIBUTES (ret, x);
3016 }
3017 else if (MEM_P (x))
3018 {
3019 /* We don't have to worry about changing the address since the
3020 size in bytes is supposed to be the same. */
3021 if (reload_in_progress)
3022 {
3023 /* Copy the MEM to change the mode and move any
3024 substitutions from the old MEM to the new one. */
3025 ret = adjust_address_nv (x, new_mode, 0);
3026 copy_replacements (x, ret);
3027 }
3028 else
3029 ret = adjust_address (x, new_mode, 0);
3030 }
3031 else
3032 {
3033 /* Note that we do want simplify_subreg's behavior of validating
3034 that the new mode is ok for a hard register. If we were to use
3035 simplify_gen_subreg, we would create the subreg, but would
3036 probably run into the target not being able to implement it. */
3037 /* Except, of course, when FORCE is true, when this is exactly what
3038 we want. Which is needed for CCmodes on some targets. */
3039 if (force)
3040 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3041 else
3042 ret = simplify_subreg (new_mode, x, old_mode, 0);
3043 }
3044
3045 return ret;
3046 }
3047
3048 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3049 an integer mode of the same size as MODE. Returns the instruction
3050 emitted, or NULL if such a move could not be generated. */
3051
3052 static rtx
3053 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3054 {
3055 enum machine_mode imode;
3056 enum insn_code code;
3057
3058 /* There must exist a mode of the exact size we require. */
3059 imode = int_mode_for_mode (mode);
3060 if (imode == BLKmode)
3061 return NULL_RTX;
3062
3063 /* The target must support moves in this mode. */
3064 code = optab_handler (mov_optab, imode);
3065 if (code == CODE_FOR_nothing)
3066 return NULL_RTX;
3067
3068 x = emit_move_change_mode (imode, mode, x, force);
3069 if (x == NULL_RTX)
3070 return NULL_RTX;
3071 y = emit_move_change_mode (imode, mode, y, force);
3072 if (y == NULL_RTX)
3073 return NULL_RTX;
3074 return emit_insn (GEN_FCN (code) (x, y));
3075 }
3076
3077 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3078 Return an equivalent MEM that does not use an auto-increment. */
3079
3080 static rtx
3081 emit_move_resolve_push (enum machine_mode mode, rtx x)
3082 {
3083 enum rtx_code code = GET_CODE (XEXP (x, 0));
3084 HOST_WIDE_INT adjust;
3085 rtx temp;
3086
3087 adjust = GET_MODE_SIZE (mode);
3088 #ifdef PUSH_ROUNDING
3089 adjust = PUSH_ROUNDING (adjust);
3090 #endif
3091 if (code == PRE_DEC || code == POST_DEC)
3092 adjust = -adjust;
3093 else if (code == PRE_MODIFY || code == POST_MODIFY)
3094 {
3095 rtx expr = XEXP (XEXP (x, 0), 1);
3096 HOST_WIDE_INT val;
3097
3098 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3099 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3100 val = INTVAL (XEXP (expr, 1));
3101 if (GET_CODE (expr) == MINUS)
3102 val = -val;
3103 gcc_assert (adjust == val || adjust == -val);
3104 adjust = val;
3105 }
3106
3107 /* Do not use anti_adjust_stack, since we don't want to update
3108 stack_pointer_delta. */
3109 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3110 GEN_INT (adjust), stack_pointer_rtx,
3111 0, OPTAB_LIB_WIDEN);
3112 if (temp != stack_pointer_rtx)
3113 emit_move_insn (stack_pointer_rtx, temp);
3114
3115 switch (code)
3116 {
3117 case PRE_INC:
3118 case PRE_DEC:
3119 case PRE_MODIFY:
3120 temp = stack_pointer_rtx;
3121 break;
3122 case POST_INC:
3123 case POST_DEC:
3124 case POST_MODIFY:
3125 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3126 break;
3127 default:
3128 gcc_unreachable ();
3129 }
3130
3131 return replace_equiv_address (x, temp);
3132 }
3133
3134 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3135 X is known to satisfy push_operand, and MODE is known to be complex.
3136 Returns the last instruction emitted. */
3137
3138 rtx
3139 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3140 {
3141 enum machine_mode submode = GET_MODE_INNER (mode);
3142 bool imag_first;
3143
3144 #ifdef PUSH_ROUNDING
3145 unsigned int submodesize = GET_MODE_SIZE (submode);
3146
3147 /* If we push to the stack but the size is not something the
3148 machine can push exactly, we need to use move instructions.  */
3149 if (PUSH_ROUNDING (submodesize) != submodesize)
3150 {
3151 x = emit_move_resolve_push (mode, x);
3152 return emit_move_insn (x, y);
3153 }
3154 #endif
3155
3156 /* Note that the real part always precedes the imag part in memory
3157 regardless of machine's endianness. */
3158 switch (GET_CODE (XEXP (x, 0)))
3159 {
3160 case PRE_DEC:
3161 case POST_DEC:
3162 imag_first = true;
3163 break;
3164 case PRE_INC:
3165 case POST_INC:
3166 imag_first = false;
3167 break;
3168 default:
3169 gcc_unreachable ();
3170 }
3171
3172 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3173 read_complex_part (y, imag_first));
3174 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3175 read_complex_part (y, !imag_first));
3176 }
3177
3178 /* A subroutine of emit_move_complex. Perform the move from Y to X
3179 via two moves of the parts. Returns the last instruction emitted. */
3180
3181 rtx
3182 emit_move_complex_parts (rtx x, rtx y)
3183 {
3184 /* Show the output dies here. This is necessary for SUBREGs
3185 of pseudos since we cannot track their lifetimes correctly;
3186 hard regs shouldn't appear here except as return values. */
3187 if (!reload_completed && !reload_in_progress
3188 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3189 emit_clobber (x);
3190
3191 write_complex_part (x, read_complex_part (y, false), false);
3192 write_complex_part (x, read_complex_part (y, true), true);
3193
3194 return get_last_insn ();
3195 }
3196
3197 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3198 MODE is known to be complex. Returns the last instruction emitted. */
3199
3200 static rtx
3201 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3202 {
3203 bool try_int;
3204
3205 /* Need to take special care for pushes, to maintain proper ordering
3206 of the data, and possibly extra padding. */
3207 if (push_operand (x, mode))
3208 return emit_move_complex_push (mode, x, y);
3209
3210 /* See if we can coerce the target into moving both values at once. */
3211
3212 /* Move floating point as parts. */
3213 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3214 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3215 try_int = false;
3216 /* Not possible if the values are inherently not adjacent. */
3217 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3218 try_int = false;
3219 /* Is possible if both are registers (or subregs of registers). */
3220 else if (register_operand (x, mode) && register_operand (y, mode))
3221 try_int = true;
3222 /* If one of the operands is a memory, and alignment constraints
3223 are friendly enough, we may be able to do combined memory operations.
3224 We do not attempt this if Y is a constant because that combination is
3225 usually better with the by-parts thing below. */
3226 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3227 && (!STRICT_ALIGNMENT
3228 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3229 try_int = true;
3230 else
3231 try_int = false;
3232
3233 if (try_int)
3234 {
3235 rtx ret;
3236
3237 /* For memory to memory moves, optimal behavior can be had with the
3238 existing block move logic. */
3239 if (MEM_P (x) && MEM_P (y))
3240 {
3241 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3242 BLOCK_OP_NO_LIBCALL);
3243 return get_last_insn ();
3244 }
3245
3246 ret = emit_move_via_integer (mode, x, y, true);
3247 if (ret)
3248 return ret;
3249 }
3250
3251 return emit_move_complex_parts (x, y);
3252 }
3253
3254 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3255 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3256
3257 static rtx
3258 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3259 {
3260 rtx ret;
3261
3262 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3263 if (mode != CCmode)
3264 {
3265 enum insn_code code = optab_handler (mov_optab, CCmode);
3266 if (code != CODE_FOR_nothing)
3267 {
3268 x = emit_move_change_mode (CCmode, mode, x, true);
3269 y = emit_move_change_mode (CCmode, mode, y, true);
3270 return emit_insn (GEN_FCN (code) (x, y));
3271 }
3272 }
3273
3274 /* Otherwise, find the MODE_INT mode of the same width. */
3275 ret = emit_move_via_integer (mode, x, y, false);
3276 gcc_assert (ret != NULL);
3277 return ret;
3278 }
3279
3280 /* Return true if word I of OP lies entirely in the
3281 undefined bits of a paradoxical subreg. */
3282
3283 static bool
3284 undefined_operand_subword_p (const_rtx op, int i)
3285 {
3286 enum machine_mode innermode, innermostmode;
3287 int offset;
3288 if (GET_CODE (op) != SUBREG)
3289 return false;
3290 innermode = GET_MODE (op);
3291 innermostmode = GET_MODE (SUBREG_REG (op));
3292 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3293 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3294 memory, except for a paradoxical subreg where we define
3295 SUBREG_BYTE to be 0; undo this exception as in
3296 simplify_subreg. */
3297 if (SUBREG_BYTE (op) == 0
3298 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3299 {
3300 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3301 if (WORDS_BIG_ENDIAN)
3302 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3303 if (BYTES_BIG_ENDIAN)
3304 offset += difference % UNITS_PER_WORD;
3305 }
3306 if (offset >= GET_MODE_SIZE (innermostmode)
3307 || offset <= -GET_MODE_SIZE (word_mode))
3308 return true;
3309 return false;
3310 }
3311
3312 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3313 MODE is any multi-word or full-word mode that lacks a move_insn
3314 pattern. Note that you will get better code if you define such
3315 patterns, even if they must turn into multiple assembler instructions. */
3316
3317 static rtx
3318 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3319 {
3320 rtx last_insn = 0;
3321 rtx seq, inner;
3322 bool need_clobber;
3323 int i;
3324
3325 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3326
3327 /* If X is a push on the stack, do the push now and replace
3328 X with a reference to the stack pointer. */
3329 if (push_operand (x, mode))
3330 x = emit_move_resolve_push (mode, x);
3331
3332 /* If we are in reload, see if either operand is a MEM whose address
3333 is scheduled for replacement. */
3334 if (reload_in_progress && MEM_P (x)
3335 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3336 x = replace_equiv_address_nv (x, inner);
3337 if (reload_in_progress && MEM_P (y)
3338 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3339 y = replace_equiv_address_nv (y, inner);
3340
3341 start_sequence ();
3342
3343 need_clobber = false;
3344 for (i = 0;
3345 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3346 i++)
3347 {
3348 rtx xpart = operand_subword (x, i, 1, mode);
3349 rtx ypart;
3350
3351 /* Do not generate code for a move if it would come entirely
3352 from the undefined bits of a paradoxical subreg. */
3353 if (undefined_operand_subword_p (y, i))
3354 continue;
3355
3356 ypart = operand_subword (y, i, 1, mode);
3357
3358 /* If we can't get a part of Y, put Y into memory if it is a
3359 constant. Otherwise, force it into a register. Then we must
3360 be able to get a part of Y. */
3361 if (ypart == 0 && CONSTANT_P (y))
3362 {
3363 y = use_anchored_address (force_const_mem (mode, y));
3364 ypart = operand_subword (y, i, 1, mode);
3365 }
3366 else if (ypart == 0)
3367 ypart = operand_subword_force (y, i, mode);
3368
3369 gcc_assert (xpart && ypart);
3370
3371 need_clobber |= (GET_CODE (xpart) == SUBREG);
3372
3373 last_insn = emit_move_insn (xpart, ypart);
3374 }
3375
3376 seq = get_insns ();
3377 end_sequence ();
3378
3379 /* Show the output dies here. This is necessary for SUBREGs
3380 of pseudos since we cannot track their lifetimes correctly;
3381 hard regs shouldn't appear here except as return values.
3382 We never want to emit such a clobber after reload. */
3383 if (x != y
3384 && ! (reload_in_progress || reload_completed)
3385 && need_clobber != 0)
3386 emit_clobber (x);
3387
3388 emit_insn (seq);
3389
3390 return last_insn;
3391 }
3392
3393 /* Low level part of emit_move_insn.
3394 Called just like emit_move_insn, but assumes X and Y
3395 are basically valid. */
3396
3397 rtx
3398 emit_move_insn_1 (rtx x, rtx y)
3399 {
3400 enum machine_mode mode = GET_MODE (x);
3401 enum insn_code code;
3402
3403 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3404
3405 code = optab_handler (mov_optab, mode);
3406 if (code != CODE_FOR_nothing)
3407 return emit_insn (GEN_FCN (code) (x, y));
3408
3409 /* Expand complex moves by moving real part and imag part. */
3410 if (COMPLEX_MODE_P (mode))
3411 return emit_move_complex (mode, x, y);
3412
3413 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3414 || ALL_FIXED_POINT_MODE_P (mode))
3415 {
3416 rtx result = emit_move_via_integer (mode, x, y, true);
3417
3418 /* If we can't find an integer mode, use multi words. */
3419 if (result)
3420 return result;
3421 else
3422 return emit_move_multi_word (mode, x, y);
3423 }
3424
3425 if (GET_MODE_CLASS (mode) == MODE_CC)
3426 return emit_move_ccmode (mode, x, y);
3427
3428 /* Try using a move pattern for the corresponding integer mode. This is
3429 only safe when simplify_subreg can convert MODE constants into integer
3430 constants. At present, it can only do this reliably if the value
3431 fits within a HOST_WIDE_INT. */
3432 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3433 {
3434 rtx ret = emit_move_via_integer (mode, x, y, false);
3435 if (ret)
3436 return ret;
3437 }
3438
3439 return emit_move_multi_word (mode, x, y);
3440 }
3441
3442 /* Generate code to copy Y into X.
3443 Both Y and X must have the same mode, except that
3444 Y can be a constant with VOIDmode.
3445 This mode cannot be BLKmode; use emit_block_move for that.
3446
3447 Return the last instruction emitted. */
3448
3449 rtx
3450 emit_move_insn (rtx x, rtx y)
3451 {
3452 enum machine_mode mode = GET_MODE (x);
3453 rtx y_cst = NULL_RTX;
3454 rtx last_insn, set;
3455
3456 gcc_assert (mode != BLKmode
3457 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3458
3459 if (CONSTANT_P (y))
3460 {
3461 if (optimize
3462 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3463 && (last_insn = compress_float_constant (x, y)))
3464 return last_insn;
3465
3466 y_cst = y;
3467
3468 if (!targetm.legitimate_constant_p (mode, y))
3469 {
3470 y = force_const_mem (mode, y);
3471
3472 /* If the target's cannot_force_const_mem prevented the spill,
3473 assume that the target's move expanders will also take care
3474 of the non-legitimate constant. */
3475 if (!y)
3476 y = y_cst;
3477 else
3478 y = use_anchored_address (y);
3479 }
3480 }
3481
3482 /* If X or Y are memory references, verify that their addresses are valid
3483 for the machine. */
3484 if (MEM_P (x)
3485 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3486 MEM_ADDR_SPACE (x))
3487 && ! push_operand (x, GET_MODE (x))))
3488 x = validize_mem (x);
3489
3490 if (MEM_P (y)
3491 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3492 MEM_ADDR_SPACE (y)))
3493 y = validize_mem (y);
3494
3495 gcc_assert (mode != BLKmode);
3496
3497 last_insn = emit_move_insn_1 (x, y);
3498
3499 if (y_cst && REG_P (x)
3500 && (set = single_set (last_insn)) != NULL_RTX
3501 && SET_DEST (set) == x
3502 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3503 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3504
3505 return last_insn;
3506 }
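
/* Editor's sketch: the canonical way to load a constant into a fresh
   pseudo via the entry point above.  gen_int_mode, rather than a bare
   GEN_INT, keeps the constant canonical for the mode:

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, gen_int_mode (42, SImode));  */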
3507
3508 /* If Y is representable exactly in a narrower mode, and the target can
3509 perform the extension directly from constant or memory, then emit the
3510 move as an extension. */
3511
3512 static rtx
3513 compress_float_constant (rtx x, rtx y)
3514 {
3515 enum machine_mode dstmode = GET_MODE (x);
3516 enum machine_mode orig_srcmode = GET_MODE (y);
3517 enum machine_mode srcmode;
3518 REAL_VALUE_TYPE r;
3519 int oldcost, newcost;
3520 bool speed = optimize_insn_for_speed_p ();
3521
3522 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3523
3524 if (targetm.legitimate_constant_p (dstmode, y))
3525 oldcost = set_src_cost (y, speed);
3526 else
3527 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3528
3529 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3530 srcmode != orig_srcmode;
3531 srcmode = GET_MODE_WIDER_MODE (srcmode))
3532 {
3533 enum insn_code ic;
3534 rtx trunc_y, last_insn;
3535
3536 /* Skip if the target can't extend this way. */
3537 ic = can_extend_p (dstmode, srcmode, 0);
3538 if (ic == CODE_FOR_nothing)
3539 continue;
3540
3541 /* Skip if the narrowed value isn't exact. */
3542 if (! exact_real_truncate (srcmode, &r))
3543 continue;
3544
3545 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3546
3547 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3548 {
3549 /* Skip if the target needs extra instructions to perform
3550 the extension. */
3551 if (!insn_operand_matches (ic, 1, trunc_y))
3552 continue;
3553 /* This is valid, but may not be cheaper than the original. */
3554 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3555 speed);
3556 if (oldcost < newcost)
3557 continue;
3558 }
3559 else if (float_extend_from_mem[dstmode][srcmode])
3560 {
3561 trunc_y = force_const_mem (srcmode, trunc_y);
3562 /* This is valid, but may not be cheaper than the original. */
3563 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3564 speed);
3565 if (oldcost < newcost)
3566 continue;
3567 trunc_y = validize_mem (trunc_y);
3568 }
3569 else
3570 continue;
3571
3572 /* For CSE's benefit, force the compressed constant pool entry
3573 into a new pseudo. This constant may be used in different modes;
3574 if it is not, combine will put things back together for us. */
3575 trunc_y = force_reg (srcmode, trunc_y);
3576 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3577 last_insn = get_last_insn ();
3578
3579 if (REG_P (x))
3580 set_unique_reg_note (last_insn, REG_EQUAL, y);
3581
3582 return last_insn;
3583 }
3584
3585 return NULL_RTX;
3586 }
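/* Worked example, illustrative and target-dependent: moving the
   DFmode constant 1.0 on a machine whose extendsfdf2 pattern can
   extend directly from memory.  Since 1.0 is exactly representable
   in SFmode, the move above can be emitted as

     (set (reg:DF x) (float_extend:DF (mem:SF <pool entry for 1.0f>)))

   loading half as many constant-pool bytes as a plain DFmode load,
   provided the rtx costs say the extension is no more expensive.  */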
3587 \f
3588 /* Pushing data onto the stack. */
3589
3590 /* Push a block of length SIZE (perhaps variable)
3591 and return an rtx to address the beginning of the block.
3592 The value may be virtual_outgoing_args_rtx.
3593
3594 EXTRA is the number of bytes of padding to push in addition to SIZE.
3595 BELOW nonzero means this padding comes at low addresses;
3596 otherwise, the padding comes at high addresses. */
3597
3598 rtx
3599 push_block (rtx size, int extra, int below)
3600 {
3601 rtx temp;
3602
3603 size = convert_modes (Pmode, ptr_mode, size, 1);
3604 if (CONSTANT_P (size))
3605 anti_adjust_stack (plus_constant (Pmode, size, extra));
3606 else if (REG_P (size) && extra == 0)
3607 anti_adjust_stack (size);
3608 else
3609 {
3610 temp = copy_to_mode_reg (Pmode, size);
3611 if (extra != 0)
3612 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3613 temp, 0, OPTAB_LIB_WIDEN);
3614 anti_adjust_stack (temp);
3615 }
3616
3617 #ifndef STACK_GROWS_DOWNWARD
3618 if (0)
3619 #else
3620 if (1)
3621 #endif
3622 {
3623 temp = virtual_outgoing_args_rtx;
3624 if (extra != 0 && below)
3625 temp = plus_constant (Pmode, temp, extra);
3626 }
3627 else
3628 {
3629 if (CONST_INT_P (size))
3630 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3631 -INTVAL (size) - (below ? 0 : extra));
3632 else if (extra != 0 && !below)
3633 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3634 negate_rtx (Pmode, plus_constant (Pmode, size,
3635 extra)));
3636 else
3637 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3638 negate_rtx (Pmode, size));
3639 }
3640
3641 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3642 }
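/* Illustrative sketch, not part of the original source: pushing a
   16-byte block with 4 bytes of padding below it.  */
#if 0
{
  /* Adjusts the stack by SIZE + EXTRA = 20 bytes and returns an
     address for the start of the new block; because BELOW is
     nonzero, the 4 padding bytes sit at the lower addresses.  */
  rtx block_addr = push_block (GEN_INT (16), 4, /*below=*/1);
}
#endif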
3643
3644 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3645
3646 static rtx
3647 mem_autoinc_base (rtx mem)
3648 {
3649 if (MEM_P (mem))
3650 {
3651 rtx addr = XEXP (mem, 0);
3652 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3653 return XEXP (addr, 0);
3654 }
3655 return NULL;
3656 }
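/* Examples, illustrative: given (mem:SI (pre_dec:SI (reg sp))) this
   returns (reg sp); given (mem:SI (reg r1)), or anything that is not
   a MEM, it returns NULL.  */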
3657
3658 /* A utility routine used here, in reload, and in try_split. The insns
3659 after PREV up to and including LAST are known to adjust the stack,
3660 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3661 placing notes as appropriate. PREV may be NULL, indicating the
3662 entire insn sequence prior to LAST should be scanned.
3663
3664 The set of allowed stack pointer modifications is small:
3665 (1) One or more auto-inc style memory references (aka pushes),
3666 (2) One or more addition/subtraction with the SP as destination,
3667 (3) A single move insn with the SP as destination,
3668 (4) A call_pop insn,
3669 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3670
3671 Insns in the sequence that do not modify the SP are ignored,
3672 except for noreturn calls.
3673
3674 The return value is the amount of adjustment that can be trivially
3675 verified, via immediate operand or auto-inc. If the adjustment
3676 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3677
3678 HOST_WIDE_INT
3679 find_args_size_adjust (rtx insn)
3680 {
3681 rtx dest, set, pat;
3682 int i;
3683
3684 pat = PATTERN (insn);
3685 set = NULL;
3686
3687 /* Look for a call_pop pattern. */
3688 if (CALL_P (insn))
3689 {
3690 /* We have to allow non-call_pop patterns for the case
3691 of emit_single_push_insn of a TLS address. */
3692 if (GET_CODE (pat) != PARALLEL)
3693 return 0;
3694
3695 /* All call_pop have a stack pointer adjust in the parallel.
3696 The call itself is always first, and the stack adjust is
3697 usually last, so search from the end. */
3698 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3699 {
3700 set = XVECEXP (pat, 0, i);
3701 if (GET_CODE (set) != SET)
3702 continue;
3703 dest = SET_DEST (set);
3704 if (dest == stack_pointer_rtx)
3705 break;
3706 }
3707 /* We'd better have found the stack pointer adjust. */
3708 if (i == 0)
3709 return 0;
3710 /* Fall through to process the extracted SET and DEST
3711 as if it were a standalone insn. */
3712 }
3713 else if (GET_CODE (pat) == SET)
3714 set = pat;
3715 else if ((set = single_set (insn)) != NULL)
3716 ;
3717 else if (GET_CODE (pat) == PARALLEL)
3718 {
3719 /* ??? Some older ports use a parallel with a stack adjust
3720 and a store for a PUSH_ROUNDING pattern, rather than a
3721 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3722 /* ??? See h8300 and m68k, pushqi1. */
3723 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3724 {
3725 set = XVECEXP (pat, 0, i);
3726 if (GET_CODE (set) != SET)
3727 continue;
3728 dest = SET_DEST (set);
3729 if (dest == stack_pointer_rtx)
3730 break;
3731
3732 /* We do not expect an auto-inc of the sp in the parallel. */
3733 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3734 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3735 != stack_pointer_rtx);
3736 }
3737 if (i < 0)
3738 return 0;
3739 }
3740 else
3741 return 0;
3742
3743 dest = SET_DEST (set);
3744
3745 /* Look for direct modifications of the stack pointer. */
3746 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3747 {
3748 /* Look for a trivial adjustment, otherwise assume nothing. */
3749 /* Note that the SPU restore_stack_block pattern refers to
3750 the stack pointer in V4SImode. Consider that non-trivial. */
3751 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3752 && GET_CODE (SET_SRC (set)) == PLUS
3753 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3754 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3755 return INTVAL (XEXP (SET_SRC (set), 1));
3756 /* ??? Reload can generate no-op moves, which will be cleaned
3757 up later. Recognize it and continue searching. */
3758 else if (rtx_equal_p (dest, SET_SRC (set)))
3759 return 0;
3760 else
3761 return HOST_WIDE_INT_MIN;
3762 }
3763 else
3764 {
3765 rtx mem, addr;
3766
3767 /* Otherwise only think about autoinc patterns. */
3768 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3769 {
3770 mem = dest;
3771 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3772 != stack_pointer_rtx);
3773 }
3774 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3775 mem = SET_SRC (set);
3776 else
3777 return 0;
3778
3779 addr = XEXP (mem, 0);
3780 switch (GET_CODE (addr))
3781 {
3782 case PRE_INC:
3783 case POST_INC:
3784 return GET_MODE_SIZE (GET_MODE (mem));
3785 case PRE_DEC:
3786 case POST_DEC:
3787 return -GET_MODE_SIZE (GET_MODE (mem));
3788 case PRE_MODIFY:
3789 case POST_MODIFY:
3790 addr = XEXP (addr, 1);
3791 gcc_assert (GET_CODE (addr) == PLUS);
3792 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3793 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3794 return INTVAL (XEXP (addr, 1));
3795 default:
3796 gcc_unreachable ();
3797 }
3798 }
3799 }
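/* Illustrative examples of the returned adjustment, assuming 4-byte
   words (not from the original source):

     (set (mem:SI (pre_dec:SI (reg sp))) (reg r0))   -> -4
     (set (reg sp) (plus (reg sp) (const_int 16)))   -> 16
     (set (reg sp) (reg fp))                         -> HOST_WIDE_INT_MIN

   the last because a plain register copy into the stack pointer
   cannot be trivially verified.  */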
3800
3801 int
3802 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3803 {
3804 int args_size = end_args_size;
3805 bool saw_unknown = false;
3806 rtx insn;
3807
3808 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3809 {
3810 HOST_WIDE_INT this_delta;
3811
3812 if (!NONDEBUG_INSN_P (insn))
3813 continue;
3814
3815 this_delta = find_args_size_adjust (insn);
3816 if (this_delta == 0)
3817 {
3818 if (!CALL_P (insn)
3819 || ACCUMULATE_OUTGOING_ARGS
3820 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3821 continue;
3822 }
3823
3824 gcc_assert (!saw_unknown);
3825 if (this_delta == HOST_WIDE_INT_MIN)
3826 saw_unknown = true;
3827
3828 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3829 #ifdef STACK_GROWS_DOWNWARD
3830 this_delta = -this_delta;
3831 #endif
3832 args_size -= this_delta;
3833 }
3834
3835 return saw_unknown ? INT_MIN : args_size;
3836 }
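/* Illustrative sketch, not part of the original source: the typical
   calling pattern, as used by emit_single_push_insn below.  */
#if 0
{
  rtx prev = get_last_insn ();
  /* ... emit one or more insns that adjust the stack ... */
  int delta = fixup_args_size_notes (prev, get_last_insn (),
				     stack_pointer_delta);
  /* DELTA is INT_MIN if some adjustment could not be verified.  */
}
#endif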
3837
3838 #ifdef PUSH_ROUNDING
3839 /* Emit single push insn. */
3840
3841 static void
3842 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3843 {
3844 rtx dest_addr;
3845 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3846 rtx dest;
3847 enum insn_code icode;
3848
3849 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3850 /* If there is a push pattern, use it. Otherwise try the old way of
3851 throwing a MEM representing the push operation to the move expander. */
3852 icode = optab_handler (push_optab, mode);
3853 if (icode != CODE_FOR_nothing)
3854 {
3855 struct expand_operand ops[1];
3856
3857 create_input_operand (&ops[0], x, mode);
3858 if (maybe_expand_insn (icode, 1, ops))
3859 return;
3860 }
3861 if (GET_MODE_SIZE (mode) == rounded_size)
3862 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3863 /* If we are to pad downward, adjust the stack pointer first and
3864 then store X into the stack location using an offset. This is
3865 because emit_move_insn does not know how to pad; it does not have
3866 access to the type. */
3867 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3868 {
3869 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3870 HOST_WIDE_INT offset;
3871
3872 emit_move_insn (stack_pointer_rtx,
3873 expand_binop (Pmode,
3874 #ifdef STACK_GROWS_DOWNWARD
3875 sub_optab,
3876 #else
3877 add_optab,
3878 #endif
3879 stack_pointer_rtx,
3880 GEN_INT (rounded_size),
3881 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3882
3883 offset = (HOST_WIDE_INT) padding_size;
3884 #ifdef STACK_GROWS_DOWNWARD
3885 if (STACK_PUSH_CODE == POST_DEC)
3886 /* We have already decremented the stack pointer, so get the
3887 previous value. */
3888 offset += (HOST_WIDE_INT) rounded_size;
3889 #else
3890 if (STACK_PUSH_CODE == POST_INC)
3891 /* We have already incremented the stack pointer, so get the
3892 previous value. */
3893 offset -= (HOST_WIDE_INT) rounded_size;
3894 #endif
3895 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3896 }
3897 else
3898 {
3899 #ifdef STACK_GROWS_DOWNWARD
3900 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3901 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3902 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3903 #else
3904 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3905 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3906 GEN_INT (rounded_size));
3907 #endif
3908 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3909 }
3910
3911 dest = gen_rtx_MEM (mode, dest_addr);
3912
3913 if (type != 0)
3914 {
3915 set_mem_attributes (dest, type, 1);
3916
3917 if (flag_optimize_sibling_calls)
3918 /* Function incoming arguments may overlap with sibling call
3919 outgoing arguments and we cannot allow reordering of reads
3920 from function arguments with stores to outgoing arguments
3921 of sibling calls. */
3922 set_mem_alias_set (dest, 0);
3923 }
3924 emit_move_insn (dest, x);
3925 }
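/* Worked example, illustrative: pushing a QImode byte when
   PUSH_ROUNDING rounds to 4, the stack grows downward with
   STACK_PUSH_CODE == PRE_DEC, and the argument pads downward.
   The stack pointer is first decremented by 4, then the byte is
   stored at (plus (reg sp) (const_int 3)), leaving the three
   padding bytes at the lower addresses.  */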
3926
3927 /* Emit and annotate a single push insn. */
3928
3929 static void
3930 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3931 {
3932 int delta, old_delta = stack_pointer_delta;
3933 rtx prev = get_last_insn ();
3934 rtx last;
3935
3936 emit_single_push_insn_1 (mode, x, type);
3937
3938 last = get_last_insn ();
3939
3940 /* Notice the common case where we emitted exactly one insn. */
3941 if (PREV_INSN (last) == prev)
3942 {
3943 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3944 return;
3945 }
3946
3947 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3948 gcc_assert (delta == INT_MIN || delta == old_delta);
3949 }
3950 #endif
3951
3952 /* Generate code to push X onto the stack, assuming it has mode MODE and
3953 type TYPE.
3954 MODE is redundant except when X is a CONST_INT (since they don't
3955 carry mode info).
3956 SIZE is an rtx for the size of data to be copied (in bytes),
3957 needed only if X is BLKmode.
3958
3959 ALIGN (in bits) is maximum alignment we can assume.
3960
3961 If PARTIAL and REG are both nonzero, then copy that many of the first
3962 bytes of X into registers starting with REG, and push the rest of X.
3963 The amount of space pushed is decreased by PARTIAL bytes.
3964 REG must be a hard register in this case.
3965 If REG is zero but PARTIAL is not, take all other actions for an
3966 argument partially in registers, but do not actually load any
3967 registers.
3968
3969 EXTRA is the amount in bytes of extra space to leave next to this arg.
3970 This is ignored if an argument block has already been allocated.
3971
3972 On a machine that lacks real push insns, ARGS_ADDR is the address of
3973 the bottom of the argument block for this call. We use indexing off there
3974 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3975 argument block has not been preallocated.
3976
3977 ARGS_SO_FAR is the size of args previously pushed for this call.
3978
3979 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3980 for arguments passed in registers. If nonzero, it will be the number
3981 of bytes required. */
3982
3983 void
3984 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3985 unsigned int align, int partial, rtx reg, int extra,
3986 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3987 rtx alignment_pad)
3988 {
3989 rtx xinner;
3990 enum direction stack_direction
3991 #ifdef STACK_GROWS_DOWNWARD
3992 = downward;
3993 #else
3994 = upward;
3995 #endif
3996
3997 /* Decide where to pad the argument: `downward' for below,
3998 `upward' for above, or `none' for don't pad it.
3999 Default is below for small data on big-endian machines; else above. */
4000 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4001
4002 /* Invert direction if stack is post-decrement.
4003 FIXME: why? */
4004 if (STACK_PUSH_CODE == POST_DEC)
4005 if (where_pad != none)
4006 where_pad = (where_pad == downward ? upward : downward);
4007
4008 xinner = x;
4009
4010 if (mode == BLKmode
4011 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4012 {
4013 /* Copy a block into the stack, entirely or partially. */
4014
4015 rtx temp;
4016 int used;
4017 int offset;
4018 int skip;
4019
4020 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4021 used = partial - offset;
4022
4023 if (mode != BLKmode)
4024 {
4025 /* A value is to be stored in an insufficiently aligned
4026 stack slot; copy via a suitably aligned slot if
4027 necessary. */
4028 size = GEN_INT (GET_MODE_SIZE (mode));
4029 if (!MEM_P (xinner))
4030 {
4031 temp = assign_temp (type, 1, 1);
4032 emit_move_insn (temp, xinner);
4033 xinner = temp;
4034 }
4035 }
4036
4037 gcc_assert (size);
4038
4039 /* USED is now the # of bytes we need not copy to the stack
4040 because registers will take care of them. */
4041
4042 if (partial != 0)
4043 xinner = adjust_address (xinner, BLKmode, used);
4044
4045 /* If the partial register-part of the arg counts in its stack size,
4046 skip the part of stack space corresponding to the registers.
4047 Otherwise, start copying to the beginning of the stack space,
4048 by setting SKIP to 0. */
4049 skip = (reg_parm_stack_space == 0) ? 0 : used;
4050
4051 #ifdef PUSH_ROUNDING
4052 /* Do it with several push insns if that doesn't take lots of insns
4053 and if there is no difficulty with push insns that skip bytes
4054 on the stack for alignment purposes. */
4055 if (args_addr == 0
4056 && PUSH_ARGS
4057 && CONST_INT_P (size)
4058 && skip == 0
4059 && MEM_ALIGN (xinner) >= align
4060 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4061 /* Here we avoid the case of a structure whose weak alignment
4062 would force many pushes of small amounts of data;
4063 such small pushes do rounding that causes trouble. */
4064 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4065 || align >= BIGGEST_ALIGNMENT
4066 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4067 == (align / BITS_PER_UNIT)))
4068 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4069 {
4070 /* Push padding now if padding above and stack grows down,
4071 or if padding below and stack grows up.
4072 But if space already allocated, this has already been done. */
4073 if (extra && args_addr == 0
4074 && where_pad != none && where_pad != stack_direction)
4075 anti_adjust_stack (GEN_INT (extra));
4076
4077 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4078 }
4079 else
4080 #endif /* PUSH_ROUNDING */
4081 {
4082 rtx target;
4083
4084 /* Otherwise make space on the stack and copy the data
4085 to the address of that space. */
4086
4087 /* Deduct words put into registers from the size we must copy. */
4088 if (partial != 0)
4089 {
4090 if (CONST_INT_P (size))
4091 size = GEN_INT (INTVAL (size) - used);
4092 else
4093 size = expand_binop (GET_MODE (size), sub_optab, size,
4094 GEN_INT (used), NULL_RTX, 0,
4095 OPTAB_LIB_WIDEN);
4096 }
4097
4098 /* Get the address of the stack space.
4099 In this case, we do not deal with EXTRA separately.
4100 A single stack adjust will do. */
4101 if (! args_addr)
4102 {
4103 temp = push_block (size, extra, where_pad == downward);
4104 extra = 0;
4105 }
4106 else if (CONST_INT_P (args_so_far))
4107 temp = memory_address (BLKmode,
4108 plus_constant (Pmode, args_addr,
4109 skip + INTVAL (args_so_far)));
4110 else
4111 temp = memory_address (BLKmode,
4112 plus_constant (Pmode,
4113 gen_rtx_PLUS (Pmode,
4114 args_addr,
4115 args_so_far),
4116 skip));
4117
4118 if (!ACCUMULATE_OUTGOING_ARGS)
4119 {
4120 /* If the source is referenced relative to the stack pointer,
4121 copy it to another register to stabilize it. We do not need
4122 to do this if we know that we won't be changing sp. */
4123
4124 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4125 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4126 temp = copy_to_reg (temp);
4127 }
4128
4129 target = gen_rtx_MEM (BLKmode, temp);
4130
4131 /* We do *not* set_mem_attributes here, because incoming arguments
4132 may overlap with sibling call outgoing arguments and we cannot
4133 allow reordering of reads from function arguments with stores
4134 to outgoing arguments of sibling calls. We do, however, want
4135 to record the alignment of the stack slot. */
4136 /* ALIGN may well be better aligned than TYPE, e.g. due to
4137 PARM_BOUNDARY. Assume the caller isn't lying. */
4138 set_mem_align (target, align);
4139
4140 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4141 }
4142 }
4143 else if (partial > 0)
4144 {
4145 /* Scalar partly in registers. */
4146
4147 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4148 int i;
4149 int not_stack;
4150 /* # bytes of start of argument
4151 that we must make space for but need not store. */
4152 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4153 int args_offset = INTVAL (args_so_far);
4154 int skip;
4155
4156 /* Push padding now if padding above and stack grows down,
4157 or if padding below and stack grows up.
4158 But if space already allocated, this has already been done. */
4159 if (extra && args_addr == 0
4160 && where_pad != none && where_pad != stack_direction)
4161 anti_adjust_stack (GEN_INT (extra));
4162
4163 /* If we make space by pushing it, we might as well push
4164 the real data. Otherwise, we can leave OFFSET nonzero
4165 and leave the space uninitialized. */
4166 if (args_addr == 0)
4167 offset = 0;
4168
4169 /* Now NOT_STACK gets the number of words that we don't need to
4170 allocate on the stack. Convert OFFSET to words too. */
4171 not_stack = (partial - offset) / UNITS_PER_WORD;
4172 offset /= UNITS_PER_WORD;
4173
4174 /* If the partial register-part of the arg counts in its stack size,
4175 skip the part of stack space corresponding to the registers.
4176 Otherwise, start copying to the beginning of the stack space,
4177 by setting SKIP to 0. */
4178 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4179
4180 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4181 x = validize_mem (force_const_mem (mode, x));
4182
4183 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4184 SUBREGs of such registers are not allowed. */
4185 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4186 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4187 x = copy_to_reg (x);
4188
4189 /* Loop over all the words allocated on the stack for this arg. */
4190 /* We can do it by words, because any scalar bigger than a word
4191 has a size that is a multiple of a word. */
4192 #ifndef PUSH_ARGS_REVERSED
4193 for (i = not_stack; i < size; i++)
4194 #else
4195 for (i = size - 1; i >= not_stack; i--)
4196 #endif
4197 if (i >= not_stack + offset)
4198 emit_push_insn (operand_subword_force (x, i, mode),
4199 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4200 0, args_addr,
4201 GEN_INT (args_offset + ((i - not_stack + skip)
4202 * UNITS_PER_WORD)),
4203 reg_parm_stack_space, alignment_pad);
4204 }
4205 else
4206 {
4207 rtx addr;
4208 rtx dest;
4209
4210 /* Push padding now if padding above and stack grows down,
4211 or if padding below and stack grows up.
4212 But if space already allocated, this has already been done. */
4213 if (extra && args_addr == 0
4214 && where_pad != none && where_pad != stack_direction)
4215 anti_adjust_stack (GEN_INT (extra));
4216
4217 #ifdef PUSH_ROUNDING
4218 if (args_addr == 0 && PUSH_ARGS)
4219 emit_single_push_insn (mode, x, type);
4220 else
4221 #endif
4222 {
4223 if (CONST_INT_P (args_so_far))
4224 addr
4225 = memory_address (mode,
4226 plus_constant (Pmode, args_addr,
4227 INTVAL (args_so_far)));
4228 else
4229 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4230 args_so_far));
4231 dest = gen_rtx_MEM (mode, addr);
4232
4233 /* We do *not* set_mem_attributes here, because incoming arguments
4234 may overlap with sibling call outgoing arguments and we cannot
4235 allow reordering of reads from function arguments with stores
4236 to outgoing arguments of sibling calls. We do, however, want
4237 to record the alignment of the stack slot. */
4238 /* ALIGN may well be better aligned than TYPE, e.g. due to
4239 PARM_BOUNDARY. Assume the caller isn't lying. */
4240 set_mem_align (dest, align);
4241
4242 emit_move_insn (dest, x);
4243 }
4244 }
4245
4246 /* If part should go in registers, copy that part
4247 into the appropriate registers. Do this now, at the end,
4248 since mem-to-mem copies above may do function calls. */
4249 if (partial > 0 && reg != 0)
4250 {
4251 /* Handle calls that pass values in multiple non-contiguous locations.
4252 The Irix 6 ABI has examples of this. */
4253 if (GET_CODE (reg) == PARALLEL)
4254 emit_group_load (reg, x, type, -1);
4255 else
4256 {
4257 gcc_assert (partial % UNITS_PER_WORD == 0);
4258 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4259 }
4260 }
4261
4262 if (extra && args_addr == 0 && where_pad == stack_direction)
4263 anti_adjust_stack (GEN_INT (extra));
4264
4265 if (alignment_pad && args_addr == 0)
4266 anti_adjust_stack (alignment_pad);
4267 }
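/* Illustrative sketch, not part of the original source: pushing one
   SImode register argument with no partial-register part into a
   preallocated argument block.  X and ARGS_ADDR are placeholders.  */
#if 0
emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
		/*align=*/32, /*partial=*/0, /*reg=*/NULL_RTX,
		/*extra=*/0, args_addr, GEN_INT (8),
		/*reg_parm_stack_space=*/0, NULL_RTX);
#endif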
4268 \f
4269 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4270 operations. */
4271
4272 static rtx
4273 get_subtarget (rtx x)
4274 {
4275 return (optimize
4276 || x == 0
4277 /* Only registers can be subtargets. */
4278 || !REG_P (x)
4279 /* Don't use hard regs to avoid extending their life. */
4280 || REGNO (x) < FIRST_PSEUDO_REGISTER
4281 ? 0 : x);
4282 }
4283
4284 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4285 FIELD is a bitfield. Returns true if the optimization was successful,
4286 and there's nothing else to do. */
4287
4288 static bool
4289 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4290 unsigned HOST_WIDE_INT bitpos,
4291 unsigned HOST_WIDE_INT bitregion_start,
4292 unsigned HOST_WIDE_INT bitregion_end,
4293 enum machine_mode mode1, rtx str_rtx,
4294 tree to, tree src)
4295 {
4296 enum machine_mode str_mode = GET_MODE (str_rtx);
4297 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4298 tree op0, op1;
4299 rtx value, result;
4300 optab binop;
4301 gimple srcstmt;
4302 enum tree_code code;
4303
4304 if (mode1 != VOIDmode
4305 || bitsize >= BITS_PER_WORD
4306 || str_bitsize > BITS_PER_WORD
4307 || TREE_SIDE_EFFECTS (to)
4308 || TREE_THIS_VOLATILE (to))
4309 return false;
4310
4311 STRIP_NOPS (src);
4312 if (TREE_CODE (src) != SSA_NAME)
4313 return false;
4314 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4315 return false;
4316
4317 srcstmt = get_gimple_for_ssa_name (src);
4318 if (!srcstmt
4319 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4320 return false;
4321
4322 code = gimple_assign_rhs_code (srcstmt);
4323
4324 op0 = gimple_assign_rhs1 (srcstmt);
4325
4326 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4327 to find its initialization. Hopefully the initialization will
4328 be from a bitfield load. */
4329 if (TREE_CODE (op0) == SSA_NAME)
4330 {
4331 gimple op0stmt = get_gimple_for_ssa_name (op0);
4332
4333 /* We want to eventually have OP0 be the same as TO, which
4334 should be a bitfield. */
4335 if (!op0stmt
4336 || !is_gimple_assign (op0stmt)
4337 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4338 return false;
4339 op0 = gimple_assign_rhs1 (op0stmt);
4340 }
4341
4342 op1 = gimple_assign_rhs2 (srcstmt);
4343
4344 if (!operand_equal_p (to, op0, 0))
4345 return false;
4346
4347 if (MEM_P (str_rtx))
4348 {
4349 unsigned HOST_WIDE_INT offset1;
4350
4351 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4352 str_mode = word_mode;
4353 str_mode = get_best_mode (bitsize, bitpos,
4354 bitregion_start, bitregion_end,
4355 MEM_ALIGN (str_rtx), str_mode, 0);
4356 if (str_mode == VOIDmode)
4357 return false;
4358 str_bitsize = GET_MODE_BITSIZE (str_mode);
4359
4360 offset1 = bitpos;
4361 bitpos %= str_bitsize;
4362 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4363 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4364 }
4365 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4366 return false;
4367
4368 /* If the bit field covers the whole REG/MEM, store_field
4369 will likely generate better code. */
4370 if (bitsize >= str_bitsize)
4371 return false;
4372
4373 /* We can't handle fields split across multiple entities. */
4374 if (bitpos + bitsize > str_bitsize)
4375 return false;
4376
4377 if (BYTES_BIG_ENDIAN)
4378 bitpos = str_bitsize - bitpos - bitsize;
4379
4380 switch (code)
4381 {
4382 case PLUS_EXPR:
4383 case MINUS_EXPR:
4384 /* For now, just optimize the case of the topmost bitfield
4385 where we don't need to do any masking and also
4386 1-bit bitfields where xor can be used.
4387 We might win by one instruction for the other bitfields
4388 too if insv/extv instructions aren't used, so that
4389 can be added later. */
4390 if (bitpos + bitsize != str_bitsize
4391 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4392 break;
4393
4394 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4395 value = convert_modes (str_mode,
4396 TYPE_MODE (TREE_TYPE (op1)), value,
4397 TYPE_UNSIGNED (TREE_TYPE (op1)));
4398
4399 /* We may be accessing data outside the field, which means
4400 we can alias adjacent data. */
4401 if (MEM_P (str_rtx))
4402 {
4403 str_rtx = shallow_copy_rtx (str_rtx);
4404 set_mem_alias_set (str_rtx, 0);
4405 set_mem_expr (str_rtx, 0);
4406 }
4407
4408 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4409 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4410 {
4411 value = expand_and (str_mode, value, const1_rtx, NULL);
4412 binop = xor_optab;
4413 }
4414 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4415 result = expand_binop (str_mode, binop, str_rtx,
4416 value, str_rtx, 1, OPTAB_WIDEN);
4417 if (result != str_rtx)
4418 emit_move_insn (str_rtx, result);
4419 return true;
4420
4421 case BIT_IOR_EXPR:
4422 case BIT_XOR_EXPR:
4423 if (TREE_CODE (op1) != INTEGER_CST)
4424 break;
4425 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4426 value = convert_modes (str_mode,
4427 TYPE_MODE (TREE_TYPE (op1)), value,
4428 TYPE_UNSIGNED (TREE_TYPE (op1)));
4429
4430 /* We may be accessing data outside the field, which means
4431 we can alias adjacent data. */
4432 if (MEM_P (str_rtx))
4433 {
4434 str_rtx = shallow_copy_rtx (str_rtx);
4435 set_mem_alias_set (str_rtx, 0);
4436 set_mem_expr (str_rtx, 0);
4437 }
4438
4439 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4440 if (bitpos + bitsize != str_bitsize)
4441 {
4442 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
4443 value = expand_and (str_mode, value, mask, NULL_RTX);
4444 }
4445 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4446 result = expand_binop (str_mode, binop, str_rtx,
4447 value, str_rtx, 1, OPTAB_WIDEN);
4448 if (result != str_rtx)
4449 emit_move_insn (str_rtx, result);
4450 return true;
4451
4452 default:
4453 break;
4454 }
4455
4456 return false;
4457 }
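/* Source-level example, illustrative, of an assignment this
   optimization catches:

     struct { unsigned f : 1; } s;
     s.f ^= 1;

   Because OP1 is a constant and the field is one bit wide, the
   update is emitted as a single xor on the word containing the bit,
   with no extract/insert pair.  */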
4458
4459 /* In the C++ memory model, consecutive bit fields in a structure are
4460 considered one memory location.
4461
4462 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4463 returns the bit range of consecutive bits in which this COMPONENT_REF
4464 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4465 and *OFFSET may be adjusted in the process.
4466
4467 If the access does not need to be restricted, 0 is returned in both
4468 *BITSTART and *BITEND. */
4469
4470 static void
4471 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4472 unsigned HOST_WIDE_INT *bitend,
4473 tree exp,
4474 HOST_WIDE_INT *bitpos,
4475 tree *offset)
4476 {
4477 HOST_WIDE_INT bitoffset;
4478 tree field, repr;
4479
4480 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4481
4482 field = TREE_OPERAND (exp, 1);
4483 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4484 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4485 need to limit the range we can access. */
4486 if (!repr)
4487 {
4488 *bitstart = *bitend = 0;
4489 return;
4490 }
4491
4492 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4493 part of a larger bit field, then the representative does not serve any
4494 useful purpose. This can occur in Ada. */
4495 if (handled_component_p (TREE_OPERAND (exp, 0)))
4496 {
4497 enum machine_mode rmode;
4498 HOST_WIDE_INT rbitsize, rbitpos;
4499 tree roffset;
4500 int unsignedp;
4501 int volatilep = 0;
4502 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4503 &roffset, &rmode, &unsignedp, &volatilep, false);
4504 if ((rbitpos % BITS_PER_UNIT) != 0)
4505 {
4506 *bitstart = *bitend = 0;
4507 return;
4508 }
4509 }
4510
4511 /* Compute the adjustment to bitpos from the offset of the field
4512 relative to the representative. DECL_FIELD_OFFSET of field and
4513 repr are the same by construction if they are not constants,
4514 see finish_bitfield_layout. */
4515 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4516 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4517 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4518 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4519 else
4520 bitoffset = 0;
4521 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4522 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4523
4524 /* If the adjustment is larger than bitpos, we would have a negative bit
4525 position for the lower bound and this may wreak havoc later. This can
4526 occur only if we have a non-null offset, so adjust offset and bitpos
4527 to make the lower bound non-negative. */
4528 if (bitoffset > *bitpos)
4529 {
4530 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4531
4532 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4533 gcc_assert (*offset != NULL_TREE);
4534
4535 *bitpos += adjust;
4536 *offset
4537 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4538 *bitstart = 0;
4539 }
4540 else
4541 *bitstart = *bitpos - bitoffset;
4542
4543 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4544 }
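/* Example, illustrative: in

     struct S { char a; int b : 7; int c : 9; char d; };

   B and C are consecutive bit fields sharing one
   DECL_BIT_FIELD_REPRESENTATIVE, so a store to C may touch the bits
   of B but must not touch A or D; *BITSTART and *BITEND delimit
   exactly the representative's span.  */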
4545
4546 /* Returns true if the MEM_REF REF refers to an object that does not
4547 reside in memory and has non-BLKmode. */
4548
4549 static bool
4550 mem_ref_refers_to_non_mem_p (tree ref)
4551 {
4552 tree base = TREE_OPERAND (ref, 0);
4553 if (TREE_CODE (base) != ADDR_EXPR)
4554 return false;
4555 base = TREE_OPERAND (base, 0);
4556 return (DECL_P (base)
4557 && !TREE_ADDRESSABLE (base)
4558 && DECL_MODE (base) != BLKmode
4559 && DECL_RTL_SET_P (base)
4560 && !MEM_P (DECL_RTL (base)));
4561 }
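/* Example, illustrative: folding can produce a MEM_REF whose base is
   the address of a local scalar whose address never escapes.  Such a
   variable may live in a register, so DECL_RTL for it is a REG
   rather than a MEM, and the "memory" reference must really be
   expanded as a register access; this predicate detects that case.  */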
4562
4563 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4564 is true, try generating a nontemporal store. */
4565
4566 void
4567 expand_assignment (tree to, tree from, bool nontemporal)
4568 {
4569 rtx to_rtx = 0;
4570 rtx result;
4571 enum machine_mode mode;
4572 unsigned int align;
4573 enum insn_code icode;
4574
4575 /* Don't crash if the lhs of the assignment was erroneous. */
4576 if (TREE_CODE (to) == ERROR_MARK)
4577 {
4578 expand_normal (from);
4579 return;
4580 }
4581
4582 /* Optimize away no-op moves without side-effects. */
4583 if (operand_equal_p (to, from, 0))
4584 return;
4585
4586 /* Handle misaligned stores. */
4587 mode = TYPE_MODE (TREE_TYPE (to));
4588 if ((TREE_CODE (to) == MEM_REF
4589 || TREE_CODE (to) == TARGET_MEM_REF)
4590 && mode != BLKmode
4591 && !mem_ref_refers_to_non_mem_p (to)
4592 && ((align = get_object_or_type_alignment (to))
4593 < GET_MODE_ALIGNMENT (mode))
4594 && (((icode = optab_handler (movmisalign_optab, mode))
4595 != CODE_FOR_nothing)
4596 || SLOW_UNALIGNED_ACCESS (mode, align)))
4597 {
4598 rtx reg, mem;
4599
4600 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4601 reg = force_not_mem (reg);
4602 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4603
4604 if (icode != CODE_FOR_nothing)
4605 {
4606 struct expand_operand ops[2];
4607
4608 create_fixed_operand (&ops[0], mem);
4609 create_input_operand (&ops[1], reg, mode);
4610 /* The movmisalign<mode> pattern cannot fail, else the assignment
4611 would silently be omitted. */
4612 expand_insn (icode, 2, ops);
4613 }
4614 else
4615 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4616 0, 0, 0, mode, reg);
4617 return;
4618 }
4619
4620 /* Assignment of a structure component needs special treatment
4621 if the structure component's rtx is not simply a MEM.
4622 Assignment of an array element at a constant index, and assignment of
4623 an array element in an unaligned packed structure field, has the same
4624 problem. Same for (partially) storing into a non-memory object. */
4625 if (handled_component_p (to)
4626 || (TREE_CODE (to) == MEM_REF
4627 && mem_ref_refers_to_non_mem_p (to))
4628 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4629 {
4630 enum machine_mode mode1;
4631 HOST_WIDE_INT bitsize, bitpos;
4632 unsigned HOST_WIDE_INT bitregion_start = 0;
4633 unsigned HOST_WIDE_INT bitregion_end = 0;
4634 tree offset;
4635 int unsignedp;
4636 int volatilep = 0;
4637 tree tem;
4638 bool misalignp;
4639 rtx mem = NULL_RTX;
4640
4641 push_temp_slots ();
4642 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4643 &unsignedp, &volatilep, true);
4644
4645 if (TREE_CODE (to) == COMPONENT_REF
4646 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4647 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4648
4649 /* If we are going to use store_bit_field and extract_bit_field,
4650 make sure to_rtx will be safe for multiple use. */
4651 mode = TYPE_MODE (TREE_TYPE (tem));
4652 if (TREE_CODE (tem) == MEM_REF
4653 && mode != BLKmode
4654 && ((align = get_object_or_type_alignment (tem))
4655 < GET_MODE_ALIGNMENT (mode))
4656 && ((icode = optab_handler (movmisalign_optab, mode))
4657 != CODE_FOR_nothing))
4658 {
4659 struct expand_operand ops[2];
4660
4661 misalignp = true;
4662 to_rtx = gen_reg_rtx (mode);
4663 mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4664
4665 /* If the misaligned store doesn't overwrite all bits, perform
4666 a read-modify-write cycle on MEM. */
4667 if (bitsize != GET_MODE_BITSIZE (mode))
4668 {
4669 create_input_operand (&ops[0], to_rtx, mode);
4670 create_fixed_operand (&ops[1], mem);
4671 /* The movmisalign<mode> pattern cannot fail, else the assignment
4672 would silently be omitted. */
4673 expand_insn (icode, 2, ops);
4674
4675 mem = copy_rtx (mem);
4676 }
4677 }
4678 else
4679 {
4680 misalignp = false;
4681 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4682 }
4683
4684 /* If the bitfield is volatile, we want to access it in the
4685 field's mode, not the computed mode.
4686 If a MEM has VOIDmode (external with incomplete type),
4687 use BLKmode for it instead. */
4688 if (MEM_P (to_rtx))
4689 {
4690 if (volatilep && flag_strict_volatile_bitfields > 0)
4691 to_rtx = adjust_address (to_rtx, mode1, 0);
4692 else if (GET_MODE (to_rtx) == VOIDmode)
4693 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4694 }
4695
4696 if (offset != 0)
4697 {
4698 enum machine_mode address_mode;
4699 rtx offset_rtx;
4700
4701 if (!MEM_P (to_rtx))
4702 {
4703 /* We can get constant negative offsets into arrays with broken
4704 user code. Translate this to a trap instead of ICEing. */
4705 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4706 expand_builtin_trap ();
4707 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4708 }
4709
4710 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4711 address_mode = get_address_mode (to_rtx);
4712 if (GET_MODE (offset_rtx) != address_mode)
4713 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4714
4715 /* A constant address in TO_RTX can have VOIDmode, we must not try
4716 to call force_reg for that case. Avoid that case. */
4717 if (MEM_P (to_rtx)
4718 && GET_MODE (to_rtx) == BLKmode
4719 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4720 && bitsize > 0
4721 && (bitpos % bitsize) == 0
4722 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4723 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4724 {
4725 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4726 bitpos = 0;
4727 }
4728
4729 to_rtx = offset_address (to_rtx, offset_rtx,
4730 highest_pow2_factor_for_target (to,
4731 offset));
4732 }
4733
4734 /* No action is needed if the target is not a memory and the field
4735 lies completely outside that target. This can occur if the source
4736 code contains an out-of-bounds access to a small array. */
4737 if (!MEM_P (to_rtx)
4738 && GET_MODE (to_rtx) != BLKmode
4739 && (unsigned HOST_WIDE_INT) bitpos
4740 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4741 {
4742 expand_normal (from);
4743 result = NULL;
4744 }
4745 /* Handle expand_expr of a complex value returning a CONCAT. */
4746 else if (GET_CODE (to_rtx) == CONCAT)
4747 {
4748 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4749 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4750 && bitpos == 0
4751 && bitsize == mode_bitsize)
4752 result = store_expr (from, to_rtx, false, nontemporal);
4753 else if (bitsize == mode_bitsize / 2
4754 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4755 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4756 nontemporal);
4757 else if (bitpos + bitsize <= mode_bitsize / 2)
4758 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4759 bitregion_start, bitregion_end,
4760 mode1, from, TREE_TYPE (tem),
4761 get_alias_set (to), nontemporal);
4762 else if (bitpos >= mode_bitsize / 2)
4763 result = store_field (XEXP (to_rtx, 1), bitsize,
4764 bitpos - mode_bitsize / 2,
4765 bitregion_start, bitregion_end,
4766 mode1, from,
4767 TREE_TYPE (tem), get_alias_set (to),
4768 nontemporal);
4769 else if (bitpos == 0 && bitsize == mode_bitsize)
4770 {
4771 rtx from_rtx;
4772 result = expand_normal (from);
4773 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4774 TYPE_MODE (TREE_TYPE (from)), 0);
4775 emit_move_insn (XEXP (to_rtx, 0),
4776 read_complex_part (from_rtx, false));
4777 emit_move_insn (XEXP (to_rtx, 1),
4778 read_complex_part (from_rtx, true));
4779 }
4780 else
4781 {
4782 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4783 GET_MODE_SIZE (GET_MODE (to_rtx)));
4784 write_complex_part (temp, XEXP (to_rtx, 0), false);
4785 write_complex_part (temp, XEXP (to_rtx, 1), true);
4786 result = store_field (temp, bitsize, bitpos,
4787 bitregion_start, bitregion_end,
4788 mode1, from,
4789 TREE_TYPE (tem), get_alias_set (to),
4790 nontemporal);
4791 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4792 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4793 }
4794 }
4795 else
4796 {
4797 if (MEM_P (to_rtx))
4798 {
4799 /* If the field is at offset zero, we could have been given the
4800 DECL_RTX of the parent struct. Don't munge it. */
4801 to_rtx = shallow_copy_rtx (to_rtx);
4802
4803 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4804
4805 /* Deal with volatile and readonly fields. The former is only
4806 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4807 if (volatilep)
4808 MEM_VOLATILE_P (to_rtx) = 1;
4809 if (component_uses_parent_alias_set (to))
4810 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4811 }
4812
4813 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4814 bitregion_start, bitregion_end,
4815 mode1,
4816 to_rtx, to, from))
4817 result = NULL;
4818 else
4819 result = store_field (to_rtx, bitsize, bitpos,
4820 bitregion_start, bitregion_end,
4821 mode1, from,
4822 TREE_TYPE (tem), get_alias_set (to),
4823 nontemporal);
4824 }
4825
4826 if (misalignp)
4827 {
4828 struct expand_operand ops[2];
4829
4830 create_fixed_operand (&ops[0], mem);
4831 create_input_operand (&ops[1], to_rtx, mode);
4832 /* The movmisalign<mode> pattern cannot fail, else the assignment
4833 would silently be omitted. */
4834 expand_insn (icode, 2, ops);
4835 }
4836
4837 if (result)
4838 preserve_temp_slots (result);
4839 pop_temp_slots ();
4840 return;
4841 }
4842
4843 /* If the rhs is a function call and its value is not an aggregate,
4844 call the function before we start to compute the lhs.
4845 This is needed for correct code for cases such as
4846 val = setjmp (buf) on machines where reference to val
4847 requires loading up part of an address in a separate insn.
4848
4849 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4850 since it might be a promoted variable where the zero- or sign- extension
4851 needs to be done. Handling this in the normal way is safe because no
4852 computation is done before the call. The same is true for SSA names. */
4853 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4854 && COMPLETE_TYPE_P (TREE_TYPE (from))
4855 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4856 && ! (((TREE_CODE (to) == VAR_DECL
4857 || TREE_CODE (to) == PARM_DECL
4858 || TREE_CODE (to) == RESULT_DECL)
4859 && REG_P (DECL_RTL (to)))
4860 || TREE_CODE (to) == SSA_NAME))
4861 {
4862 rtx value;
4863
4864 push_temp_slots ();
4865 value = expand_normal (from);
4866 if (to_rtx == 0)
4867 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4868
4869 /* Handle calls that return values in multiple non-contiguous locations.
4870 The Irix 6 ABI has examples of this. */
4871 if (GET_CODE (to_rtx) == PARALLEL)
4872 emit_group_load (to_rtx, value, TREE_TYPE (from),
4873 int_size_in_bytes (TREE_TYPE (from)));
4874 else if (GET_MODE (to_rtx) == BLKmode)
4875 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4876 else
4877 {
4878 if (POINTER_TYPE_P (TREE_TYPE (to)))
4879 value = convert_memory_address_addr_space
4880 (GET_MODE (to_rtx), value,
4881 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4882
4883 emit_move_insn (to_rtx, value);
4884 }
4885 preserve_temp_slots (to_rtx);
4886 pop_temp_slots ();
4887 return;
4888 }
4889
4890 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4891 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4892
4893 /* Don't move directly into a return register. */
4894 if (TREE_CODE (to) == RESULT_DECL
4895 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4896 {
4897 rtx temp;
4898
4899 push_temp_slots ();
4900 if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4901 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4902 else
4903 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4904
4905 if (GET_CODE (to_rtx) == PARALLEL)
4906 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4907 int_size_in_bytes (TREE_TYPE (from)));
4908 else if (temp)
4909 emit_move_insn (to_rtx, temp);
4910
4911 preserve_temp_slots (to_rtx);
4912 pop_temp_slots ();
4913 return;
4914 }
4915
4916 /* In case we are returning the contents of an object which overlaps
4917 the place the value is being stored, use a safe function when copying
4918 a value through a pointer into a structure value return block. */
4919 if (TREE_CODE (to) == RESULT_DECL
4920 && TREE_CODE (from) == INDIRECT_REF
4921 && ADDR_SPACE_GENERIC_P
4922 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4923 && refs_may_alias_p (to, from)
4924 && cfun->returns_struct
4925 && !cfun->returns_pcc_struct)
4926 {
4927 rtx from_rtx, size;
4928
4929 push_temp_slots ();
4930 size = expr_size (from);
4931 from_rtx = expand_normal (from);
4932
4933 emit_library_call (memmove_libfunc, LCT_NORMAL,
4934 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4935 XEXP (from_rtx, 0), Pmode,
4936 convert_to_mode (TYPE_MODE (sizetype),
4937 size, TYPE_UNSIGNED (sizetype)),
4938 TYPE_MODE (sizetype));
4939
4940 preserve_temp_slots (to_rtx);
4941 pop_temp_slots ();
4942 return;
4943 }
4944
4945 /* Compute FROM and store the value in the rtx we got. */
4946
4947 push_temp_slots ();
4948 result = store_expr (from, to_rtx, 0, nontemporal);
4949 preserve_temp_slots (result);
4950 pop_temp_slots ();
4951 return;
4952 }
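/* Illustrative sketch, not part of the original source: expanding a
   gimple assignment funnels through this routine, which picks among
   the misaligned-store, component/bitfield and plain paths above.
   LHS and RHS are placeholder trees.  */
#if 0
expand_assignment (lhs, rhs, /*nontemporal=*/false);
#endif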
4953
4954 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4955 succeeded, false otherwise. */
4956
4957 bool
4958 emit_storent_insn (rtx to, rtx from)
4959 {
4960 struct expand_operand ops[2];
4961 enum machine_mode mode = GET_MODE (to);
4962 enum insn_code code = optab_handler (storent_optab, mode);
4963
4964 if (code == CODE_FOR_nothing)
4965 return false;
4966
4967 create_fixed_operand (&ops[0], to);
4968 create_input_operand (&ops[1], from, mode);
4969 return maybe_expand_insn (code, 2, ops);
4970 }
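/* Illustrative sketch, not part of the original source: callers try
   the nontemporal path first and fall back to an ordinary move, as
   store_expr does below.  */
#if 0
if (!emit_storent_insn (target, temp))
  {
    temp = force_operand (temp, target);
    if (temp != target)
      emit_move_insn (target, temp);
  }
#endif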
4971
4972 /* Generate code for computing expression EXP,
4973 and storing the value into TARGET.
4974
4975 If the mode is BLKmode then we may return TARGET itself.
4976 It turns out that in BLKmode it doesn't cause a problem,
4977 because C has no operators that could combine two different
4978 assignments into the same BLKmode object with different values
4979 with no sequence point. Will other languages need this to
4980 be more thorough?
4981
4982 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4983 stack, and block moves may need to be treated specially.
4984
4985 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4986
4987 rtx
4988 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4989 {
4990 rtx temp;
4991 rtx alt_rtl = NULL_RTX;
4992 location_t loc = EXPR_LOCATION (exp);
4993
4994 if (VOID_TYPE_P (TREE_TYPE (exp)))
4995 {
4996 /* C++ can generate ?: expressions with a throw expression in one
4997 branch and an rvalue in the other. Here, we resolve attempts to
4998 store the throw expression's nonexistent result. */
4999 gcc_assert (!call_param_p);
5000 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5001 return NULL_RTX;
5002 }
5003 if (TREE_CODE (exp) == COMPOUND_EXPR)
5004 {
5005 /* Perform first part of compound expression, then assign from second
5006 part. */
5007 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5008 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5009 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5010 nontemporal);
5011 }
5012 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5013 {
5014 /* For conditional expression, get safe form of the target. Then
5015 test the condition, doing the appropriate assignment on either
5016 side. This avoids the creation of unnecessary temporaries.
5017 For non-BLKmode, it is more efficient not to do this. */
5018
5019 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5020
5021 do_pending_stack_adjust ();
5022 NO_DEFER_POP;
5023 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5024 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5025 nontemporal);
5026 emit_jump_insn (gen_jump (lab2));
5027 emit_barrier ();
5028 emit_label (lab1);
5029 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5030 nontemporal);
5031 emit_label (lab2);
5032 OK_DEFER_POP;
5033
5034 return NULL_RTX;
5035 }
5036 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5037 /* If this is a scalar in a register that is stored in a wider mode
5038 than the declared mode, compute the result into its declared mode
5039 and then convert to the wider mode. Our value is the computed
5040 expression. */
5041 {
5042 rtx inner_target = 0;
5043
5044 /* We can do the conversion inside EXP, which will often result
5045 in some optimizations. Do the conversion in two steps: first
5046 change the signedness, if needed, then the extend. But don't
5047 do this if the type of EXP is a subtype of something else
5048 since then the conversion might involve more than just
5049 converting modes. */
5050 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5051 && TREE_TYPE (TREE_TYPE (exp)) == 0
5052 && GET_MODE_PRECISION (GET_MODE (target))
5053 == TYPE_PRECISION (TREE_TYPE (exp)))
5054 {
5055 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5056 != SUBREG_PROMOTED_UNSIGNED_P (target))
5057 {
5058 /* Some types, e.g. Fortran's logical*4, won't have a signed
5059 version, so use the mode instead. */
5060 tree ntype
5061 = (signed_or_unsigned_type_for
5062 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5063 if (ntype == NULL)
5064 ntype = lang_hooks.types.type_for_mode
5065 (TYPE_MODE (TREE_TYPE (exp)),
5066 SUBREG_PROMOTED_UNSIGNED_P (target));
5067
5068 exp = fold_convert_loc (loc, ntype, exp);
5069 }
5070
5071 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5072 (GET_MODE (SUBREG_REG (target)),
5073 SUBREG_PROMOTED_UNSIGNED_P (target)),
5074 exp);
5075
5076 inner_target = SUBREG_REG (target);
5077 }
5078
5079 temp = expand_expr (exp, inner_target, VOIDmode,
5080 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5081
5082 /* If TEMP is a VOIDmode constant, use convert_modes to make
5083 sure that we properly convert it. */
5084 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5085 {
5086 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5087 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5088 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5089 GET_MODE (target), temp,
5090 SUBREG_PROMOTED_UNSIGNED_P (target));
5091 }
5092
5093 convert_move (SUBREG_REG (target), temp,
5094 SUBREG_PROMOTED_UNSIGNED_P (target));
5095
5096 return NULL_RTX;
5097 }
5098 else if ((TREE_CODE (exp) == STRING_CST
5099 || (TREE_CODE (exp) == MEM_REF
5100 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5101 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5102 == STRING_CST
5103 && integer_zerop (TREE_OPERAND (exp, 1))))
5104 && !nontemporal && !call_param_p
5105 && MEM_P (target))
5106 {
5107 /* Optimize initialization of an array with a STRING_CST. */
5108 HOST_WIDE_INT exp_len, str_copy_len;
5109 rtx dest_mem;
5110 tree str = TREE_CODE (exp) == STRING_CST
5111 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5112
5113 exp_len = int_expr_size (exp);
5114 if (exp_len <= 0)
5115 goto normal_expr;
5116
5117 if (TREE_STRING_LENGTH (str) <= 0)
5118 goto normal_expr;
5119
5120 str_copy_len = strlen (TREE_STRING_POINTER (str));
5121 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5122 goto normal_expr;
5123
5124 str_copy_len = TREE_STRING_LENGTH (str);
5125 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5126 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5127 {
5128 str_copy_len += STORE_MAX_PIECES - 1;
5129 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5130 }
5131 str_copy_len = MIN (str_copy_len, exp_len);
5132 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5133 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5134 MEM_ALIGN (target), false))
5135 goto normal_expr;
5136
5137 dest_mem = target;
5138
5139 dest_mem = store_by_pieces (dest_mem,
5140 str_copy_len, builtin_strncpy_read_str,
5141 CONST_CAST (char *,
5142 TREE_STRING_POINTER (str)),
5143 MEM_ALIGN (target), false,
5144 exp_len > str_copy_len ? 1 : 0);
5145 if (exp_len > str_copy_len)
5146 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5147 GEN_INT (exp_len - str_copy_len),
5148 BLOCK_OP_NORMAL);
5149 return NULL_RTX;
5150 }
5151 else
5152 {
5153 rtx tmp_target;
5154
5155 normal_expr:
5156 /* If we want to use a nontemporal store, force the value to
5157 register first. */
5158 tmp_target = nontemporal ? NULL_RTX : target;
5159 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5160 (call_param_p
5161 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5162 &alt_rtl);
5163 }
5164
5165 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5166 the same as that of TARGET, adjust the constant. This is needed, for
5167 example, in case it is a CONST_DOUBLE and we want only a word-sized
5168 value. */
5169 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5170 && TREE_CODE (exp) != ERROR_MARK
5171 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5172 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5173 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5174
5175 /* If value was not generated in the target, store it there.
5176 Convert the value to TARGET's type first if necessary and emit the
5177 pending increments that have been queued when expanding EXP.
5178 Note that we cannot emit the whole queue blindly because this will
5179 effectively disable the POST_INC optimization later.
5180
5181 If TEMP and TARGET compare equal according to rtx_equal_p, but
5182 one or both of them are volatile memory refs, we have to distinguish
5183 two cases:
5184 - expand_expr has used TARGET. In this case, we must not generate
5185 another copy. This can be detected by TARGET being equal according
5186 to == .
5187 - expand_expr has not used TARGET - that means that the source just
5188 happens to have the same RTX form. Since temp will have been created
5189 by expand_expr, it will compare unequal according to == .
5190 We must generate a copy in this case, to reach the correct number
5191 of volatile memory references. */
5192
5193 if ((! rtx_equal_p (temp, target)
5194 || (temp != target && (side_effects_p (temp)
5195 || side_effects_p (target))))
5196 && TREE_CODE (exp) != ERROR_MARK
5197 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5198 but TARGET is not a valid memory reference, TEMP will differ
5199 from TARGET although it is really the same location. */
5200 && !(alt_rtl
5201 && rtx_equal_p (alt_rtl, target)
5202 && !side_effects_p (alt_rtl)
5203 && !side_effects_p (target))
5204 /* If there's nothing to copy, don't bother. Don't call
5205 expr_size unless necessary, because some front ends' (C++)
5206 expr_size hook must not be given objects that are not
5207 supposed to be bit-copied or bit-initialized. */
5208 && expr_size (exp) != const0_rtx)
5209 {
5210 if (GET_MODE (temp) != GET_MODE (target)
5211 && GET_MODE (temp) != VOIDmode)
5212 {
5213 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5214 if (GET_MODE (target) == BLKmode
5215 && GET_MODE (temp) == BLKmode)
5216 emit_block_move (target, temp, expr_size (exp),
5217 (call_param_p
5218 ? BLOCK_OP_CALL_PARM
5219 : BLOCK_OP_NORMAL));
5220 else if (GET_MODE (target) == BLKmode)
5221 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5222 0, 0, 0, GET_MODE (temp), temp);
5223 else
5224 convert_move (target, temp, unsignedp);
5225 }
5226
5227 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5228 {
5229 /* Handle copying a string constant into an array. The string
5230 constant may be shorter than the array. So copy just the string's
5231 actual length, and clear the rest. First get the size of the data
5232 type of the string, which is actually the size of the target. */
5233 rtx size = expr_size (exp);
5234
5235 if (CONST_INT_P (size)
5236 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5237 emit_block_move (target, temp, size,
5238 (call_param_p
5239 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5240 else
5241 {
5242 enum machine_mode pointer_mode
5243 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5244 enum machine_mode address_mode = get_address_mode (target);
5245
5246 /* Compute the size of the data to copy from the string. */
5247 tree copy_size
5248 = size_binop_loc (loc, MIN_EXPR,
5249 make_tree (sizetype, size),
5250 size_int (TREE_STRING_LENGTH (exp)));
5251 rtx copy_size_rtx
5252 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5253 (call_param_p
5254 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5255 rtx label = 0;
5256
5257 /* Copy that much. */
5258 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5259 TYPE_UNSIGNED (sizetype));
5260 emit_block_move (target, temp, copy_size_rtx,
5261 (call_param_p
5262 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5263
5264 /* Figure out how much is left in TARGET that we have to clear.
5265 Do all calculations in pointer_mode. */
5266 if (CONST_INT_P (copy_size_rtx))
5267 {
5268 size = plus_constant (address_mode, size,
5269 -INTVAL (copy_size_rtx));
5270 target = adjust_address (target, BLKmode,
5271 INTVAL (copy_size_rtx));
5272 }
5273 else
5274 {
5275 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5276 copy_size_rtx, NULL_RTX, 0,
5277 OPTAB_LIB_WIDEN);
5278
5279 if (GET_MODE (copy_size_rtx) != address_mode)
5280 copy_size_rtx = convert_to_mode (address_mode,
5281 copy_size_rtx,
5282 TYPE_UNSIGNED (sizetype));
5283
5284 target = offset_address (target, copy_size_rtx,
5285 highest_pow2_factor (copy_size));
5286 label = gen_label_rtx ();
5287 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5288 GET_MODE (size), 0, label);
5289 }
5290
5291 if (size != const0_rtx)
5292 clear_storage (target, size, BLOCK_OP_NORMAL);
5293
5294 if (label)
5295 emit_label (label);
5296 }
5297 }
5298 /* Handle calls that return values in multiple non-contiguous locations.
5299 The Irix 6 ABI has examples of this. */
5300 else if (GET_CODE (target) == PARALLEL)
5301 emit_group_load (target, temp, TREE_TYPE (exp),
5302 int_size_in_bytes (TREE_TYPE (exp)));
5303 else if (GET_MODE (temp) == BLKmode)
5304 emit_block_move (target, temp, expr_size (exp),
5305 (call_param_p
5306 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5307 else if (nontemporal
5308 && emit_storent_insn (target, temp))
5309 /* If we managed to emit a nontemporal store, there is nothing else to
5310 do. */
5311 ;
5312 else
5313 {
5314 temp = force_operand (temp, target);
5315 if (temp != target)
5316 emit_move_insn (target, temp);
5317 }
5318 }
5319
5320 return NULL_RTX;
5321 }
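
/* E.g. (a sketch of the STRING_CST path above, not taken from the
   sources): for

       char buf[8] = "hi";

   the expander block-copies the three bytes of the string, including
   the terminating NUL, and then clears the remaining five bytes of
   BUF.  */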
5322 \f
5323 /* Return true if field F of structure TYPE is a flexible array member. */
5324
5325 static bool
5326 flexible_array_member_p (const_tree f, const_tree type)
5327 {
5328 const_tree tf;
5329
5330 tf = TREE_TYPE (f);
5331 return (DECL_CHAIN (f) == NULL
5332 && TREE_CODE (tf) == ARRAY_TYPE
5333 && TYPE_DOMAIN (tf)
5334 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5335 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5336 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5337 && int_size_in_bytes (type) >= 0);
5338 }
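
/* Illustrative sketch (not part of the original sources): the C99
   flexible array member is exactly what the predicate above accepts.

       struct packet
       {
         int len;
         char payload[];
       };

   Here "payload" is the last field, has ARRAY_TYPE with a zero lower
   bound and no upper bound, and sizeof (struct packet) is still a
   compile-time constant.  Declaring "char payload[1];" instead would
   fail the !TYPE_MAX_VALUE check.  */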
5339
5340 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5341 must have in order for it to completely initialize a value of type TYPE.
5342 Return -1 if the number isn't known.
5343
5344 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5345
5346 static HOST_WIDE_INT
5347 count_type_elements (const_tree type, bool for_ctor_p)
5348 {
5349 switch (TREE_CODE (type))
5350 {
5351 case ARRAY_TYPE:
5352 {
5353 tree nelts;
5354
5355 nelts = array_type_nelts (type);
5356 if (nelts && host_integerp (nelts, 1))
5357 {
5358 unsigned HOST_WIDE_INT n;
5359
5360 n = tree_low_cst (nelts, 1) + 1;
5361 if (n == 0 || for_ctor_p)
5362 return n;
5363 else
5364 return n * count_type_elements (TREE_TYPE (type), false);
5365 }
5366 return for_ctor_p ? -1 : 1;
5367 }
5368
5369 case RECORD_TYPE:
5370 {
5371 unsigned HOST_WIDE_INT n;
5372 tree f;
5373
5374 n = 0;
5375 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5376 if (TREE_CODE (f) == FIELD_DECL)
5377 {
5378 if (!for_ctor_p)
5379 n += count_type_elements (TREE_TYPE (f), false);
5380 else if (!flexible_array_member_p (f, type))
5381 /* Don't count flexible arrays, which are not supposed
5382 to be initialized. */
5383 n += 1;
5384 }
5385
5386 return n;
5387 }
5388
5389 case UNION_TYPE:
5390 case QUAL_UNION_TYPE:
5391 {
5392 tree f;
5393 HOST_WIDE_INT n, m;
5394
5395 gcc_assert (!for_ctor_p);
5396 /* Estimate the number of scalars in each field and pick the
5397 maximum. Other estimates would do instead; the idea is simply
5398 to make sure that the estimate is not sensitive to the ordering
5399 of the fields. */
5400 n = 1;
5401 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5402 if (TREE_CODE (f) == FIELD_DECL)
5403 {
5404 m = count_type_elements (TREE_TYPE (f), false);
5405 /* If the field doesn't span the whole union, add an extra
5406 scalar for the rest. */
5407 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5408 TYPE_SIZE (type)) != 1)
5409 m++;
5410 if (n < m)
5411 n = m;
5412 }
5413 return n;
5414 }
5415
5416 case COMPLEX_TYPE:
5417 return 2;
5418
5419 case VECTOR_TYPE:
5420 return TYPE_VECTOR_SUBPARTS (type);
5421
5422 case INTEGER_TYPE:
5423 case REAL_TYPE:
5424 case FIXED_POINT_TYPE:
5425 case ENUMERAL_TYPE:
5426 case BOOLEAN_TYPE:
5427 case POINTER_TYPE:
5428 case OFFSET_TYPE:
5429 case REFERENCE_TYPE:
5430 case NULLPTR_TYPE:
5431 return 1;
5432
5433 case ERROR_MARK:
5434 return 0;
5435
5436 case VOID_TYPE:
5437 case METHOD_TYPE:
5438 case FUNCTION_TYPE:
5439 case LANG_TYPE:
5440 default:
5441 gcc_unreachable ();
5442 }
5443 }
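
/* A sketch of the two counting modes, using a hypothetical type:

       struct s { int a; float b[4]; _Complex double c; };

   With FOR_CTOR_P, a complete constructor must supply 3 top-level
   elements, one per field.  Without it, the scalar estimate is
   1 + 4 + 2 = 7.  */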
5444
5445 /* Helper for categorize_ctor_elements. Identical interface. */
5446
5447 static bool
5448 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5449 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5450 {
5451 unsigned HOST_WIDE_INT idx;
5452 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5453 tree value, purpose, elt_type;
5454
5455 /* Whether CTOR is a valid constant initializer, in accordance with what
5456 initializer_constant_valid_p does. If inferred from the constructor
5457 elements, true until proven otherwise. */
5458 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5459 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5460
5461 nz_elts = 0;
5462 init_elts = 0;
5463 num_fields = 0;
5464 elt_type = NULL_TREE;
5465
5466 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5467 {
5468 HOST_WIDE_INT mult = 1;
5469
5470 if (TREE_CODE (purpose) == RANGE_EXPR)
5471 {
5472 tree lo_index = TREE_OPERAND (purpose, 0);
5473 tree hi_index = TREE_OPERAND (purpose, 1);
5474
5475 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5476 mult = (tree_low_cst (hi_index, 1)
5477 - tree_low_cst (lo_index, 1) + 1);
5478 }
5479 num_fields += mult;
5480 elt_type = TREE_TYPE (value);
5481
5482 switch (TREE_CODE (value))
5483 {
5484 case CONSTRUCTOR:
5485 {
5486 HOST_WIDE_INT nz = 0, ic = 0;
5487
5488 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5489 p_complete);
5490
5491 nz_elts += mult * nz;
5492 init_elts += mult * ic;
5493
5494 if (const_from_elts_p && const_p)
5495 const_p = const_elt_p;
5496 }
5497 break;
5498
5499 case INTEGER_CST:
5500 case REAL_CST:
5501 case FIXED_CST:
5502 if (!initializer_zerop (value))
5503 nz_elts += mult;
5504 init_elts += mult;
5505 break;
5506
5507 case STRING_CST:
5508 nz_elts += mult * TREE_STRING_LENGTH (value);
5509 init_elts += mult * TREE_STRING_LENGTH (value);
5510 break;
5511
5512 case COMPLEX_CST:
5513 if (!initializer_zerop (TREE_REALPART (value)))
5514 nz_elts += mult;
5515 if (!initializer_zerop (TREE_IMAGPART (value)))
5516 nz_elts += mult;
5517 init_elts += mult;
5518 break;
5519
5520 case VECTOR_CST:
5521 {
5522 unsigned i;
5523 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5524 {
5525 tree v = VECTOR_CST_ELT (value, i);
5526 if (!initializer_zerop (v))
5527 nz_elts += mult;
5528 init_elts += mult;
5529 }
5530 }
5531 break;
5532
5533 default:
5534 {
5535 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5536 nz_elts += mult * tc;
5537 init_elts += mult * tc;
5538
5539 if (const_from_elts_p && const_p)
5540 const_p = initializer_constant_valid_p (value, elt_type)
5541 != NULL_TREE;
5542 }
5543 break;
5544 }
5545 }
5546
5547 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5548 num_fields, elt_type))
5549 *p_complete = false;
5550
5551 *p_nz_elts += nz_elts;
5552 *p_init_elts += init_elts;
5553
5554 return const_p;
5555 }
5556
5557 /* Examine CTOR to discover:
5558 * how many scalar fields are set to nonzero values,
5559 and place it in *P_NZ_ELTS;
5560 * how many scalar fields in total are in CTOR,
5561 and place it in *P_INIT_ELTS;
5562 * whether the constructor is complete -- in the sense that every
5563 meaningful byte is explicitly given a value --
5564 and place it in *P_COMPLETE.
5565
5566 Return whether or not CTOR is a valid static constant initializer, the same
5567 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5568
5569 bool
5570 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5571 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5572 {
5573 *p_nz_elts = 0;
5574 *p_init_elts = 0;
5575 *p_complete = true;
5576
5577 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5578 }
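
/* For example (a sketch, not from the sources), the C initializer

       int v[6] = { 1, 0, 2 };

   yields *P_NZ_ELTS == 2, *P_INIT_ELTS == 3 and *P_COMPLETE == false,
   since only three of the six elements are given explicitly.  */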
5579
5580 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5581 of which had type LAST_TYPE. Each element was itself a complete
5582 initializer, in the sense that every meaningful byte was explicitly
5583 given a value. Return true if the same is true for the constructor
5584 as a whole. */
5585
5586 bool
5587 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5588 const_tree last_type)
5589 {
5590 if (TREE_CODE (type) == UNION_TYPE
5591 || TREE_CODE (type) == QUAL_UNION_TYPE)
5592 {
5593 if (num_elts == 0)
5594 return false;
5595
5596 gcc_assert (num_elts == 1 && last_type);
5597
5598 /* ??? We could look at each element of the union, and find the
5599 largest element, which would avoid comparing the size of the
5600 initialized element against any tail padding in the union.
5601 Doesn't seem worth the effort... */
5602 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5603 }
5604
5605 return count_type_elements (type, true) == num_elts;
5606 }
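
/* E.g. (illustration only, assuming a 4-byte int): for

       union u { char c; int i; };

   an initializer of the member "i" is complete because int spans the
   whole union, while one of "c" is not, since three tail bytes are
   left unspecified.  */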
5607
5608 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5609
5610 static int
5611 mostly_zeros_p (const_tree exp)
5612 {
5613 if (TREE_CODE (exp) == CONSTRUCTOR)
5614 {
5615 HOST_WIDE_INT nz_elts, init_elts;
5616 bool complete_p;
5617
5618 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5619 return !complete_p || nz_elts < init_elts / 4;
5620 }
5621
5622 return initializer_zerop (exp);
5623 }
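
/* E.g. (sketch): "int a[8] = { 1 };" has one nonzero element and an
   incomplete constructor, so it counts as mostly zero and the
   expander will prefer to clear the whole object first.  */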
5624
5625 /* Return 1 if EXP contains all zeros. */
5626
5627 static int
5628 all_zeros_p (const_tree exp)
5629 {
5630 if (TREE_CODE (exp) == CONSTRUCTOR)
5631 {
5632 HOST_WIDE_INT nz_elts, init_elts;
5633 bool complete_p;
5634
5635 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5636 return nz_elts == 0;
5637 }
5638
5639 return initializer_zerop (exp);
5640 }
5641 \f
5642 /* Helper function for store_constructor.
5643 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5644 TYPE is the type of the CONSTRUCTOR, not the element type.
5645 CLEARED is as for store_constructor.
5646 ALIAS_SET is the alias set to use for any stores.
5647
5648 This provides a recursive shortcut back to store_constructor when it isn't
5649 necessary to go through store_field. This is so that we can pass through
5650 the cleared field to let store_constructor know that we may not have to
5651 clear a substructure if the outer structure has already been cleared. */
5652
5653 static void
5654 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5655 HOST_WIDE_INT bitpos, enum machine_mode mode,
5656 tree exp, tree type, int cleared,
5657 alias_set_type alias_set)
5658 {
5659 if (TREE_CODE (exp) == CONSTRUCTOR
5660 /* We can only call store_constructor recursively if the size and
5661 bit position are on a byte boundary. */
5662 && bitpos % BITS_PER_UNIT == 0
5663 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5664 /* If we have a nonzero bitpos for a register target, then we just
5665 let store_field do the bitfield handling. This is unlikely to
5666 generate unnecessary clear instructions anyway. */
5667 && (bitpos == 0 || MEM_P (target)))
5668 {
5669 if (MEM_P (target))
5670 target
5671 = adjust_address (target,
5672 GET_MODE (target) == BLKmode
5673 || 0 != (bitpos
5674 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5675 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5676
5677
5678 /* Update the alias set, if required. */
5679 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5680 && MEM_ALIAS_SET (target) != 0)
5681 {
5682 target = copy_rtx (target);
5683 set_mem_alias_set (target, alias_set);
5684 }
5685
5686 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5687 }
5688 else
5689 store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
5690 false);
5691 }
5692
5693 /* Store the value of constructor EXP into the rtx TARGET.
5694 TARGET is either a REG or a MEM; we know it cannot conflict, since
5695 safe_from_p has been called.
5696 CLEARED is true if TARGET is known to have been zeroed.
5697 SIZE is the number of bytes of TARGET we are allowed to modify: this
5698 may not be the same as the size of EXP if we are assigning to a field
5699 which has been packed to exclude padding bits. */
5700
5701 static void
5702 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5703 {
5704 tree type = TREE_TYPE (exp);
5705 #ifdef WORD_REGISTER_OPERATIONS
5706 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5707 #endif
5708
5709 switch (TREE_CODE (type))
5710 {
5711 case RECORD_TYPE:
5712 case UNION_TYPE:
5713 case QUAL_UNION_TYPE:
5714 {
5715 unsigned HOST_WIDE_INT idx;
5716 tree field, value;
5717
5718 /* If size is zero or the target is already cleared, do nothing. */
5719 if (size == 0 || cleared)
5720 cleared = 1;
5721 /* We either clear the aggregate or indicate the value is dead. */
5722 else if ((TREE_CODE (type) == UNION_TYPE
5723 || TREE_CODE (type) == QUAL_UNION_TYPE)
5724 && ! CONSTRUCTOR_ELTS (exp))
5725 /* If the constructor is empty, clear the union. */
5726 {
5727 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5728 cleared = 1;
5729 }
5730
5731 /* If we are building a static constructor into a register,
5732 set the initial value as zero so we can fold the value into
5733 a constant. But if more than one register is involved,
5734 this probably loses. */
5735 else if (REG_P (target) && TREE_STATIC (exp)
5736 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5737 {
5738 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5739 cleared = 1;
5740 }
5741
5742 /* If the constructor has fewer fields than the structure or
5743 if we are initializing the structure to mostly zeros, clear
5744 the whole structure first. Don't do this if TARGET is a
5745 register whose mode size isn't equal to SIZE since
5746 clear_storage can't handle this case. */
5747 else if (size > 0
5748 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5749 != fields_length (type))
5750 || mostly_zeros_p (exp))
5751 && (!REG_P (target)
5752 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5753 == size)))
5754 {
5755 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5756 cleared = 1;
5757 }
5758
5759 if (REG_P (target) && !cleared)
5760 emit_clobber (target);
5761
5762 /* Store each element of the constructor into the
5763 corresponding field of TARGET. */
5764 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5765 {
5766 enum machine_mode mode;
5767 HOST_WIDE_INT bitsize;
5768 HOST_WIDE_INT bitpos = 0;
5769 tree offset;
5770 rtx to_rtx = target;
5771
5772 /* Just ignore missing fields. We cleared the whole
5773 structure, above, if any fields are missing. */
5774 if (field == 0)
5775 continue;
5776
5777 if (cleared && initializer_zerop (value))
5778 continue;
5779
5780 if (host_integerp (DECL_SIZE (field), 1))
5781 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5782 else
5783 bitsize = -1;
5784
5785 mode = DECL_MODE (field);
5786 if (DECL_BIT_FIELD (field))
5787 mode = VOIDmode;
5788
5789 offset = DECL_FIELD_OFFSET (field);
5790 if (host_integerp (offset, 0)
5791 && host_integerp (bit_position (field), 0))
5792 {
5793 bitpos = int_bit_position (field);
5794 offset = 0;
5795 }
5796 else
5797 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5798
5799 if (offset)
5800 {
5801 enum machine_mode address_mode;
5802 rtx offset_rtx;
5803
5804 offset
5805 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5806 make_tree (TREE_TYPE (exp),
5807 target));
5808
5809 offset_rtx = expand_normal (offset);
5810 gcc_assert (MEM_P (to_rtx));
5811
5812 address_mode = get_address_mode (to_rtx);
5813 if (GET_MODE (offset_rtx) != address_mode)
5814 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5815
5816 to_rtx = offset_address (to_rtx, offset_rtx,
5817 highest_pow2_factor (offset));
5818 }
5819
5820 #ifdef WORD_REGISTER_OPERATIONS
5821 /* If this initializes a field that is smaller than a
5822 word, at the start of a word, try to widen it to a full
5823 word. This special case allows us to output C++ member
5824 function initializations in a form that the optimizers
5825 can understand. */
5826 if (REG_P (target)
5827 && bitsize < BITS_PER_WORD
5828 && bitpos % BITS_PER_WORD == 0
5829 && GET_MODE_CLASS (mode) == MODE_INT
5830 && TREE_CODE (value) == INTEGER_CST
5831 && exp_size >= 0
5832 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5833 {
5834 tree type = TREE_TYPE (value);
5835
5836 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5837 {
5838 type = lang_hooks.types.type_for_mode
5839 (word_mode, TYPE_UNSIGNED (type));
5840 value = fold_convert (type, value);
5841 }
5842
5843 if (BYTES_BIG_ENDIAN)
5844 value
5845 = fold_build2 (LSHIFT_EXPR, type, value,
5846 build_int_cst (type,
5847 BITS_PER_WORD - bitsize));
5848 bitsize = BITS_PER_WORD;
5849 mode = word_mode;
5850 }
5851 #endif
5852
5853 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5854 && DECL_NONADDRESSABLE_P (field))
5855 {
5856 to_rtx = copy_rtx (to_rtx);
5857 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5858 }
5859
5860 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5861 value, type, cleared,
5862 get_alias_set (TREE_TYPE (field)));
5863 }
5864 break;
5865 }
5866 case ARRAY_TYPE:
5867 {
5868 tree value, index;
5869 unsigned HOST_WIDE_INT i;
5870 int need_to_clear;
5871 tree domain;
5872 tree elttype = TREE_TYPE (type);
5873 int const_bounds_p;
5874 HOST_WIDE_INT minelt = 0;
5875 HOST_WIDE_INT maxelt = 0;
5876
5877 domain = TYPE_DOMAIN (type);
5878 const_bounds_p = (TYPE_MIN_VALUE (domain)
5879 && TYPE_MAX_VALUE (domain)
5880 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5881 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5882
5883 /* If we have constant bounds for the range of the type, get them. */
5884 if (const_bounds_p)
5885 {
5886 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5887 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5888 }
5889
5890 /* If the constructor has fewer elements than the array, clear
5891 the whole array first. Similarly if this is static
5892 constructor of a non-BLKmode object. */
5893 if (cleared)
5894 need_to_clear = 0;
5895 else if (REG_P (target) && TREE_STATIC (exp))
5896 need_to_clear = 1;
5897 else
5898 {
5899 unsigned HOST_WIDE_INT idx;
5900 tree index, value;
5901 HOST_WIDE_INT count = 0, zero_count = 0;
5902 need_to_clear = ! const_bounds_p;
5903
5904 /* This loop is a more accurate version of the loop in
5905 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5906 is also needed to check for missing elements. */
5907 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5908 {
5909 HOST_WIDE_INT this_node_count;
5910
5911 if (need_to_clear)
5912 break;
5913
5914 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5915 {
5916 tree lo_index = TREE_OPERAND (index, 0);
5917 tree hi_index = TREE_OPERAND (index, 1);
5918
5919 if (! host_integerp (lo_index, 1)
5920 || ! host_integerp (hi_index, 1))
5921 {
5922 need_to_clear = 1;
5923 break;
5924 }
5925
5926 this_node_count = (tree_low_cst (hi_index, 1)
5927 - tree_low_cst (lo_index, 1) + 1);
5928 }
5929 else
5930 this_node_count = 1;
5931
5932 count += this_node_count;
5933 if (mostly_zeros_p (value))
5934 zero_count += this_node_count;
5935 }
5936
5937 /* Clear the entire array first if there are any missing
5938 elements, or if the incidence of zero elements is >=
5939 75%. */
5940 if (! need_to_clear
5941 && (count < maxelt - minelt + 1
5942 || 4 * zero_count >= 3 * count))
5943 need_to_clear = 1;
5944 }
5945
5946 if (need_to_clear && size > 0)
5947 {
5948 if (REG_P (target))
5949 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5950 else
5951 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5952 cleared = 1;
5953 }
5954
5955 if (!cleared && REG_P (target))
5956 /* Inform later passes that the old value is dead. */
5957 emit_clobber (target);
5958
5959 /* Store each element of the constructor into the
5960 corresponding element of TARGET, determined by counting the
5961 elements. */
5962 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5963 {
5964 enum machine_mode mode;
5965 HOST_WIDE_INT bitsize;
5966 HOST_WIDE_INT bitpos;
5967 rtx xtarget = target;
5968
5969 if (cleared && initializer_zerop (value))
5970 continue;
5971
5972 mode = TYPE_MODE (elttype);
5973 if (mode == BLKmode)
5974 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5975 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5976 : -1);
5977 else
5978 bitsize = GET_MODE_BITSIZE (mode);
5979
5980 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5981 {
5982 tree lo_index = TREE_OPERAND (index, 0);
5983 tree hi_index = TREE_OPERAND (index, 1);
5984 rtx index_r, pos_rtx;
5985 HOST_WIDE_INT lo, hi, count;
5986 tree position;
5987
5988 /* If the range is constant and "small", unroll the loop. */
5989 if (const_bounds_p
5990 && host_integerp (lo_index, 0)
5991 && host_integerp (hi_index, 0)
5992 && (lo = tree_low_cst (lo_index, 0),
5993 hi = tree_low_cst (hi_index, 0),
5994 count = hi - lo + 1,
5995 (!MEM_P (target)
5996 || count <= 2
5997 || (host_integerp (TYPE_SIZE (elttype), 1)
5998 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5999 <= 40 * 8)))))
6000 {
6001 lo -= minelt; hi -= minelt;
6002 for (; lo <= hi; lo++)
6003 {
6004 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6005
6006 if (MEM_P (target)
6007 && !MEM_KEEP_ALIAS_SET_P (target)
6008 && TREE_CODE (type) == ARRAY_TYPE
6009 && TYPE_NONALIASED_COMPONENT (type))
6010 {
6011 target = copy_rtx (target);
6012 MEM_KEEP_ALIAS_SET_P (target) = 1;
6013 }
6014
6015 store_constructor_field
6016 (target, bitsize, bitpos, mode, value, type, cleared,
6017 get_alias_set (elttype));
6018 }
6019 }
6020 else
6021 {
6022 rtx loop_start = gen_label_rtx ();
6023 rtx loop_end = gen_label_rtx ();
6024 tree exit_cond;
6025
6026 expand_normal (hi_index);
6027
6028 index = build_decl (EXPR_LOCATION (exp),
6029 VAR_DECL, NULL_TREE, domain);
6030 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6031 SET_DECL_RTL (index, index_r);
6032 store_expr (lo_index, index_r, 0, false);
6033
6034 /* Build the head of the loop. */
6035 do_pending_stack_adjust ();
6036 emit_label (loop_start);
6037
6038 /* Assign value to element index. */
6039 position =
6040 fold_convert (ssizetype,
6041 fold_build2 (MINUS_EXPR,
6042 TREE_TYPE (index),
6043 index,
6044 TYPE_MIN_VALUE (domain)));
6045
6046 position =
6047 size_binop (MULT_EXPR, position,
6048 fold_convert (ssizetype,
6049 TYPE_SIZE_UNIT (elttype)));
6050
6051 pos_rtx = expand_normal (position);
6052 xtarget = offset_address (target, pos_rtx,
6053 highest_pow2_factor (position));
6054 xtarget = adjust_address (xtarget, mode, 0);
6055 if (TREE_CODE (value) == CONSTRUCTOR)
6056 store_constructor (value, xtarget, cleared,
6057 bitsize / BITS_PER_UNIT);
6058 else
6059 store_expr (value, xtarget, 0, false);
6060
6061 /* Generate a conditional jump to exit the loop. */
6062 exit_cond = build2 (LT_EXPR, integer_type_node,
6063 index, hi_index);
6064 jumpif (exit_cond, loop_end, -1);
6065
6066 /* Update the loop counter, and jump to the head of
6067 the loop. */
6068 expand_assignment (index,
6069 build2 (PLUS_EXPR, TREE_TYPE (index),
6070 index, integer_one_node),
6071 false);
6072
6073 emit_jump (loop_start);
6074
6075 /* Build the end of the loop. */
6076 emit_label (loop_end);
6077 }
6078 }
6079 else if ((index != 0 && ! host_integerp (index, 0))
6080 || ! host_integerp (TYPE_SIZE (elttype), 1))
6081 {
6082 tree position;
6083
6084 if (index == 0)
6085 index = ssize_int (1);
6086
6087 if (minelt)
6088 index = fold_convert (ssizetype,
6089 fold_build2 (MINUS_EXPR,
6090 TREE_TYPE (index),
6091 index,
6092 TYPE_MIN_VALUE (domain)));
6093
6094 position =
6095 size_binop (MULT_EXPR, index,
6096 fold_convert (ssizetype,
6097 TYPE_SIZE_UNIT (elttype)));
6098 xtarget = offset_address (target,
6099 expand_normal (position),
6100 highest_pow2_factor (position));
6101 xtarget = adjust_address (xtarget, mode, 0);
6102 store_expr (value, xtarget, 0, false);
6103 }
6104 else
6105 {
6106 if (index != 0)
6107 bitpos = ((tree_low_cst (index, 0) - minelt)
6108 * tree_low_cst (TYPE_SIZE (elttype), 1));
6109 else
6110 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6111
6112 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6113 && TREE_CODE (type) == ARRAY_TYPE
6114 && TYPE_NONALIASED_COMPONENT (type))
6115 {
6116 target = copy_rtx (target);
6117 MEM_KEEP_ALIAS_SET_P (target) = 1;
6118 }
6119 store_constructor_field (target, bitsize, bitpos, mode, value,
6120 type, cleared, get_alias_set (elttype));
6121 }
6122 }
6123 break;
6124 }
6125
6126 case VECTOR_TYPE:
6127 {
6128 unsigned HOST_WIDE_INT idx;
6129 constructor_elt *ce;
6130 int i;
6131 int need_to_clear;
6132 int icode = 0;
6133 tree elttype = TREE_TYPE (type);
6134 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6135 enum machine_mode eltmode = TYPE_MODE (elttype);
6136 HOST_WIDE_INT bitsize;
6137 HOST_WIDE_INT bitpos;
6138 rtvec vector = NULL;
6139 unsigned n_elts;
6140 alias_set_type alias;
6141
6142 gcc_assert (eltmode != BLKmode);
6143
6144 n_elts = TYPE_VECTOR_SUBPARTS (type);
6145 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6146 {
6147 enum machine_mode mode = GET_MODE (target);
6148
6149 icode = (int) optab_handler (vec_init_optab, mode);
6150 if (icode != CODE_FOR_nothing)
6151 {
6152 unsigned int i;
6153
6154 vector = rtvec_alloc (n_elts);
6155 for (i = 0; i < n_elts; i++)
6156 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6157 }
6158 }
6159
6160 /* If the constructor has fewer elements than the vector,
6161 clear the whole vector first. Similarly if this is a static
6162 constructor of a non-BLKmode object. */
6163 if (cleared)
6164 need_to_clear = 0;
6165 else if (REG_P (target) && TREE_STATIC (exp))
6166 need_to_clear = 1;
6167 else
6168 {
6169 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6170 tree value;
6171
6172 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6173 {
6174 int n_elts_here = tree_low_cst
6175 (int_const_binop (TRUNC_DIV_EXPR,
6176 TYPE_SIZE (TREE_TYPE (value)),
6177 TYPE_SIZE (elttype)), 1);
6178
6179 count += n_elts_here;
6180 if (mostly_zeros_p (value))
6181 zero_count += n_elts_here;
6182 }
6183
6184 /* Clear the entire vector first if there are any missing elements,
6185 or if the incidence of zero elements is >= 75%. */
6186 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6187 }
6188
6189 if (need_to_clear && size > 0 && !vector)
6190 {
6191 if (REG_P (target))
6192 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6193 else
6194 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6195 cleared = 1;
6196 }
6197
6198 /* Inform later passes that the old value is dead. */
6199 if (!cleared && !vector && REG_P (target))
6200 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6201
6202 if (MEM_P (target))
6203 alias = MEM_ALIAS_SET (target);
6204 else
6205 alias = get_alias_set (elttype);
6206
6207 /* Store each element of the constructor into the corresponding
6208 element of TARGET, determined by counting the elements. */
6209 for (idx = 0, i = 0;
6210 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6211 idx++, i += bitsize / elt_size)
6212 {
6213 HOST_WIDE_INT eltpos;
6214 tree value = ce->value;
6215
6216 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6217 if (cleared && initializer_zerop (value))
6218 continue;
6219
6220 if (ce->index)
6221 eltpos = tree_low_cst (ce->index, 1);
6222 else
6223 eltpos = i;
6224
6225 if (vector)
6226 {
6227 /* Vector CONSTRUCTORs should only be built from smaller
6228 vectors in the case of BLKmode vectors. */
6229 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6230 RTVEC_ELT (vector, eltpos)
6231 = expand_normal (value);
6232 }
6233 else
6234 {
6235 enum machine_mode value_mode =
6236 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6237 ? TYPE_MODE (TREE_TYPE (value))
6238 : eltmode;
6239 bitpos = eltpos * elt_size;
6240 store_constructor_field (target, bitsize, bitpos,
6241 value_mode, value, type,
6242 cleared, alias);
6243 }
6244 }
6245
6246 if (vector)
6247 emit_insn (GEN_FCN (icode)
6248 (target,
6249 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6250 break;
6251 }
6252
6253 default:
6254 gcc_unreachable ();
6255 }
6256 }
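
/* A sketch of the clearing heuristic above (illustration only): for
   "int t[8] = { 5, 0, 0, 0, 0, 0, 0, 0 };" seven of the eight
   elements are zero, so 4 * zero_count >= 3 * count holds; the whole
   object is cleared with one block operation and only t[0] is then
   stored explicitly.  */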
6257
6258 /* Store the value of EXP (an expression tree)
6259 into a subfield of TARGET which has mode MODE and occupies
6260 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6261 If MODE is VOIDmode, it means that we are storing into a bit-field.
6262
6263 BITREGION_START is the bitpos of the first bitfield in this region.
6264 BITREGION_END is the bitpos of the ending bitfield in this region.
6265 These two fields are 0 if the C++ memory model does not apply,
6266 or we are not interested in keeping track of bitfield regions.
6267
6268 Always return const0_rtx unless we have something particular to
6269 return.
6270
6271 TYPE is the type of the underlying object.
6272
6273 ALIAS_SET is the alias set for the destination. This value will
6274 (in general) be different from that for TARGET, since TARGET is a
6275 reference to the containing structure.
6276
6277 If NONTEMPORAL is true, try generating a nontemporal store. */
6278
6279 static rtx
6280 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6281 unsigned HOST_WIDE_INT bitregion_start,
6282 unsigned HOST_WIDE_INT bitregion_end,
6283 enum machine_mode mode, tree exp, tree type,
6284 alias_set_type alias_set, bool nontemporal)
6285 {
6286 if (TREE_CODE (exp) == ERROR_MARK)
6287 return const0_rtx;
6288
6289 /* If we have nothing to store, do nothing unless the expression has
6290 side-effects. */
6291 if (bitsize == 0)
6292 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6293
6294 /* If we are storing into an unaligned field of an aligned union that is
6295 in a register, we may have the mode of TARGET being an integer mode but
6296 MODE == BLKmode. In that case, get an aligned object whose size and
6297 alignment are the same as TARGET and store TARGET into it (we can avoid
6298 the store if the field being stored is the entire width of TARGET). Then
6299 call ourselves recursively to store the field into a BLKmode version of
6300 that object. Finally, load from the object into TARGET. This is not
6301 very efficient in general, but should only be slightly more expensive
6302 than the otherwise-required unaligned accesses. Perhaps this can be
6303 cleaned up later. It's tempting to make OBJECT readonly, but it's set
6304 twice, once with emit_move_insn and once via store_field. */
6305
6306 if (mode == BLKmode
6307 && (REG_P (target) || GET_CODE (target) == SUBREG))
6308 {
6309 rtx object = assign_temp (type, 1, 1);
6310 rtx blk_object = adjust_address (object, BLKmode, 0);
6311
6312 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
6313 emit_move_insn (object, target);
6314
6315 store_field (blk_object, bitsize, bitpos,
6316 bitregion_start, bitregion_end,
6317 mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
6318
6319 emit_move_insn (target, object);
6320
6321 /* We want to return the BLKmode version of the data. */
6322 return blk_object;
6323 }
6324
6325 if (GET_CODE (target) == CONCAT)
6326 {
6327 /* We're storing into a struct containing a single __complex. */
6328
6329 gcc_assert (!bitpos);
6330 return store_expr (exp, target, 0, nontemporal);
6331 }
6332
6333 /* If the structure is in a register or if the component
6334 is a bit field, we cannot use addressing to access it.
6335 Use bit-field techniques or SUBREG to store in it. */
6336
6337 if (mode == VOIDmode
6338 || (mode != BLKmode && ! direct_store[(int) mode]
6339 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6340 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6341 || REG_P (target)
6342 || GET_CODE (target) == SUBREG
6343 /* If the field isn't aligned enough to store as an ordinary memref,
6344 store it as a bit field. */
6345 || (mode != BLKmode
6346 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6347 || bitpos % GET_MODE_ALIGNMENT (mode))
6348 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6349 || (bitpos % BITS_PER_UNIT != 0)))
6350 || (bitsize >= 0 && mode != BLKmode
6351 && GET_MODE_BITSIZE (mode) > bitsize)
6352 /* If the RHS and field have constant size and the size of the
6353 RHS isn't the same as that of the bitfield, we must use bitfield
6354 operations. */
6355 || (bitsize >= 0
6356 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6357 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6358 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6359 decl, we must use bitfield operations. */
6360 || (bitsize >= 0
6361 && TREE_CODE (exp) == MEM_REF
6362 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6363 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6364 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6365 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6366 {
6367 rtx temp;
6368 gimple nop_def;
6369
6370 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6371 implies a mask operation. If the precision is the same size as
6372 the field we're storing into, that mask is redundant. This is
6373 particularly common with bit field assignments generated by the
6374 C front end. */
6375 nop_def = get_def_for_expr (exp, NOP_EXPR);
6376 if (nop_def)
6377 {
6378 tree type = TREE_TYPE (exp);
6379 if (INTEGRAL_TYPE_P (type)
6380 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6381 && bitsize == TYPE_PRECISION (type))
6382 {
6383 tree op = gimple_assign_rhs1 (nop_def);
6384 type = TREE_TYPE (op);
6385 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6386 exp = op;
6387 }
6388 }
6389
6390 temp = expand_normal (exp);
6391
6392 /* If BITSIZE is narrower than the size of the type of EXP
6393 we will be narrowing TEMP. Normally, what's wanted are the
6394 low-order bits. However, if EXP's type is a record and this is a
6395 big-endian machine, we want the upper BITSIZE bits. */
6396 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6397 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6398 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6399 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6400 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6401 NULL_RTX, 1);
6402
6403 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6404 if (mode != VOIDmode && mode != BLKmode
6405 && mode != TYPE_MODE (TREE_TYPE (exp)))
6406 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6407
6408 /* If the modes of TEMP and TARGET are both BLKmode, both
6409 must be in memory and BITPOS must be aligned on a byte
6410 boundary. If so, we simply do a block copy. Likewise
6411 for a BLKmode-like TARGET. */
6412 if (GET_MODE (temp) == BLKmode
6413 && (GET_MODE (target) == BLKmode
6414 || (MEM_P (target)
6415 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6416 && (bitpos % BITS_PER_UNIT) == 0
6417 && (bitsize % BITS_PER_UNIT) == 0)))
6418 {
6419 gcc_assert (MEM_P (target) && MEM_P (temp)
6420 && (bitpos % BITS_PER_UNIT) == 0);
6421
6422 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6423 emit_block_move (target, temp,
6424 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6425 / BITS_PER_UNIT),
6426 BLOCK_OP_NORMAL);
6427
6428 return const0_rtx;
6429 }
6430
6431 /* Store the value in the bitfield. */
6432 store_bit_field (target, bitsize, bitpos,
6433 bitregion_start, bitregion_end,
6434 mode, temp);
6435
6436 return const0_rtx;
6437 }
6438 else
6439 {
6440 /* Now build a reference to just the desired component. */
6441 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6442
6443 if (to_rtx == target)
6444 to_rtx = copy_rtx (to_rtx);
6445
6446 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6447 set_mem_alias_set (to_rtx, alias_set);
6448
6449 return store_expr (exp, to_rtx, 0, nontemporal);
6450 }
6451 }
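
/* Illustration only: a C bit-field store such as

       struct { unsigned a : 3, b : 5; } x;
       x.b = v;

   reaches store_field with MODE == VOIDmode, BITSIZE == 5 and, with a
   typical little-endian layout, BITPOS == 3, and is handled by the
   store_bit_field path above.  */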
6452 \f
6453 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6454 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6455 codes and find the ultimate containing object, which we return.
6456
6457 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6458 bit position, and *PUNSIGNEDP to the signedness of the field.
6459 If the position of the field is variable, we store a tree
6460 giving the variable offset (in units) in *POFFSET.
6461 This offset is in addition to the bit position.
6462 If the position is not variable, we store 0 in *POFFSET.
6463
6464 If any of the extraction expressions is volatile,
6465 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6466
6467 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6468 Otherwise, it is a mode that can be used to access the field.
6469
6470 If the field describes a variable-sized object, *PMODE is set to
6471 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6472 this case, but the address of the object can be found.
6473
6474 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6475 look through nodes that serve as markers of a greater alignment than
6476 the one that can be deduced from the expression. These nodes make it
6477 possible for front-ends to prevent temporaries from being created by
6478 the middle-end on alignment considerations. For that purpose, the
6479 normal operating mode at high level is to always pass FALSE so that
6480 the ultimate containing object is really returned; moreover, the
6481 associated predicate handled_component_p will always return TRUE
6482 on these nodes, thus indicating that they are essentially handled
6483 by get_inner_reference. TRUE should only be passed when the caller
6484 is scanning the expression in order to build another representation
6485 and specifically knows how to handle these nodes; as such, this is
6486 the normal operating mode in the RTL expanders. */
6487
6488 tree
6489 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6490 HOST_WIDE_INT *pbitpos, tree *poffset,
6491 enum machine_mode *pmode, int *punsignedp,
6492 int *pvolatilep, bool keep_aligning)
6493 {
6494 tree size_tree = 0;
6495 enum machine_mode mode = VOIDmode;
6496 bool blkmode_bitfield = false;
6497 tree offset = size_zero_node;
6498 double_int bit_offset = double_int_zero;
6499
6500 /* First get the mode, signedness, and size. We do this from just the
6501 outermost expression. */
6502 *pbitsize = -1;
6503 if (TREE_CODE (exp) == COMPONENT_REF)
6504 {
6505 tree field = TREE_OPERAND (exp, 1);
6506 size_tree = DECL_SIZE (field);
6507 if (!DECL_BIT_FIELD (field))
6508 mode = DECL_MODE (field);
6509 else if (DECL_MODE (field) == BLKmode)
6510 blkmode_bitfield = true;
6511 else if (TREE_THIS_VOLATILE (exp)
6512 && flag_strict_volatile_bitfields > 0)
6513 /* Volatile bitfields should be accessed in the mode of the
6514 field's type, not the mode computed based on the bit
6515 size. */
6516 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6517
6518 *punsignedp = DECL_UNSIGNED (field);
6519 }
6520 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6521 {
6522 size_tree = TREE_OPERAND (exp, 1);
6523 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6524 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6525
6526 /* For vector types, if the access has the correct size, use the
6527 mode of the inner type. */
6528 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6529 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6530 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6531 mode = TYPE_MODE (TREE_TYPE (exp));
6532 }
6533 else
6534 {
6535 mode = TYPE_MODE (TREE_TYPE (exp));
6536 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6537
6538 if (mode == BLKmode)
6539 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6540 else
6541 *pbitsize = GET_MODE_BITSIZE (mode);
6542 }
6543
6544 if (size_tree != 0)
6545 {
6546 if (! host_integerp (size_tree, 1))
6547 mode = BLKmode, *pbitsize = -1;
6548 else
6549 *pbitsize = tree_low_cst (size_tree, 1);
6550 }
6551
6552 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6553 and find the ultimate containing object. */
6554 while (1)
6555 {
6556 switch (TREE_CODE (exp))
6557 {
6558 case BIT_FIELD_REF:
6559 bit_offset
6560 = double_int_add (bit_offset,
6561 tree_to_double_int (TREE_OPERAND (exp, 2)));
6562 break;
6563
6564 case COMPONENT_REF:
6565 {
6566 tree field = TREE_OPERAND (exp, 1);
6567 tree this_offset = component_ref_field_offset (exp);
6568
6569 /* If this field hasn't been filled in yet, don't go past it.
6570 This should only happen when folding expressions made during
6571 type construction. */
6572 if (this_offset == 0)
6573 break;
6574
6575 offset = size_binop (PLUS_EXPR, offset, this_offset);
6576 bit_offset = double_int_add (bit_offset,
6577 tree_to_double_int
6578 (DECL_FIELD_BIT_OFFSET (field)));
6579
6580 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6581 }
6582 break;
6583
6584 case ARRAY_REF:
6585 case ARRAY_RANGE_REF:
6586 {
6587 tree index = TREE_OPERAND (exp, 1);
6588 tree low_bound = array_ref_low_bound (exp);
6589 tree unit_size = array_ref_element_size (exp);
6590
6591 /* We assume all arrays have sizes that are a multiple of a byte.
6592 First subtract the lower bound, if any, in the type of the
6593 index, then convert to sizetype and multiply by the size of
6594 the array element. */
6595 if (! integer_zerop (low_bound))
6596 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6597 index, low_bound);
6598
6599 offset = size_binop (PLUS_EXPR, offset,
6600 size_binop (MULT_EXPR,
6601 fold_convert (sizetype, index),
6602 unit_size));
6603 }
6604 break;
6605
6606 case REALPART_EXPR:
6607 break;
6608
6609 case IMAGPART_EXPR:
6610 bit_offset = double_int_add (bit_offset,
6611 uhwi_to_double_int (*pbitsize));
6612 break;
6613
6614 case VIEW_CONVERT_EXPR:
6615 if (keep_aligning && STRICT_ALIGNMENT
6616 && (TYPE_ALIGN (TREE_TYPE (exp))
6617 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6618 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6619 < BIGGEST_ALIGNMENT)
6620 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6621 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6622 goto done;
6623 break;
6624
6625 case MEM_REF:
6626 /* Hand back the decl for MEM[&decl, off]. */
6627 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6628 {
6629 tree off = TREE_OPERAND (exp, 1);
6630 if (!integer_zerop (off))
6631 {
6632 double_int boff, coff = mem_ref_offset (exp);
6633 boff = double_int_lshift (coff,
6634 BITS_PER_UNIT == 8
6635 ? 3 : exact_log2 (BITS_PER_UNIT),
6636 HOST_BITS_PER_DOUBLE_INT, true);
6637 bit_offset = double_int_add (bit_offset, boff);
6638 }
6639 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6640 }
6641 goto done;
6642
6643 default:
6644 goto done;
6645 }
6646
6647 /* If any reference in the chain is volatile, the effect is volatile. */
6648 if (TREE_THIS_VOLATILE (exp))
6649 *pvolatilep = 1;
6650
6651 exp = TREE_OPERAND (exp, 0);
6652 }
6653 done:
6654
6655 /* If OFFSET is constant, see if we can return the whole thing as a
6656 constant bit position. Make sure to handle overflow during
6657 this conversion. */
6658 if (TREE_CODE (offset) == INTEGER_CST)
6659 {
6660 double_int tem = tree_to_double_int (offset);
6661 tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
6662 tem = double_int_lshift (tem,
6663 BITS_PER_UNIT == 8
6664 ? 3 : exact_log2 (BITS_PER_UNIT),
6665 HOST_BITS_PER_DOUBLE_INT, true);
6666 tem = double_int_add (tem, bit_offset);
6667 if (double_int_fits_in_shwi_p (tem))
6668 {
6669 *pbitpos = double_int_to_shwi (tem);
6670 *poffset = offset = NULL_TREE;
6671 }
6672 }
6673
6674 /* Otherwise, split it up. */
6675 if (offset)
6676 {
6677 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6678 if (double_int_negative_p (bit_offset))
6679 {
6680 double_int mask
6681 = double_int_mask (BITS_PER_UNIT == 8
6682 ? 3 : exact_log2 (BITS_PER_UNIT));
6683 double_int tem = double_int_and_not (bit_offset, mask);
6684 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6685 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6686 bit_offset = double_int_sub (bit_offset, tem);
6687 tem = double_int_rshift (tem,
6688 BITS_PER_UNIT == 8
6689 ? 3 : exact_log2 (BITS_PER_UNIT),
6690 HOST_BITS_PER_DOUBLE_INT, true);
6691 offset = size_binop (PLUS_EXPR, offset,
6692 double_int_to_tree (sizetype, tem));
6693 }
6694
6695 *pbitpos = double_int_to_shwi (bit_offset);
6696 *poffset = offset;
6697 }
6698
6699 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6700 if (mode == VOIDmode
6701 && blkmode_bitfield
6702 && (*pbitpos % BITS_PER_UNIT) == 0
6703 && (*pbitsize % BITS_PER_UNIT) == 0)
6704 *pmode = BLKmode;
6705 else
6706 *pmode = mode;
6707
6708 return exp;
6709 }
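
/* A worked sketch (assuming a 4-byte int and no padding): for the
   reference "s.in.c[3]" with

       struct S { int i; struct { char c[10]; } in; } s;

   this returns the decl "s" with *PBITSIZE == 8, *PBITPOS == 56
   (byte 4 for the field "in" plus 3 bytes of array index, in bits)
   and *POFFSET == NULL_TREE, since everything is constant.  */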
6710
6711 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6712 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6713 EXP is marked as PACKED. */
6714
6715 bool
6716 contains_packed_reference (const_tree exp)
6717 {
6718 bool packed_p = false;
6719
6720 while (1)
6721 {
6722 switch (TREE_CODE (exp))
6723 {
6724 case COMPONENT_REF:
6725 {
6726 tree field = TREE_OPERAND (exp, 1);
6727 packed_p = DECL_PACKED (field)
6728 || TYPE_PACKED (TREE_TYPE (field))
6729 || TYPE_PACKED (TREE_TYPE (exp));
6730 if (packed_p)
6731 goto done;
6732 }
6733 break;
6734
6735 case BIT_FIELD_REF:
6736 case ARRAY_REF:
6737 case ARRAY_RANGE_REF:
6738 case REALPART_EXPR:
6739 case IMAGPART_EXPR:
6740 case VIEW_CONVERT_EXPR:
6741 break;
6742
6743 default:
6744 goto done;
6745 }
6746 exp = TREE_OPERAND (exp, 0);
6747 }
6748 done:
6749 return packed_p;
6750 }
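
/* For instance (a sketch using the GNU packed extension): given

       struct p { char c; int i; } __attribute__ ((packed)) v;

   a reference to "v.i" makes this predicate true, since DECL_PACKED
   is set on the field "i".  */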
6751
6752 /* Return a tree of sizetype representing the size, in bytes, of the element
6753 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6754
6755 tree
6756 array_ref_element_size (tree exp)
6757 {
6758 tree aligned_size = TREE_OPERAND (exp, 3);
6759 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6760 location_t loc = EXPR_LOCATION (exp);
6761
6762 /* If a size was specified in the ARRAY_REF, it's the size measured
6763 in alignment units of the element type. So multiply by that value. */
6764 if (aligned_size)
6765 {
6766 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6767 sizetype from another type of the same width and signedness. */
6768 if (TREE_TYPE (aligned_size) != sizetype)
6769 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6770 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6771 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6772 }
6773
6774 /* Otherwise, take the size from that of the element type. Substitute
6775 any PLACEHOLDER_EXPR that we have. */
6776 else
6777 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6778 }
6779
6780 /* Return a tree representing the lower bound of the array mentioned in
6781 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6782
6783 tree
6784 array_ref_low_bound (tree exp)
6785 {
6786 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6787
6788 /* If a lower bound is specified in EXP, use it. */
6789 if (TREE_OPERAND (exp, 2))
6790 return TREE_OPERAND (exp, 2);
6791
6792 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6793 substituting for a PLACEHOLDER_EXPR as needed. */
6794 if (domain_type && TYPE_MIN_VALUE (domain_type))
6795 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6796
6797 /* Otherwise, return a zero of the appropriate type. */
6798 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6799 }
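
/* Note (illustration): C arrays always have a zero lower bound, so
   the last case applies; non-zero bounds come from other front ends,
   e.g. an Ada array declared over the range 5 .. 10 has a lower
   bound of 5 here.  */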
6800
6801 /* Returns true if REF is an array reference to an array at the end of
6802 a structure. If this is the case, the array may be allocated larger
6803 than its upper bound implies. */
6804
6805 bool
6806 array_at_struct_end_p (tree ref)
6807 {
6808 if (TREE_CODE (ref) != ARRAY_REF
6809 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6810 return false;
6811
6812 while (handled_component_p (ref))
6813 {
6814 /* If the reference chain contains a component reference to a
6815 non-union type and another field follows it, the reference
6816 is not at the end of a structure. */
6817 if (TREE_CODE (ref) == COMPONENT_REF
6818 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6819 {
6820 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6821 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6822 nextf = DECL_CHAIN (nextf);
6823 if (nextf)
6824 return false;
6825 }
6826
6827 ref = TREE_OPERAND (ref, 0);
6828 }
6829
6830 /* If the reference is based on a declared entity, the size of the array
6831 is constrained by its given domain. */
6832 if (DECL_P (ref))
6833 return false;
6834
6835 return true;
6836 }
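
/* E.g. (sketch): with the old-style trailing array

       struct tail { int n; int a[1]; };

   an access "p->a[i]" through a pointer is not based on a decl, so
   the array may have been allocated larger than its bound and this
   returns true; "fixed.a[i]" on a declared variable returns false.  */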
6837
6838 /* Return a tree representing the upper bound of the array mentioned in
6839 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6840
6841 tree
6842 array_ref_up_bound (tree exp)
6843 {
6844 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6845
6846 /* If there is a domain type and it has an upper bound, use it, substituting
6847 for a PLACEHOLDER_EXPR as needed. */
6848 if (domain_type && TYPE_MAX_VALUE (domain_type))
6849 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6850
6851 /* Otherwise fail. */
6852 return NULL_TREE;
6853 }
6854
6855 /* Return a tree representing the offset, in bytes, of the field referenced
6856 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6857
6858 tree
6859 component_ref_field_offset (tree exp)
6860 {
6861 tree aligned_offset = TREE_OPERAND (exp, 2);
6862 tree field = TREE_OPERAND (exp, 1);
6863 location_t loc = EXPR_LOCATION (exp);
6864
6865 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6866 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6867 value. */
6868 if (aligned_offset)
6869 {
6870 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6871 sizetype from another type of the same width and signedness. */
6872 if (TREE_TYPE (aligned_offset) != sizetype)
6873 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6874 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6875 size_int (DECL_OFFSET_ALIGN (field)
6876 / BITS_PER_UNIT));
6877 }
6878
6879 /* Otherwise, take the offset from that of the field. Substitute
6880 any PLACEHOLDER_EXPR that we have. */
6881 else
6882 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6883 }
6884
6885 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6886
6887 static unsigned HOST_WIDE_INT
6888 target_align (const_tree target)
6889 {
6890 /* We might have a chain of nested references with intermediate
6891 misaligning bit-field components, so we need to recurse to find out. */
6892
6893 unsigned HOST_WIDE_INT this_align, outer_align;
6894
6895 switch (TREE_CODE (target))
6896 {
6897 case BIT_FIELD_REF:
6898 return 1;
6899
6900 case COMPONENT_REF:
6901 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6902 outer_align = target_align (TREE_OPERAND (target, 0));
6903 return MIN (this_align, outer_align);
6904
6905 case ARRAY_REF:
6906 case ARRAY_RANGE_REF:
6907 this_align = TYPE_ALIGN (TREE_TYPE (target));
6908 outer_align = target_align (TREE_OPERAND (target, 0));
6909 return MIN (this_align, outer_align);
6910
6911 CASE_CONVERT:
6912 case NON_LVALUE_EXPR:
6913 case VIEW_CONVERT_EXPR:
6914 this_align = TYPE_ALIGN (TREE_TYPE (target));
6915 outer_align = target_align (TREE_OPERAND (target, 0));
6916 return MAX (this_align, outer_align);
6917
6918 default:
6919 return TYPE_ALIGN (TREE_TYPE (target));
6920 }
6921 }
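
/* E.g. (sketch): for an assignment to "s.arr[i]", where "arr" is an
   array of doubles inside an ordinarily aligned struct, this returns
   the MIN of the element type's alignment and the alignment computed
   for "s"; a BIT_FIELD_REF destination conservatively yields 1 bit.  */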
6922
6923 \f
6924 /* Given an rtx VALUE that may contain additions and multiplications, return
6925 an equivalent value that just refers to a register, memory, or constant.
6926 This is done by generating instructions to perform the arithmetic and
6927 returning a pseudo-register containing the value.
6928
6929 The returned value may be a REG, SUBREG, MEM or constant. */
6930
6931 rtx
6932 force_operand (rtx value, rtx target)
6933 {
6934 rtx op1, op2;
6935 /* Use subtarget as the target for operand 0 of a binary operation. */
6936 rtx subtarget = get_subtarget (target);
6937 enum rtx_code code = GET_CODE (value);
6938
6939 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6940 if (code == SUBREG
6941 && !REG_P (SUBREG_REG (value))
6942 && !MEM_P (SUBREG_REG (value)))
6943 {
6944 value
6945 = simplify_gen_subreg (GET_MODE (value),
6946 force_reg (GET_MODE (SUBREG_REG (value)),
6947 force_operand (SUBREG_REG (value),
6948 NULL_RTX)),
6949 GET_MODE (SUBREG_REG (value)),
6950 SUBREG_BYTE (value));
6951 code = GET_CODE (value);
6952 }
6953
6954 /* Check for a PIC address load. */
6955 if ((code == PLUS || code == MINUS)
6956 && XEXP (value, 0) == pic_offset_table_rtx
6957 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6958 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6959 || GET_CODE (XEXP (value, 1)) == CONST))
6960 {
6961 if (!subtarget)
6962 subtarget = gen_reg_rtx (GET_MODE (value));
6963 emit_move_insn (subtarget, value);
6964 return subtarget;
6965 }
6966
6967 if (ARITHMETIC_P (value))
6968 {
6969 op2 = XEXP (value, 1);
6970 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6971 subtarget = 0;
6972 if (code == MINUS && CONST_INT_P (op2))
6973 {
6974 code = PLUS;
6975 op2 = negate_rtx (GET_MODE (value), op2);
6976 }
6977
6978 /* Check for an addition with OP2 a constant integer and our first
6979 operand a PLUS of a virtual register and something else. In that
6980 case, we want to emit the sum of the virtual register and the
6981 constant first and then add the other value. This allows virtual
6982 register instantiation to simply modify the constant rather than
6983 creating another one around this addition. */
6984 if (code == PLUS && CONST_INT_P (op2)
6985 && GET_CODE (XEXP (value, 0)) == PLUS
6986 && REG_P (XEXP (XEXP (value, 0), 0))
6987 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6988 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6989 {
6990 rtx temp = expand_simple_binop (GET_MODE (value), code,
6991 XEXP (XEXP (value, 0), 0), op2,
6992 subtarget, 0, OPTAB_LIB_WIDEN);
6993 return expand_simple_binop (GET_MODE (value), code, temp,
6994 force_operand (XEXP (XEXP (value,
6995 0), 1), 0),
6996 target, 0, OPTAB_LIB_WIDEN);
6997 }
6998
6999 op1 = force_operand (XEXP (value, 0), subtarget);
7000 op2 = force_operand (op2, NULL_RTX);
7001 switch (code)
7002 {
7003 case MULT:
7004 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7005 case DIV:
7006 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7007 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7008 target, 1, OPTAB_LIB_WIDEN);
7009 else
7010 return expand_divmod (0,
7011 FLOAT_MODE_P (GET_MODE (value))
7012 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7013 GET_MODE (value), op1, op2, target, 0);
7014 case MOD:
7015 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7016 target, 0);
7017 case UDIV:
7018 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7019 target, 1);
7020 case UMOD:
7021 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7022 target, 1);
7023 case ASHIFTRT:
7024 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7025 target, 0, OPTAB_LIB_WIDEN);
7026 default:
7027 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7028 target, 1, OPTAB_LIB_WIDEN);
7029 }
7030 }
7031 if (UNARY_P (value))
7032 {
7033 if (!target)
7034 target = gen_reg_rtx (GET_MODE (value));
7035 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7036 switch (code)
7037 {
7038 case ZERO_EXTEND:
7039 case SIGN_EXTEND:
7040 case TRUNCATE:
7041 case FLOAT_EXTEND:
7042 case FLOAT_TRUNCATE:
7043 convert_move (target, op1, code == ZERO_EXTEND);
7044 return target;
7045
7046 case FIX:
7047 case UNSIGNED_FIX:
7048 expand_fix (target, op1, code == UNSIGNED_FIX);
7049 return target;
7050
7051 case FLOAT:
7052 case UNSIGNED_FLOAT:
7053 expand_float (target, op1, code == UNSIGNED_FLOAT);
7054 return target;
7055
7056 default:
7057 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7058 }
7059 }
7060
7061 #ifdef INSN_SCHEDULING
7062 /* On machines that have insn scheduling, we want all memory references to be
7063 explicit, so we need to deal with such paradoxical SUBREGs. */
7064 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7065 value
7066 = simplify_gen_subreg (GET_MODE (value),
7067 force_reg (GET_MODE (SUBREG_REG (value)),
7068 force_operand (SUBREG_REG (value),
7069 NULL_RTX)),
7070 GET_MODE (SUBREG_REG (value)),
7071 SUBREG_BYTE (value));
7072 #endif
7073
7074 return value;
7075 }
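/* An illustrative sketch of what force_operand does: given
   VALUE = (plus:SI (reg:SI 100) (const_int 8)) and no usable TARGET,
   it emits the addition via expand_simple_binop and hands back a
   pseudo-register holding the sum, so the caller always receives a
   REG, SUBREG, MEM or constant that is valid as an insn operand.
   (Register numbers here are arbitrary.)  */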
7076 \f
7077 /* Subroutine of expand_expr: return nonzero iff there is no way that
7078 EXP can reference X, which is being modified. TOP_P is nonzero if this
7079 call is going to be used to determine whether we need a temporary
7080 for EXP, as opposed to a recursive call to this function.
7081
7082 It is always safe for this routine to return zero since it merely
7083 searches for optimization opportunities. */
7084
7085 int
7086 safe_from_p (const_rtx x, tree exp, int top_p)
7087 {
7088 rtx exp_rtl = 0;
7089 int i, nops;
7090
7091 if (x == 0
7092 /* If EXP has varying size, we MUST use a target since we currently
7093 have no way of allocating temporaries of variable size
7094 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7095 So we assume here that something at a higher level has prevented a
7096 clash. This is somewhat bogus, but the best we can do. Only
7097 do this when X is BLKmode and when we are at the top level. */
7098 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7099 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7100 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7101 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7102 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7103 != INTEGER_CST)
7104 && GET_MODE (x) == BLKmode)
7105 /* If X is in the outgoing argument area, it is always safe. */
7106 || (MEM_P (x)
7107 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7108 || (GET_CODE (XEXP (x, 0)) == PLUS
7109 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7110 return 1;
7111
7112 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7113 find the underlying pseudo. */
7114 if (GET_CODE (x) == SUBREG)
7115 {
7116 x = SUBREG_REG (x);
7117 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7118 return 0;
7119 }
7120
7121 /* Now look at our tree code and possibly recurse. */
7122 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7123 {
7124 case tcc_declaration:
7125 exp_rtl = DECL_RTL_IF_SET (exp);
7126 break;
7127
7128 case tcc_constant:
7129 return 1;
7130
7131 case tcc_exceptional:
7132 if (TREE_CODE (exp) == TREE_LIST)
7133 {
7134 while (1)
7135 {
7136 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7137 return 0;
7138 exp = TREE_CHAIN (exp);
7139 if (!exp)
7140 return 1;
7141 if (TREE_CODE (exp) != TREE_LIST)
7142 return safe_from_p (x, exp, 0);
7143 }
7144 }
7145 else if (TREE_CODE (exp) == CONSTRUCTOR)
7146 {
7147 constructor_elt *ce;
7148 unsigned HOST_WIDE_INT idx;
7149
7150 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
7151 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7152 || !safe_from_p (x, ce->value, 0))
7153 return 0;
7154 return 1;
7155 }
7156 else if (TREE_CODE (exp) == ERROR_MARK)
7157 return 1; /* An already-visited SAVE_EXPR? */
7158 else
7159 return 0;
7160
7161 case tcc_statement:
7162 /* The only case we look at here is the DECL_INITIAL inside a
7163 DECL_EXPR. */
7164 return (TREE_CODE (exp) != DECL_EXPR
7165 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7166 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7167 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7168
7169 case tcc_binary:
7170 case tcc_comparison:
7171 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7172 return 0;
7173 /* Fall through. */
7174
7175 case tcc_unary:
7176 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7177
7178 case tcc_expression:
7179 case tcc_reference:
7180 case tcc_vl_exp:
7181 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7182 the expression. If it is set, we conflict iff we are that rtx or
7183 both are in memory. Otherwise, we check all operands of the
7184 expression recursively. */
7185
7186 switch (TREE_CODE (exp))
7187 {
7188 case ADDR_EXPR:
7189 /* If the operand is static or we are static, we can't conflict.
7190 Likewise if we don't conflict with the operand at all. */
7191 if (staticp (TREE_OPERAND (exp, 0))
7192 || TREE_STATIC (exp)
7193 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7194 return 1;
7195
7196 /* Otherwise, the only way this can conflict is if we are taking
7197 the address of a DECL whose address is part of X, which is
7198 very rare. */
7199 exp = TREE_OPERAND (exp, 0);
7200 if (DECL_P (exp))
7201 {
7202 if (!DECL_RTL_SET_P (exp)
7203 || !MEM_P (DECL_RTL (exp)))
7204 return 0;
7205 else
7206 exp_rtl = XEXP (DECL_RTL (exp), 0);
7207 }
7208 break;
7209
7210 case MEM_REF:
7211 if (MEM_P (x)
7212 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7213 get_alias_set (exp)))
7214 return 0;
7215 break;
7216
7217 case CALL_EXPR:
7218 /* Assume that the call will clobber all hard registers and
7219 all of memory. */
7220 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7221 || MEM_P (x))
7222 return 0;
7223 break;
7224
7225 case WITH_CLEANUP_EXPR:
7226 case CLEANUP_POINT_EXPR:
7227 /* Lowered by gimplify.c. */
7228 gcc_unreachable ();
7229
7230 case SAVE_EXPR:
7231 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7232
7233 default:
7234 break;
7235 }
7236
7237 /* If we have an rtx, we do not need to scan our operands. */
7238 if (exp_rtl)
7239 break;
7240
7241 nops = TREE_OPERAND_LENGTH (exp);
7242 for (i = 0; i < nops; i++)
7243 if (TREE_OPERAND (exp, i) != 0
7244 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7245 return 0;
7246
7247 break;
7248
7249 case tcc_type:
7250 /* Should never get a type here. */
7251 gcc_unreachable ();
7252 }
7253
7254 /* If we have an rtl, find any enclosed object. Then see if we conflict
7255 with it. */
7256 if (exp_rtl)
7257 {
7258 if (GET_CODE (exp_rtl) == SUBREG)
7259 {
7260 exp_rtl = SUBREG_REG (exp_rtl);
7261 if (REG_P (exp_rtl)
7262 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7263 return 0;
7264 }
7265
7266 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7267 are memory and they conflict. */
7268 return ! (rtx_equal_p (x, exp_rtl)
7269 || (MEM_P (x) && MEM_P (exp_rtl)
7270 && true_dependence (exp_rtl, VOIDmode, x)));
7271 }
7272
7273 /* If we reach here, it is safe. */
7274 return 1;
7275 }
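/* Illustrative consequences of the rules above: a hard register X is
   never considered safe from a CALL_EXPR (the call may clobber any
   hard register), while any X is trivially safe from a constant.
   Since a zero return merely forgoes an optimization, being
   conservative here never affects correctness.  */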
7276
7277 \f
7278 /* Return the highest power of two that EXP is known to be a multiple of.
7279 This is used in updating alignment of MEMs in array references. */
7280
7281 unsigned HOST_WIDE_INT
7282 highest_pow2_factor (const_tree exp)
7283 {
7284 unsigned HOST_WIDE_INT c0, c1;
7285
7286 switch (TREE_CODE (exp))
7287 {
7288 case INTEGER_CST:
7289 /* We can find the lowest bit that's a one. If the low
7290 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7291 We need to handle this case since we can find it in a COND_EXPR,
7292 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7293 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7294 later ICE. */
7295 if (TREE_OVERFLOW (exp))
7296 return BIGGEST_ALIGNMENT;
7297 else
7298 {
7299 /* Note: tree_low_cst is intentionally not used here;
7300 we don't care about the upper bits. */
7301 c0 = TREE_INT_CST_LOW (exp);
7302 c0 &= -c0;
7303 return c0 ? c0 : BIGGEST_ALIGNMENT;
7304 }
7305 break;
7306
7307 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7308 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7309 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7310 return MIN (c0, c1);
7311
7312 case MULT_EXPR:
7313 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7314 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7315 return c0 * c1;
7316
7317 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7318 case CEIL_DIV_EXPR:
7319 if (integer_pow2p (TREE_OPERAND (exp, 1))
7320 && host_integerp (TREE_OPERAND (exp, 1), 1))
7321 {
7322 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7323 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7324 return MAX (1, c0 / c1);
7325 }
7326 break;
7327
7328 case BIT_AND_EXPR:
7329 /* The highest power of two of a bit-and expression is the maximum of
7330 that of its operands. We typically get here for a complex LHS and
7331 a constant negative power of two on the RHS to force an explicit
7332 alignment, so don't bother looking at the LHS. */
7333 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7334
7335 CASE_CONVERT:
7336 case SAVE_EXPR:
7337 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7338
7339 case COMPOUND_EXPR:
7340 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7341
7342 case COND_EXPR:
7343 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7344 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7345 return MIN (c0, c1);
7346
7347 default:
7348 break;
7349 }
7350
7351 return 1;
7352 }
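/* A few illustrative values: for the constant 24 this returns
   24 & -24 == 8; for 24 + x it returns MIN (8, 1) == 1; and for a
   MULT_EXPR the factors multiply, so (x * 4) * 8 yields 32.  The
   default of 1 is always safe because the result is only ever used
   to raise the alignment assumed for a MEM.  */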
7353
7354 /* Similar, except that the alignment requirements of TARGET are
7355 taken into account. Assume it is at least as aligned as its
7356 type, unless it is a COMPONENT_REF in which case the layout of
7357 the structure gives the alignment. */
7358
7359 static unsigned HOST_WIDE_INT
7360 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7361 {
7362 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7363 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7364
7365 return MAX (factor, talign);
7366 }
7367 \f
7368 #ifdef HAVE_conditional_move
7369 /* Convert the tree comparison code TCODE to the rtl one where the
7370 signedness is UNSIGNEDP. */
7371
7372 static enum rtx_code
7373 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7374 {
7375 enum rtx_code code;
7376 switch (tcode)
7377 {
7378 case EQ_EXPR:
7379 code = EQ;
7380 break;
7381 case NE_EXPR:
7382 code = NE;
7383 break;
7384 case LT_EXPR:
7385 code = unsignedp ? LTU : LT;
7386 break;
7387 case LE_EXPR:
7388 code = unsignedp ? LEU : LE;
7389 break;
7390 case GT_EXPR:
7391 code = unsignedp ? GTU : GT;
7392 break;
7393 case GE_EXPR:
7394 code = unsignedp ? GEU : GE;
7395 break;
7396 case UNORDERED_EXPR:
7397 code = UNORDERED;
7398 break;
7399 case ORDERED_EXPR:
7400 code = ORDERED;
7401 break;
7402 case UNLT_EXPR:
7403 code = UNLT;
7404 break;
7405 case UNLE_EXPR:
7406 code = UNLE;
7407 break;
7408 case UNGT_EXPR:
7409 code = UNGT;
7410 break;
7411 case UNGE_EXPR:
7412 code = UNGE;
7413 break;
7414 case UNEQ_EXPR:
7415 code = UNEQ;
7416 break;
7417 case LTGT_EXPR:
7418 code = LTGT;
7419 break;
7420
7421 default:
7422 gcc_unreachable ();
7423 }
7424 return code;
7425 }
7426 #endif
7427
7428 /* Subroutine of expand_expr. Expand the two operands of a binary
7429 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7430 The value may be stored in TARGET if TARGET is nonzero. The
7431 MODIFIER argument is as documented by expand_expr. */
7432
7433 static void
7434 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7435 enum expand_modifier modifier)
7436 {
7437 if (! safe_from_p (target, exp1, 1))
7438 target = 0;
7439 if (operand_equal_p (exp0, exp1, 0))
7440 {
7441 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7442 *op1 = copy_rtx (*op0);
7443 }
7444 else
7445 {
7446 /* If we need to preserve evaluation order, copy exp0 into its own
7447 temporary variable so that it can't be clobbered by exp1. */
7448 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7449 exp0 = save_expr (exp0);
7450 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7451 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7452 }
7453 }
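/* For example, expanding the two operands of x * x takes the
   operand_equal_p path above: the tree is expanded only once and the
   second operand is a copy_rtx of the first, so no duplicate
   computation is emitted.  */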
7454
7455 \f
7456 /* Return a MEM that contains constant EXP. DEFER is as for
7457 output_constant_def and MODIFIER is as for expand_expr. */
7458
7459 static rtx
7460 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7461 {
7462 rtx mem;
7463
7464 mem = output_constant_def (exp, defer);
7465 if (modifier != EXPAND_INITIALIZER)
7466 mem = use_anchored_address (mem);
7467 return mem;
7468 }
7469
7470 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7471 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7472
7473 static rtx
7474 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7475 enum expand_modifier modifier, addr_space_t as)
7476 {
7477 rtx result, subtarget;
7478 tree inner, offset;
7479 HOST_WIDE_INT bitsize, bitpos;
7480 int volatilep, unsignedp;
7481 enum machine_mode mode1;
7482
7483 /* If we are taking the address of a constant and are at the top level,
7484 we have to use output_constant_def since we can't call force_const_mem
7485 at top level. */
7486 /* ??? This should be considered a front-end bug. We should not be
7487 generating ADDR_EXPR of something that isn't an LVALUE. The only
7488 exception here is STRING_CST. */
7489 if (CONSTANT_CLASS_P (exp))
7490 {
7491 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7492 if (modifier < EXPAND_SUM)
7493 result = force_operand (result, target);
7494 return result;
7495 }
7496
7497 /* Everything must be something allowed by is_gimple_addressable. */
7498 switch (TREE_CODE (exp))
7499 {
7500 case INDIRECT_REF:
7501 /* This case will happen via recursion for &a->b. */
7502 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7503
7504 case MEM_REF:
7505 {
7506 tree tem = TREE_OPERAND (exp, 0);
7507 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7508 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7509 return expand_expr (tem, target, tmode, modifier);
7510 }
7511
7512 case CONST_DECL:
7513 /* Expand the initializer like constants above. */
7514 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7515 0, modifier), 0);
7516 if (modifier < EXPAND_SUM)
7517 result = force_operand (result, target);
7518 return result;
7519
7520 case REALPART_EXPR:
7521 /* The real part of the complex number is always first, therefore
7522 the address is the same as the address of the parent object. */
7523 offset = 0;
7524 bitpos = 0;
7525 inner = TREE_OPERAND (exp, 0);
7526 break;
7527
7528 case IMAGPART_EXPR:
7529 /* The imaginary part of the complex number is always second.
7530 The expression is therefore always offset by the size of the
7531 scalar type. */
7532 offset = 0;
7533 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7534 inner = TREE_OPERAND (exp, 0);
7535 break;
7536
7537 default:
7538 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7539 expand_expr, as that can have various side effects; LABEL_DECLs for
7540 example, may not have their DECL_RTL set yet. Expand the rtl of
7541 CONSTRUCTORs too, which should yield a memory reference for the
7542 constructor's contents. Assume language specific tree nodes can
7543 be expanded in some interesting way. */
7544 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7545 if (DECL_P (exp)
7546 || TREE_CODE (exp) == CONSTRUCTOR
7547 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7548 {
7549 result = expand_expr (exp, target, tmode,
7550 modifier == EXPAND_INITIALIZER
7551 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7552
7553 /* If the DECL isn't in memory, then the DECL wasn't properly
7554 marked TREE_ADDRESSABLE, which will be either a front-end
7555 or a tree optimizer bug. */
7556
7557 if (TREE_ADDRESSABLE (exp)
7558 && ! MEM_P (result)
7559 && ! targetm.calls.allocate_stack_slots_for_args())
7560 {
7561 error ("local frame unavailable (naked function?)");
7562 return result;
7563 }
7564 else
7565 gcc_assert (MEM_P (result));
7566 result = XEXP (result, 0);
7567
7568 /* ??? Is this needed anymore? */
7569 if (DECL_P (exp))
7570 TREE_USED (exp) = 1;
7571
7572 if (modifier != EXPAND_INITIALIZER
7573 && modifier != EXPAND_CONST_ADDRESS
7574 && modifier != EXPAND_SUM)
7575 result = force_operand (result, target);
7576 return result;
7577 }
7578
7579 /* Pass FALSE as the last argument to get_inner_reference although
7580 we are expanding to RTL. The rationale is that we know how to
7581 handle "aligning nodes" here: we can just bypass them because
7582 they won't change the final object whose address will be returned
7583 (they actually exist only for that purpose). */
7584 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7585 &mode1, &unsignedp, &volatilep, false);
7586 break;
7587 }
7588
7589 /* We must have made progress. */
7590 gcc_assert (inner != exp);
7591
7592 subtarget = offset || bitpos ? NULL_RTX : target;
7593 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7594 inner alignment, force the inner to be sufficiently aligned. */
7595 if (CONSTANT_CLASS_P (inner)
7596 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7597 {
7598 inner = copy_node (inner);
7599 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7600 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7601 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7602 }
7603 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7604
7605 if (offset)
7606 {
7607 rtx tmp;
7608
7609 if (modifier != EXPAND_NORMAL)
7610 result = force_operand (result, NULL);
7611 tmp = expand_expr (offset, NULL_RTX, tmode,
7612 modifier == EXPAND_INITIALIZER
7613 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7614
7615 result = convert_memory_address_addr_space (tmode, result, as);
7616 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7617
7618 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7619 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7620 else
7621 {
7622 subtarget = bitpos ? NULL_RTX : target;
7623 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7624 1, OPTAB_LIB_WIDEN);
7625 }
7626 }
7627
7628 if (bitpos)
7629 {
7630 /* Someone beforehand should have rejected taking the address
7631 of such an object. */
7632 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7633
7634 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7635 if (modifier < EXPAND_SUM)
7636 result = force_operand (result, target);
7637 }
7638
7639 return result;
7640 }
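/* An illustrative walk-through: for &s.f, where field f lives at byte
   offset 4 of s (bitpos 32 with 8-bit units), get_inner_reference
   returns s as the inner object, the recursive call yields the
   address of s, and the bitpos handling above adds the constant 4,
   giving roughly (plus (address-of-s) (const_int 4)).  */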
7641
7642 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7643 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7644
7645 static rtx
7646 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7647 enum expand_modifier modifier)
7648 {
7649 addr_space_t as = ADDR_SPACE_GENERIC;
7650 enum machine_mode address_mode = Pmode;
7651 enum machine_mode pointer_mode = ptr_mode;
7652 enum machine_mode rmode;
7653 rtx result;
7654
7655 /* Target mode of VOIDmode says "whatever's natural". */
7656 if (tmode == VOIDmode)
7657 tmode = TYPE_MODE (TREE_TYPE (exp));
7658
7659 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7660 {
7661 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7662 address_mode = targetm.addr_space.address_mode (as);
7663 pointer_mode = targetm.addr_space.pointer_mode (as);
7664 }
7665
7666 /* We can get called with some Weird Things if the user does silliness
7667 like "(short) &a". In that case, convert_memory_address won't do
7668 the right thing, so ignore the given target mode. */
7669 if (tmode != address_mode && tmode != pointer_mode)
7670 tmode = address_mode;
7671
7672 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7673 tmode, modifier, as);
7674
7675 /* Despite expand_expr's claim that TMODE may be ignored when not
7676 strictly convenient, things break if we don't honor it. Note
7677 that combined with the above, we only do this for pointer modes. */
7678 rmode = GET_MODE (result);
7679 if (rmode == VOIDmode)
7680 rmode = tmode;
7681 if (rmode != tmode)
7682 result = convert_memory_address_addr_space (tmode, result, as);
7683
7684 return result;
7685 }
7686
7687 /* Generate code for computing CONSTRUCTOR EXP.
7688 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7689 is TRUE, NULL is returned instead of creating a temporary
7690 variable in memory, and the caller must handle that case differently. */
7691
7692 static rtx
7693 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7694 bool avoid_temp_mem)
7695 {
7696 tree type = TREE_TYPE (exp);
7697 enum machine_mode mode = TYPE_MODE (type);
7698
7699 /* Try to avoid creating a temporary at all. This is possible
7700 if all of the initializer is zero.
7701 FIXME: try to handle all [0..255] repeated-byte initializers
7702 with memset. */
7703 if (TREE_STATIC (exp)
7704 && !TREE_ADDRESSABLE (exp)
7705 && target != 0 && mode == BLKmode
7706 && all_zeros_p (exp))
7707 {
7708 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7709 return target;
7710 }
7711
7712 /* All elts simple constants => refer to a constant in memory. But
7713 if this is a non-BLKmode mode, let it store a field at a time
7714 since that should make a CONST_INT or CONST_DOUBLE when we
7715 fold. Likewise, if we have a target we can use, it is best to
7716 store directly into the target unless the type is large enough
7717 that memcpy will be used. If we are making an initializer and
7718 all operands are constant, put it in memory as well.
7719
7720 FIXME: Avoid trying to fill vector constructors piece-meal.
7721 Output them with output_constant_def below unless we're sure
7722 they're zeros. This should go away when vector initializers
7723 are treated like VECTOR_CST instead of arrays. */
7724 if ((TREE_STATIC (exp)
7725 && ((mode == BLKmode
7726 && ! (target != 0 && safe_from_p (target, exp, 1)))
7727 || TREE_ADDRESSABLE (exp)
7728 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7729 && (! MOVE_BY_PIECES_P
7730 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7731 TYPE_ALIGN (type)))
7732 && ! mostly_zeros_p (exp))))
7733 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7734 && TREE_CONSTANT (exp)))
7735 {
7736 rtx constructor;
7737
7738 if (avoid_temp_mem)
7739 return NULL_RTX;
7740
7741 constructor = expand_expr_constant (exp, 1, modifier);
7742
7743 if (modifier != EXPAND_CONST_ADDRESS
7744 && modifier != EXPAND_INITIALIZER
7745 && modifier != EXPAND_SUM)
7746 constructor = validize_mem (constructor);
7747
7748 return constructor;
7749 }
7750
7751 /* Handle calls that pass values in multiple non-contiguous
7752 locations. The Irix 6 ABI has examples of this. */
7753 if (target == 0 || ! safe_from_p (target, exp, 1)
7754 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7755 {
7756 if (avoid_temp_mem)
7757 return NULL_RTX;
7758
7759 target
7760 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7761 | (TREE_READONLY (exp)
7762 * TYPE_QUAL_CONST))),
7763 TREE_ADDRESSABLE (exp), 1);
7764 }
7765
7766 store_constructor (exp, target, 0, int_expr_size (exp));
7767 return target;
7768 }
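/* For example, a static all-zero BLKmode initializer, say
   struct S s = { 0 }, expanded with a usable TARGET takes the first
   shortcut above and becomes a single clear_storage call, with no
   temporary and no constant-pool entry.  */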
7769
7770
7771 /* expand_expr: generate code for computing expression EXP.
7772 An rtx for the computed value is returned. The value is never null.
7773 In the case of a void EXP, const0_rtx is returned.
7774
7775 The value may be stored in TARGET if TARGET is nonzero.
7776 TARGET is just a suggestion; callers must assume that
7777 the rtx returned may not be the same as TARGET.
7778
7779 If TARGET is CONST0_RTX, it means that the value will be ignored.
7780
7781 If TMODE is not VOIDmode, it suggests generating the
7782 result in mode TMODE. But this is done only when convenient.
7783 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7784 TMODE is just a suggestion; callers must assume that
7785 the rtx returned may not have mode TMODE.
7786
7787 Note that TARGET may have neither TMODE nor MODE. In that case, it
7788 probably will not be used.
7789
7790 If MODIFIER is EXPAND_SUM then when EXP is an addition
7791 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7792 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7793 products as above, or REG or MEM, or constant.
7794 Ordinarily in such cases we would output mul or add instructions
7795 and then return a pseudo reg containing the sum.
7796
7797 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7798 it also marks a label as absolutely required (it can't be dead).
7799 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7800 This is used for outputting expressions used in initializers.
7801
7802 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7803 with a constant address even if that address is not normally legitimate.
7804 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7805
7806 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7807 a call parameter. Such targets require special care as we haven't yet
7808 marked TARGET so that it's safe from being trashed by libcalls. We
7809 don't want to use TARGET for anything but the final result;
7810 Intermediate values must go elsewhere. Additionally, calls to
7811 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7812
7813 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7814 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7815 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7816 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7817 recursively. */
7818
7819 rtx
7820 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7821 enum expand_modifier modifier, rtx *alt_rtl)
7822 {
7823 rtx ret;
7824
7825 /* Handle ERROR_MARK before anybody tries to access its type. */
7826 if (TREE_CODE (exp) == ERROR_MARK
7827 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7828 {
7829 ret = CONST0_RTX (tmode);
7830 return ret ? ret : const0_rtx;
7831 }
7832
7833 /* If this is an expression of some kind and it has an associated line
7834 number, then emit the line number before expanding the expression.
7835
7836 We need to save and restore the file and line information so that
7837 errors discovered during expansion are emitted with the right
7838 information. It would be better if the diagnostic routines
7839 used the file/line information embedded in the tree nodes rather
7840 than globals. */
7841 if (cfun && EXPR_HAS_LOCATION (exp))
7842 {
7843 location_t saved_location = input_location;
7844 location_t saved_curr_loc = get_curr_insn_source_location ();
7845 tree saved_block = get_curr_insn_block ();
7846 input_location = EXPR_LOCATION (exp);
7847 set_curr_insn_source_location (input_location);
7848
7849 /* Record where the insns produced belong. */
7850 set_curr_insn_block (TREE_BLOCK (exp));
7851
7852 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7853
7854 input_location = saved_location;
7855 set_curr_insn_block (saved_block);
7856 set_curr_insn_source_location (saved_curr_loc);
7857 }
7858 else
7859 {
7860 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7861 }
7862
7863 return ret;
7864 }
7865
7866 /* Try to expand the conditional expression which is represented by
7867 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7868 return the rtl reg which represents the result. Otherwise return
7869 NULL_RTX. */
7870
7871 static rtx
7872 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7873 tree treeop1 ATTRIBUTE_UNUSED,
7874 tree treeop2 ATTRIBUTE_UNUSED)
7875 {
7876 #ifdef HAVE_conditional_move
7877 rtx insn;
7878 rtx op00, op01, op1, op2;
7879 enum rtx_code comparison_code;
7880 enum machine_mode comparison_mode;
7881 gimple srcstmt;
7882 rtx temp;
7883 tree type = TREE_TYPE (treeop1);
7884 int unsignedp = TYPE_UNSIGNED (type);
7885 enum machine_mode mode = TYPE_MODE (type);
7886
7887 temp = assign_temp (type, 0, 1);
7888
7889 /* If we cannot do a conditional move on the mode, try doing it
7890 with the promoted mode. */
7891 if (!can_conditionally_move_p (mode))
7892 mode = promote_mode (type, mode, &unsignedp);
7893
7894 if (!can_conditionally_move_p (mode))
7895 return NULL_RTX;
7896
7897 start_sequence ();
7898 expand_operands (treeop1, treeop2,
7899 temp, &op1, &op2, EXPAND_NORMAL);
7900
7901 if (TREE_CODE (treeop0) == SSA_NAME
7902 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7903 {
7904 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7905 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7906 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7907 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7908 comparison_mode = TYPE_MODE (type);
7909 unsignedp = TYPE_UNSIGNED (type);
7910 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7911 }
7912 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7913 {
7914 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7915 enum tree_code cmpcode = TREE_CODE (treeop0);
7916 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7917 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7918 unsignedp = TYPE_UNSIGNED (type);
7919 comparison_mode = TYPE_MODE (type);
7920 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7921 }
7922 else
7923 {
7924 op00 = expand_normal (treeop0);
7925 op01 = const0_rtx;
7926 comparison_code = NE;
7927 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7928 }
7929
7930 if (GET_MODE (op1) != mode)
7931 op1 = gen_lowpart (mode, op1);
7932
7933 if (GET_MODE (op2) != mode)
7934 op2 = gen_lowpart (mode, op2);
7935
7936 /* Try to emit the conditional move. */
7937 insn = emit_conditional_move (temp, comparison_code,
7938 op00, op01, comparison_mode,
7939 op1, op2, mode,
7940 unsignedp);
7941
7942 /* If we could do the conditional move, emit the sequence,
7943 and return. */
7944 if (insn)
7945 {
7946 rtx seq = get_insns ();
7947 end_sequence ();
7948 emit_insn (seq);
7949 return temp;
7950 }
7951
7952 /* Otherwise discard the sequence and fall back to code with
7953 branches. */
7954 end_sequence ();
7955 #endif
7956 return NULL_RTX;
7957 }
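/* For example, on a target with conditional moves, x = a < b ? c : d
   is expanded by the function above without any branch: both arms go
   into pseudos and a single emit_conditional_move selects between
   them based on the comparison.  */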
7958
7959 rtx
7960 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7961 enum expand_modifier modifier)
7962 {
7963 rtx op0, op1, op2, temp;
7964 tree type;
7965 int unsignedp;
7966 enum machine_mode mode;
7967 enum tree_code code = ops->code;
7968 optab this_optab;
7969 rtx subtarget, original_target;
7970 int ignore;
7971 bool reduce_bit_field;
7972 location_t loc = ops->location;
7973 tree treeop0, treeop1, treeop2;
7974 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7975 ? reduce_to_bit_field_precision ((expr), \
7976 target, \
7977 type) \
7978 : (expr))
7979
7980 type = ops->type;
7981 mode = TYPE_MODE (type);
7982 unsignedp = TYPE_UNSIGNED (type);
7983
7984 treeop0 = ops->op0;
7985 treeop1 = ops->op1;
7986 treeop2 = ops->op2;
7987
7988 /* We should be called only on simple (binary or unary) expressions,
7989 exactly those that are valid in gimple expressions that aren't
7990 GIMPLE_SINGLE_RHS (or invalid). */
7991 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7992 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7993 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7994
7995 ignore = (target == const0_rtx
7996 || ((CONVERT_EXPR_CODE_P (code)
7997 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7998 && TREE_CODE (type) == VOID_TYPE));
7999
8000 /* We should be called only if we need the result. */
8001 gcc_assert (!ignore);
8002
8003 /* An operation in what may be a bit-field type needs the
8004 result to be reduced to the precision of the bit-field type,
8005 which is narrower than that of the type's mode. */
8006 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8007 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8008
8009 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8010 target = 0;
8011
8012 /* Use subtarget as the target for operand 0 of a binary operation. */
8013 subtarget = get_subtarget (target);
8014 original_target = target;
8015
8016 switch (code)
8017 {
8018 case NON_LVALUE_EXPR:
8019 case PAREN_EXPR:
8020 CASE_CONVERT:
8021 if (treeop0 == error_mark_node)
8022 return const0_rtx;
8023
8024 if (TREE_CODE (type) == UNION_TYPE)
8025 {
8026 tree valtype = TREE_TYPE (treeop0);
8027
8028 /* If both input and output are BLKmode, this conversion isn't doing
8029 anything except possibly changing memory attributes. */
8030 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8031 {
8032 rtx result = expand_expr (treeop0, target, tmode,
8033 modifier);
8034
8035 result = copy_rtx (result);
8036 set_mem_attributes (result, type, 0);
8037 return result;
8038 }
8039
8040 if (target == 0)
8041 {
8042 if (TYPE_MODE (type) != BLKmode)
8043 target = gen_reg_rtx (TYPE_MODE (type));
8044 else
8045 target = assign_temp (type, 1, 1);
8046 }
8047
8048 if (MEM_P (target))
8049 /* Store data into beginning of memory target. */
8050 store_expr (treeop0,
8051 adjust_address (target, TYPE_MODE (valtype), 0),
8052 modifier == EXPAND_STACK_PARM,
8053 false);
8054
8055 else
8056 {
8057 gcc_assert (REG_P (target));
8058
8059 /* Store this field into a union of the proper type. */
8060 store_field (target,
8061 MIN ((int_size_in_bytes (TREE_TYPE
8062 (treeop0))
8063 * BITS_PER_UNIT),
8064 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8065 0, 0, 0, TYPE_MODE (valtype), treeop0,
8066 type, 0, false);
8067 }
8068
8069 /* Return the entire union. */
8070 return target;
8071 }
8072
8073 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8074 {
8075 op0 = expand_expr (treeop0, target, VOIDmode,
8076 modifier);
8077
8078 /* If the signedness of the conversion differs and OP0 is
8079 a promoted SUBREG, clear that indication since we now
8080 have to do the proper extension. */
8081 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8082 && GET_CODE (op0) == SUBREG)
8083 SUBREG_PROMOTED_VAR_P (op0) = 0;
8084
8085 return REDUCE_BIT_FIELD (op0);
8086 }
8087
8088 op0 = expand_expr (treeop0, NULL_RTX, mode,
8089 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8090 if (GET_MODE (op0) == mode)
8091 ;
8092
8093 /* If OP0 is a constant, just convert it into the proper mode. */
8094 else if (CONSTANT_P (op0))
8095 {
8096 tree inner_type = TREE_TYPE (treeop0);
8097 enum machine_mode inner_mode = GET_MODE (op0);
8098
8099 if (inner_mode == VOIDmode)
8100 inner_mode = TYPE_MODE (inner_type);
8101
8102 if (modifier == EXPAND_INITIALIZER)
8103 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8104 subreg_lowpart_offset (mode,
8105 inner_mode));
8106 else
8107 op0 = convert_modes (mode, inner_mode, op0,
8108 TYPE_UNSIGNED (inner_type));
8109 }
8110
8111 else if (modifier == EXPAND_INITIALIZER)
8112 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8113
8114 else if (target == 0)
8115 op0 = convert_to_mode (mode, op0,
8116 TYPE_UNSIGNED (TREE_TYPE
8117 (treeop0)));
8118 else
8119 {
8120 convert_move (target, op0,
8121 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8122 op0 = target;
8123 }
8124
8125 return REDUCE_BIT_FIELD (op0);
8126
8127 case ADDR_SPACE_CONVERT_EXPR:
8128 {
8129 tree treeop0_type = TREE_TYPE (treeop0);
8130 addr_space_t as_to;
8131 addr_space_t as_from;
8132
8133 gcc_assert (POINTER_TYPE_P (type));
8134 gcc_assert (POINTER_TYPE_P (treeop0_type));
8135
8136 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8137 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8138
8139 /* Conversions between pointers to the same address space should
8140 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8141 gcc_assert (as_to != as_from);
8142
8143 /* Ask target code to handle conversion between pointers
8144 to overlapping address spaces. */
8145 if (targetm.addr_space.subset_p (as_to, as_from)
8146 || targetm.addr_space.subset_p (as_from, as_to))
8147 {
8148 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8149 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8150 gcc_assert (op0);
8151 return op0;
8152 }
8153
8154 /* For disjoint address spaces, converting anything but
8155 a null pointer invokes undefined behaviour. We simply
8156 always return a null pointer here. */
8157 return CONST0_RTX (mode);
8158 }
8159
8160 case POINTER_PLUS_EXPR:
8161 /* Even though the sizetype mode and the pointer's mode can be different,
8162 expand is able to handle this correctly and get the correct result out
8163 of the PLUS_EXPR code. */
8164 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8165 if sizetype precision is smaller than pointer precision. */
8166 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8167 treeop1 = fold_convert_loc (loc, type,
8168 fold_convert_loc (loc, ssizetype,
8169 treeop1));
8170 /* If sizetype precision is larger than pointer precision, truncate the
8171 offset to have matching modes. */
8172 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8173 treeop1 = fold_convert_loc (loc, type, treeop1);
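/* Fall through.  */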
8174
8175 case PLUS_EXPR:
8176 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8177 something else, make sure we add the register to the constant and
8178 then to the other thing. This case can occur during strength
8179 reduction and doing it this way will produce better code if the
8180 frame pointer or argument pointer is eliminated.
8181
8182 fold-const.c will ensure that the constant is always in the inner
8183 PLUS_EXPR, so the only case we need to do anything about is if
8184 sp, ap, or fp is our second argument, in which case we must swap
8185 the innermost first argument and our second argument. */
8186
8187 if (TREE_CODE (treeop0) == PLUS_EXPR
8188 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8189 && TREE_CODE (treeop1) == VAR_DECL
8190 && (DECL_RTL (treeop1) == frame_pointer_rtx
8191 || DECL_RTL (treeop1) == stack_pointer_rtx
8192 || DECL_RTL (treeop1) == arg_pointer_rtx))
8193 {
8194 gcc_unreachable ();
8195 }
8196
8197 /* If the result is to be ptr_mode and we are adding an integer to
8198 something, we might be forming a constant. So try to use
8199 plus_constant. If it produces a sum and we can't accept it,
8200 use force_operand. This allows P = &ARR[const] to generate
8201 efficient code on machines where a SYMBOL_REF is not a valid
8202 address.
8203
8204 If this is an EXPAND_SUM call, always return the sum. */
8205 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8206 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8207 {
8208 if (modifier == EXPAND_STACK_PARM)
8209 target = 0;
8210 if (TREE_CODE (treeop0) == INTEGER_CST
8211 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8212 && TREE_CONSTANT (treeop1))
8213 {
8214 rtx constant_part;
8215
8216 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8217 EXPAND_SUM);
8218 /* Use immed_double_const to ensure that the constant is
8219 truncated according to the mode of OP1, then sign extended
8220 to a HOST_WIDE_INT. Using the constant directly can result
8221 in non-canonical RTL in a 64x32 cross compile. */
8222 constant_part
8223 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8224 (HOST_WIDE_INT) 0,
8225 TYPE_MODE (TREE_TYPE (treeop1)));
8226 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8227 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8228 op1 = force_operand (op1, target);
8229 return REDUCE_BIT_FIELD (op1);
8230 }
8231
8232 else if (TREE_CODE (treeop1) == INTEGER_CST
8233 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8234 && TREE_CONSTANT (treeop0))
8235 {
8236 rtx constant_part;
8237
8238 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8239 (modifier == EXPAND_INITIALIZER
8240 ? EXPAND_INITIALIZER : EXPAND_SUM));
8241 if (! CONSTANT_P (op0))
8242 {
8243 op1 = expand_expr (treeop1, NULL_RTX,
8244 VOIDmode, modifier);
8245 /* Return a PLUS if modifier says it's OK. */
8246 if (modifier == EXPAND_SUM
8247 || modifier == EXPAND_INITIALIZER)
8248 return simplify_gen_binary (PLUS, mode, op0, op1);
8249 goto binop2;
8250 }
8251 /* Use immed_double_const to ensure that the constant is
8252 truncated according to the mode of OP0, then sign extended
8253 to a HOST_WIDE_INT. Using the constant directly can result
8254 in non-canonical RTL in a 64x32 cross compile. */
8255 constant_part
8256 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8257 (HOST_WIDE_INT) 0,
8258 TYPE_MODE (TREE_TYPE (treeop0)));
8259 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8260 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8261 op0 = force_operand (op0, target);
8262 return REDUCE_BIT_FIELD (op0);
8263 }
8264 }
8265
8266 /* Use TER to expand pointer addition of a negated value
8267 as pointer subtraction. */
8268 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8269 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8270 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8271 && TREE_CODE (treeop1) == SSA_NAME
8272 && TYPE_MODE (TREE_TYPE (treeop0))
8273 == TYPE_MODE (TREE_TYPE (treeop1)))
8274 {
8275 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8276 if (def)
8277 {
8278 treeop1 = gimple_assign_rhs1 (def);
8279 code = MINUS_EXPR;
8280 goto do_minus;
8281 }
8282 }
8283
8284 /* No sense saving up arithmetic to be done
8285 if it's all in the wrong mode to form part of an address.
8286 And force_operand won't know whether to sign-extend or
8287 zero-extend. */
8288 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8289 || mode != ptr_mode)
8290 {
8291 expand_operands (treeop0, treeop1,
8292 subtarget, &op0, &op1, EXPAND_NORMAL);
8293 if (op0 == const0_rtx)
8294 return op1;
8295 if (op1 == const0_rtx)
8296 return op0;
8297 goto binop2;
8298 }
8299
8300 expand_operands (treeop0, treeop1,
8301 subtarget, &op0, &op1, modifier);
8302 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
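/* For example, under EXPAND_SUM the addition p + 4 may come back as
   the bare rtx (plus (reg) (const_int 4)) with no insns emitted,
   leaving the caller free to fold the whole sum into an addressing
   mode.  */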
8303
8304 case MINUS_EXPR:
8305 do_minus:
8306 /* For initializers, we are allowed to return a MINUS of two
8307 symbolic constants. Here we handle all cases when both operands
8308 are constant. */
8309 /* Handle difference of two symbolic constants,
8310 for the sake of an initializer. */
8311 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8312 && really_constant_p (treeop0)
8313 && really_constant_p (treeop1))
8314 {
8315 expand_operands (treeop0, treeop1,
8316 NULL_RTX, &op0, &op1, modifier);
8317
8318 /* If the last operand is a CONST_INT, use plus_constant of
8319 the negated constant. Else make the MINUS. */
8320 if (CONST_INT_P (op1))
8321 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8322 -INTVAL (op1)));
8323 else
8324 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8325 }
8326
8327 /* No sense saving up arithmetic to be done
8328 if it's all in the wrong mode to form part of an address.
8329 And force_operand won't know whether to sign-extend or
8330 zero-extend. */
8331 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8332 || mode != ptr_mode)
8333 goto binop;
8334
8335 expand_operands (treeop0, treeop1,
8336 subtarget, &op0, &op1, modifier);
8337
8338 /* Convert A - const to A + (-const). */
8339 if (CONST_INT_P (op1))
8340 {
8341 op1 = negate_rtx (mode, op1);
8342 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8343 }
8344
8345 goto binop2;
8346
8347 case WIDEN_MULT_PLUS_EXPR:
8348 case WIDEN_MULT_MINUS_EXPR:
8349 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8350 op2 = expand_normal (treeop2);
8351 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8352 target, unsignedp);
8353 return target;
8354
8355 case WIDEN_MULT_EXPR:
8356 /* If first operand is constant, swap them.
8357 Thus the following special case checks need only
8358 check the second operand. */
8359 if (TREE_CODE (treeop0) == INTEGER_CST)
8360 {
8361 tree t1 = treeop0;
8362 treeop0 = treeop1;
8363 treeop1 = t1;
8364 }
8365
8366 /* First, check if we have a multiplication of one signed and one
8367 unsigned operand. */
8368 if (TREE_CODE (treeop1) != INTEGER_CST
8369 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8370 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8371 {
8372 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8373 this_optab = usmul_widen_optab;
8374 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8375 != CODE_FOR_nothing)
8376 {
8377 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8378 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8379 EXPAND_NORMAL);
8380 else
8381 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8382 EXPAND_NORMAL);
8383 goto binop3;
8384 }
8385 }
8386 /* Check for a multiplication with matching signedness. */
8387 else if ((TREE_CODE (treeop1) == INTEGER_CST
8388 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8389 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8390 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8391 {
8392 tree op0type = TREE_TYPE (treeop0);
8393 enum machine_mode innermode = TYPE_MODE (op0type);
8394 bool zextend_p = TYPE_UNSIGNED (op0type);
8395 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8396 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8397
8398 if (TREE_CODE (treeop0) != INTEGER_CST)
8399 {
8400 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8401 != CODE_FOR_nothing)
8402 {
8403 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8404 EXPAND_NORMAL);
8405 temp = expand_widening_mult (mode, op0, op1, target,
8406 unsignedp, this_optab);
8407 return REDUCE_BIT_FIELD (temp);
8408 }
8409 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8410 != CODE_FOR_nothing
8411 && innermode == word_mode)
8412 {
8413 rtx htem, hipart;
8414 op0 = expand_normal (treeop0);
8415 if (TREE_CODE (treeop1) == INTEGER_CST)
8416 op1 = convert_modes (innermode, mode,
8417 expand_normal (treeop1), unsignedp);
8418 else
8419 op1 = expand_normal (treeop1);
8420 temp = expand_binop (mode, other_optab, op0, op1, target,
8421 unsignedp, OPTAB_LIB_WIDEN);
8422 hipart = gen_highpart (innermode, temp);
8423 htem = expand_mult_highpart_adjust (innermode, hipart,
8424 op0, op1, hipart,
8425 zextend_p);
8426 if (htem != hipart)
8427 emit_move_insn (hipart, htem);
8428 return REDUCE_BIT_FIELD (temp);
8429 }
8430 }
8431 }
8432 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8433 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8434 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8435 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
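/* For example, on a 32-bit target the widening pattern
   (long long) a * (long long) b with int operands maps to
   smul_widen_optab when the target provides it, producing the
   64-bit product directly instead of via a library call.  */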
8436
8437 case FMA_EXPR:
8438 {
8439 optab opt = fma_optab;
8440 gimple def0, def2;
8441
8442 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8443 call. */
8444 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8445 {
8446 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8447 tree call_expr;
8448
8449 gcc_assert (fn != NULL_TREE);
8450 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8451 return expand_builtin (call_expr, target, subtarget, mode, false);
8452 }
8453
8454 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8455 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8456
8457 op0 = op2 = NULL;
8458
8459 if (def0 && def2
8460 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8461 {
8462 opt = fnms_optab;
8463 op0 = expand_normal (gimple_assign_rhs1 (def0));
8464 op2 = expand_normal (gimple_assign_rhs1 (def2));
8465 }
8466 else if (def0
8467 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8468 {
8469 opt = fnma_optab;
8470 op0 = expand_normal (gimple_assign_rhs1 (def0));
8471 }
8472 else if (def2
8473 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8474 {
8475 opt = fms_optab;
8476 op2 = expand_normal (gimple_assign_rhs1 (def2));
8477 }
8478
8479 if (op0 == NULL)
8480 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8481 if (op2 == NULL)
8482 op2 = expand_normal (treeop2);
8483 op1 = expand_normal (treeop1);
8484
8485 return expand_ternary_op (TYPE_MODE (type), opt,
8486 op0, op1, op2, target, 0);
8487 }
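/* For example, when treeop0 is defined by a NEGATE_EXPR and the
   target provides fnma, the expansion above absorbs the negation:
   -a * b + c becomes a single fnma insn rather than a negate
   followed by an fma.  */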
8488
8489 case MULT_EXPR:
8490 /* If this is a fixed-point operation, then we cannot use the code
8491 below because "expand_mult" doesn't support sat/no-sat fixed-point
8492 multiplications. */
8493 if (ALL_FIXED_POINT_MODE_P (mode))
8494 goto binop;
8495
8496 /* If first operand is constant, swap them.
8497 Thus the following special case checks need only
8498 check the second operand. */
8499 if (TREE_CODE (treeop0) == INTEGER_CST)
8500 {
8501 tree t1 = treeop0;
8502 treeop0 = treeop1;
8503 treeop1 = t1;
8504 }
8505
8506 /* Attempt to return something suitable for generating an
8507 indexed address, for machines that support that. */
8508
8509 if (modifier == EXPAND_SUM && mode == ptr_mode
8510 && host_integerp (treeop1, 0))
8511 {
8512 tree exp1 = treeop1;
8513
8514 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8515 EXPAND_SUM);
8516
8517 if (!REG_P (op0))
8518 op0 = force_operand (op0, NULL_RTX);
8519 if (!REG_P (op0))
8520 op0 = copy_to_mode_reg (mode, op0);
8521
8522 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8523 gen_int_mode (tree_low_cst (exp1, 0),
8524 TYPE_MODE (TREE_TYPE (exp1)))));
8525 }
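/* For example, expanding i * 4 under EXPAND_SUM returns the bare
   (mult (reg) (const_int 4)) so that an enclosing address computation
   can become a single scaled-index address on targets that have
   one.  */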
8526
8527 if (modifier == EXPAND_STACK_PARM)
8528 target = 0;
8529
8530 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8531 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8532
8533 case TRUNC_DIV_EXPR:
8534 case FLOOR_DIV_EXPR:
8535 case CEIL_DIV_EXPR:
8536 case ROUND_DIV_EXPR:
8537 case EXACT_DIV_EXPR:
8538 /* If this is a fixed-point operation, then we cannot use the code
8539 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8540 divisions. */
8541 if (ALL_FIXED_POINT_MODE_P (mode))
8542 goto binop;
8543
8544 if (modifier == EXPAND_STACK_PARM)
8545 target = 0;
8546 /* Possible optimization: compute the dividend with EXPAND_SUM;
8547 then, if the divisor is constant, we can optimize the case
8548 where some terms of the dividend have coefficients divisible by it. */
8549 expand_operands (treeop0, treeop1,
8550 subtarget, &op0, &op1, EXPAND_NORMAL);
8551 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8552
8553 case RDIV_EXPR:
8554 goto binop;
8555
8556 case TRUNC_MOD_EXPR:
8557 case FLOOR_MOD_EXPR:
8558 case CEIL_MOD_EXPR:
8559 case ROUND_MOD_EXPR:
8560 if (modifier == EXPAND_STACK_PARM)
8561 target = 0;
8562 expand_operands (treeop0, treeop1,
8563 subtarget, &op0, &op1, EXPAND_NORMAL);
8564 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8565
8566 case FIXED_CONVERT_EXPR:
8567 op0 = expand_normal (treeop0);
8568 if (target == 0 || modifier == EXPAND_STACK_PARM)
8569 target = gen_reg_rtx (mode);
8570
8571 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8572 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8573 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8574 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8575 else
8576 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8577 return target;
8578
8579 case FIX_TRUNC_EXPR:
8580 op0 = expand_normal (treeop0);
8581 if (target == 0 || modifier == EXPAND_STACK_PARM)
8582 target = gen_reg_rtx (mode);
8583 expand_fix (target, op0, unsignedp);
8584 return target;
8585
8586 case FLOAT_EXPR:
8587 op0 = expand_normal (treeop0);
8588 if (target == 0 || modifier == EXPAND_STACK_PARM)
8589 target = gen_reg_rtx (mode);
8590 /* expand_float can't figure out what to do if FROM has VOIDmode.
8591 So give it the correct mode. With -O, cse will optimize this. */
8592 if (GET_MODE (op0) == VOIDmode)
8593 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8594 op0);
8595 expand_float (target, op0,
8596 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8597 return target;
8598
8599 case NEGATE_EXPR:
8600 op0 = expand_expr (treeop0, subtarget,
8601 VOIDmode, EXPAND_NORMAL);
8602 if (modifier == EXPAND_STACK_PARM)
8603 target = 0;
8604 temp = expand_unop (mode,
8605 optab_for_tree_code (NEGATE_EXPR, type,
8606 optab_default),
8607 op0, target, 0);
8608 gcc_assert (temp);
8609 return REDUCE_BIT_FIELD (temp);
8610
8611 case ABS_EXPR:
8612 op0 = expand_expr (treeop0, subtarget,
8613 VOIDmode, EXPAND_NORMAL);
8614 if (modifier == EXPAND_STACK_PARM)
8615 target = 0;
8616
8617 /* ABS_EXPR is not valid for complex arguments. */
8618 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8619 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8620
8621 /* Unsigned abs is simply the operand. Testing here means we don't
8622 risk generating incorrect code below. */
8623 if (TYPE_UNSIGNED (type))
8624 return op0;
8625
8626 return expand_abs (mode, op0, target, unsignedp,
8627 safe_from_p (target, treeop0, 1));
8628
8629 case MAX_EXPR:
8630 case MIN_EXPR:
8631 target = original_target;
8632 if (target == 0
8633 || modifier == EXPAND_STACK_PARM
8634 || (MEM_P (target) && MEM_VOLATILE_P (target))
8635 || GET_MODE (target) != mode
8636 || (REG_P (target)
8637 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8638 target = gen_reg_rtx (mode);
8639 expand_operands (treeop0, treeop1,
8640 target, &op0, &op1, EXPAND_NORMAL);
8641
8642 /* First try to do it with a special MIN or MAX instruction.
8643 If that does not win, use a conditional jump to select the proper
8644 value. */
8645 this_optab = optab_for_tree_code (code, type, optab_default);
8646 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8647 OPTAB_WIDEN);
8648 if (temp != 0)
8649 return temp;
8650
8651 /* At this point, a MEM target is no longer useful; we will get better
8652 code without it. */
8653
8654 if (! REG_P (target))
8655 target = gen_reg_rtx (mode);
8656
8657 /* If op1 was placed in target, swap op0 and op1. */
8658 if (target != op0 && target == op1)
8659 {
8660 temp = op0;
8661 op0 = op1;
8662 op1 = temp;
8663 }
8664
8665 /* We generate better code and avoid problems with op1 mentioning
8666 target by forcing op1 into a pseudo if it isn't a constant. */
8667 if (! CONSTANT_P (op1))
8668 op1 = force_reg (mode, op1);
8669
8670 {
8671 enum rtx_code comparison_code;
8672 rtx cmpop1 = op1;
8673
8674 if (code == MAX_EXPR)
8675 comparison_code = unsignedp ? GEU : GE;
8676 else
8677 comparison_code = unsignedp ? LEU : LE;
8678
8679 /* Canonicalize to comparisons against 0. */
8680 if (op1 == const1_rtx)
8681 {
8682 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8683 or (a != 0 ? a : 1) for unsigned.
8684 For MIN we are safe converting (a <= 1 ? a : 1)
8685 into (a <= 0 ? a : 1) */
8686 cmpop1 = const0_rtx;
8687 if (code == MAX_EXPR)
8688 comparison_code = unsignedp ? NE : GT;
8689 }
8690 if (op1 == constm1_rtx && !unsignedp)
8691 {
8692 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8693 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8694 cmpop1 = const0_rtx;
8695 if (code == MIN_EXPR)
8696 comparison_code = LT;
8697 }
8698 #ifdef HAVE_conditional_move
8699 /* Use a conditional move if possible. */
8700 if (can_conditionally_move_p (mode))
8701 {
8702 rtx insn;
8703
8704 /* ??? Same problem as in expmed.c: emit_conditional_move
8705 forces a stack adjustment via compare_from_rtx, and we
8706 lose the stack adjustment if the sequence we are about
8707 to create is discarded. */
8708 do_pending_stack_adjust ();
8709
8710 start_sequence ();
8711
8712 /* Try to emit the conditional move. */
8713 insn = emit_conditional_move (target, comparison_code,
8714 op0, cmpop1, mode,
8715 op0, op1, mode,
8716 unsignedp);
8717
8718 /* If we could do the conditional move, emit the sequence,
8719 and return. */
8720 if (insn)
8721 {
8722 rtx seq = get_insns ();
8723 end_sequence ();
8724 emit_insn (seq);
8725 return target;
8726 }
8727
8728 /* Otherwise discard the sequence and fall back to code with
8729 branches. */
8730 end_sequence ();
8731 }
8732 #endif
8733 if (target != op0)
8734 emit_move_insn (target, op0);
8735
8736 temp = gen_label_rtx ();
8737 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8738 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8739 -1);
8740 }
8741 emit_move_insn (target, op1);
8742 emit_label (temp);
8743 return target;
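/* For example, unsigned MAX_EXPR against the constant 1 is
   canonicalized above to a comparison against zero: (a >= 1 ? a : 1)
   is emitted as (a != 0 ? a : 1), which most targets handle more
   cheaply.  */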
8744
8745 case BIT_NOT_EXPR:
8746 op0 = expand_expr (treeop0, subtarget,
8747 VOIDmode, EXPAND_NORMAL);
8748 if (modifier == EXPAND_STACK_PARM)
8749 target = 0;
8750 /* If we have to reduce the result to bit-field precision and the
8751 type is unsigned, expand this as an XOR with the proper constant
8752 instead. */
8753 if (reduce_bit_field && TYPE_UNSIGNED (type))
8754 temp = expand_binop (mode, xor_optab, op0,
8755 immed_double_int_const
8756 (double_int_mask (TYPE_PRECISION (type)), mode),
8757 target, 1, OPTAB_LIB_WIDEN);
8758 else
8759 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8760 gcc_assert (temp);
8761 return temp;
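/* For example, with a 3-bit unsigned bitfield type the XOR path above
   computes x ^ 7, which equals ~x already truncated to TYPE_PRECISION
   bits, so no separate reduction step is needed.  */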
8762
8763 /* ??? Can optimize bitwise operations with one arg constant.
8764 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8765 and (a bitwise1 b) bitwise2 b (etc)
8766 but that is probably not worthwhile. */
8767
8768 case BIT_AND_EXPR:
8769 case BIT_IOR_EXPR:
8770 case BIT_XOR_EXPR:
8771 goto binop;
8772
8773 case LROTATE_EXPR:
8774 case RROTATE_EXPR:
8775 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8776 || (GET_MODE_PRECISION (TYPE_MODE (type))
8777 == TYPE_PRECISION (type)));
8778 /* fall through */
8779
8780 case LSHIFT_EXPR:
8781 case RSHIFT_EXPR:
8782 /* If this is a fixed-point operation, then we cannot use the code
8783 below because "expand_shift" doesn't support sat/no-sat fixed-point
8784 shifts. */
8785 if (ALL_FIXED_POINT_MODE_P (mode))
8786 goto binop;
8787
8788 if (! safe_from_p (subtarget, treeop1, 1))
8789 subtarget = 0;
8790 if (modifier == EXPAND_STACK_PARM)
8791 target = 0;
8792 op0 = expand_expr (treeop0, subtarget,
8793 VOIDmode, EXPAND_NORMAL);
8794 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8795 unsignedp);
8796 if (code == LSHIFT_EXPR)
8797 temp = REDUCE_BIT_FIELD (temp);
8798 return temp;
8799
8800 /* Could determine the answer when only additive constants differ. Also,
8801 the addition of one can be handled by changing the condition. */
8802 case LT_EXPR:
8803 case LE_EXPR:
8804 case GT_EXPR:
8805 case GE_EXPR:
8806 case EQ_EXPR:
8807 case NE_EXPR:
8808 case UNORDERED_EXPR:
8809 case ORDERED_EXPR:
8810 case UNLT_EXPR:
8811 case UNLE_EXPR:
8812 case UNGT_EXPR:
8813 case UNGE_EXPR:
8814 case UNEQ_EXPR:
8815 case LTGT_EXPR:
8816 temp = do_store_flag (ops,
8817 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8818 tmode != VOIDmode ? tmode : mode);
8819 if (temp)
8820 return temp;
8821
8822 /* Use a compare and a jump for BLKmode comparisons, or for function
8823 type comparisons if HAVE_canonicalize_funcptr_for_compare is defined. */
8824
8825 if ((target == 0
8826 || modifier == EXPAND_STACK_PARM
8827 || ! safe_from_p (target, treeop0, 1)
8828 || ! safe_from_p (target, treeop1, 1)
8829 /* Make sure we don't have a hard reg (such as function's return
8830 value) live across basic blocks, if not optimizing. */
8831 || (!optimize && REG_P (target)
8832 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8833 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8834
8835 emit_move_insn (target, const0_rtx);
8836
8837 op1 = gen_label_rtx ();
8838 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8839
8840 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8841 emit_move_insn (target, constm1_rtx);
8842 else
8843 emit_move_insn (target, const1_rtx);
8844
8845 emit_label (op1);
8846 return target;
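/* A sketch of what the fallback above emits for "x OP y":

       target = 0;
       if (!(x OP y)) goto lab;
       target = 1;   (or -1 for signed one-bit types)
     lab:
 */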
8847
8848 case COMPLEX_EXPR:
8849 /* Get the rtx code of the operands. */
8850 op0 = expand_normal (treeop0);
8851 op1 = expand_normal (treeop1);
8852
8853 if (!target)
8854 target = gen_reg_rtx (TYPE_MODE (type));
8855
8856 /* Move the real (op0) and imaginary (op1) parts to their location. */
8857 write_complex_part (target, op0, false);
8858 write_complex_part (target, op1, true);
8859
8860 return target;
8861
8862 case WIDEN_SUM_EXPR:
8863 {
8864 tree oprnd0 = treeop0;
8865 tree oprnd1 = treeop1;
8866
8867 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8868 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8869 target, unsignedp);
8870 return target;
8871 }
8872
8873 case REDUC_MAX_EXPR:
8874 case REDUC_MIN_EXPR:
8875 case REDUC_PLUS_EXPR:
8876 {
8877 op0 = expand_normal (treeop0);
8878 this_optab = optab_for_tree_code (code, type, optab_default);
8879 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8880 gcc_assert (temp);
8881 return temp;
8882 }
8883
8884 case VEC_LSHIFT_EXPR:
8885 case VEC_RSHIFT_EXPR:
8886 {
8887 target = expand_vec_shift_expr (ops, target);
8888 return target;
8889 }
8890
8891 case VEC_UNPACK_HI_EXPR:
8892 case VEC_UNPACK_LO_EXPR:
8893 {
8894 op0 = expand_normal (treeop0);
8895 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8896 target, unsignedp);
8897 gcc_assert (temp);
8898 return temp;
8899 }
8900
8901 case VEC_UNPACK_FLOAT_HI_EXPR:
8902 case VEC_UNPACK_FLOAT_LO_EXPR:
8903 {
8904 op0 = expand_normal (treeop0);
8905 /* The signedness is determined from input operand. */
8906 temp = expand_widen_pattern_expr
8907 (ops, op0, NULL_RTX, NULL_RTX,
8908 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8909
8910 gcc_assert (temp);
8911 return temp;
8912 }
8913
8914 case VEC_WIDEN_MULT_HI_EXPR:
8915 case VEC_WIDEN_MULT_LO_EXPR:
8916 {
8917 tree oprnd0 = treeop0;
8918 tree oprnd1 = treeop1;
8919
8920 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8921 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8922 target, unsignedp);
8923 gcc_assert (target);
8924 return target;
8925 }
8926
8927 case VEC_WIDEN_LSHIFT_HI_EXPR:
8928 case VEC_WIDEN_LSHIFT_LO_EXPR:
8929 {
8930 tree oprnd0 = treeop0;
8931 tree oprnd1 = treeop1;
8932
8933 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8934 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8935 target, unsignedp);
8936 gcc_assert (target);
8937 return target;
8938 }
8939
8940 case VEC_PACK_TRUNC_EXPR:
8941 case VEC_PACK_SAT_EXPR:
8942 case VEC_PACK_FIX_TRUNC_EXPR:
8943 mode = TYPE_MODE (TREE_TYPE (treeop0));
8944 goto binop;
8945
8946 case VEC_PERM_EXPR:
8947 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8948 op2 = expand_normal (treeop2);
8949
8950 /* Careful here: if the target doesn't support integral vector modes,
8951 a constant selection vector could wind up smooshed into a normal
8952 integral constant. */
8953 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8954 {
8955 tree sel_type = TREE_TYPE (treeop2);
8956 enum machine_mode vmode
8957 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8958 TYPE_VECTOR_SUBPARTS (sel_type));
8959 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8960 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8961 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8962 }
8963 else
8964 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8965
8966 temp = expand_vec_perm (mode, op0, op1, op2, target);
8967 gcc_assert (temp);
8968 return temp;
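/* E.g. on a target lacking integral vector modes, a V4QImode selector
   such as {0,1,4,5} may have been folded into a plain SImode
   immediate; the simplify_subreg call above recasts it as a
   CONST_VECTOR so that expand_vec_perm receives a genuine permutation
   constant.  */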
8969
8970 case DOT_PROD_EXPR:
8971 {
8972 tree oprnd0 = treeop0;
8973 tree oprnd1 = treeop1;
8974 tree oprnd2 = treeop2;
8975 rtx op2;
8976
8977 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8978 op2 = expand_normal (oprnd2);
8979 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8980 target, unsignedp);
8981 return target;
8982 }
8983
8984 case REALIGN_LOAD_EXPR:
8985 {
8986 tree oprnd0 = treeop0;
8987 tree oprnd1 = treeop1;
8988 tree oprnd2 = treeop2;
8989 rtx op2;
8990
8991 this_optab = optab_for_tree_code (code, type, optab_default);
8992 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8993 op2 = expand_normal (oprnd2);
8994 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8995 target, unsignedp);
8996 gcc_assert (temp);
8997 return temp;
8998 }
8999
9000 case COND_EXPR:
9001 /* A COND_EXPR with its type being VOID_TYPE represents a
9002 conditional jump and is handled in
9003 expand_gimple_cond_expr. */
9004 gcc_assert (!VOID_TYPE_P (type));
9005
9006 /* Note that COND_EXPRs whose type is a structure or union
9007 are required to be constructed to contain assignments of
9008 a temporary variable, so that we can evaluate them here
9009 for side effect only. If type is void, we must do likewise. */
9010
9011 gcc_assert (!TREE_ADDRESSABLE (type)
9012 && !ignore
9013 && TREE_TYPE (treeop1) != void_type_node
9014 && TREE_TYPE (treeop2) != void_type_node);
9015
9016 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9017 if (temp)
9018 return temp;
9019
9020 /* If we are not to produce a result, we have no target. Otherwise,
9021 if a target was specified use it; it will not be used as an
9022 intermediate target unless it is safe. If no target, use a
9023 temporary. */
9024
9025 if (modifier != EXPAND_STACK_PARM
9026 && original_target
9027 && safe_from_p (original_target, treeop0, 1)
9028 && GET_MODE (original_target) == mode
9029 && !MEM_P (original_target))
9030 temp = original_target;
9031 else
9032 temp = assign_temp (type, 0, 1);
9033
9034 do_pending_stack_adjust ();
9035 NO_DEFER_POP;
9036 op0 = gen_label_rtx ();
9037 op1 = gen_label_rtx ();
9038 jumpifnot (treeop0, op0, -1);
9039 store_expr (treeop1, temp,
9040 modifier == EXPAND_STACK_PARM,
9041 false);
9042
9043 emit_jump_insn (gen_jump (op1));
9044 emit_barrier ();
9045 emit_label (op0);
9046 store_expr (treeop2, temp,
9047 modifier == EXPAND_STACK_PARM,
9048 false);
9049
9050 emit_label (op1);
9051 OK_DEFER_POP;
9052 return temp;
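/* The shape of the code emitted above for "cond ? t1 : t2" (sketch):

       if (!cond) goto lab0;
       temp = t1;
       goto lab1;
     lab0:
       temp = t2;
     lab1:
 */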
9053
9054 case VEC_COND_EXPR:
9055 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9056 return target;
9057
9058 default:
9059 gcc_unreachable ();
9060 }
9061
9062 /* Here to do an ordinary binary operator. */
9063 binop:
9064 expand_operands (treeop0, treeop1,
9065 subtarget, &op0, &op1, EXPAND_NORMAL);
9066 binop2:
9067 this_optab = optab_for_tree_code (code, type, optab_default);
9068 binop3:
9069 if (modifier == EXPAND_STACK_PARM)
9070 target = 0;
9071 temp = expand_binop (mode, this_optab, op0, op1, target,
9072 unsignedp, OPTAB_LIB_WIDEN);
9073 gcc_assert (temp);
9074 /* Bitwise operations do not need bitfield reduction as we expect their
9075 operands to be properly truncated. */
9076 if (code == BIT_XOR_EXPR
9077 || code == BIT_AND_EXPR
9078 || code == BIT_IOR_EXPR)
9079 return temp;
9080 return REDUCE_BIT_FIELD (temp);
9081 }
9082 #undef REDUCE_BIT_FIELD
9083
9084 rtx
9085 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9086 enum expand_modifier modifier, rtx *alt_rtl)
9087 {
9088 rtx op0, op1, temp, decl_rtl;
9089 tree type;
9090 int unsignedp;
9091 enum machine_mode mode;
9092 enum tree_code code = TREE_CODE (exp);
9093 rtx subtarget, original_target;
9094 int ignore;
9095 tree context;
9096 bool reduce_bit_field;
9097 location_t loc = EXPR_LOCATION (exp);
9098 struct separate_ops ops;
9099 tree treeop0, treeop1, treeop2;
9100 tree ssa_name = NULL_TREE;
9101 gimple g;
9102
9103 type = TREE_TYPE (exp);
9104 mode = TYPE_MODE (type);
9105 unsignedp = TYPE_UNSIGNED (type);
9106
9107 treeop0 = treeop1 = treeop2 = NULL_TREE;
9108 if (!VL_EXP_CLASS_P (exp))
9109 switch (TREE_CODE_LENGTH (code))
9110 {
9111 default:
9112 case 3: treeop2 = TREE_OPERAND (exp, 2);
9113 case 2: treeop1 = TREE_OPERAND (exp, 1);
9114 case 1: treeop0 = TREE_OPERAND (exp, 0);
9115 case 0: break;
9116 }
9117 ops.code = code;
9118 ops.type = type;
9119 ops.op0 = treeop0;
9120 ops.op1 = treeop1;
9121 ops.op2 = treeop2;
9122 ops.location = loc;
9123
9124 ignore = (target == const0_rtx
9125 || ((CONVERT_EXPR_CODE_P (code)
9126 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9127 && TREE_CODE (type) == VOID_TYPE));
9128
9129 /* An operation in what may be a bit-field type needs the
9130 result to be reduced to the precision of the bit-field type,
9131 which is narrower than that of the type's mode. */
9132 reduce_bit_field = (!ignore
9133 && INTEGRAL_TYPE_P (type)
9134 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
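/* E.g. a bit-field type with TYPE_PRECISION 3 whose mode is QImode
   (precision 8): arithmetic is carried out in the full QImode
   register, so results must be truncated back to 3 bits afterwards.  */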
9135
9136 /* If we are going to ignore this result, we need only do something
9137 if there is a side-effect somewhere in the expression. If there
9138 is, short-circuit the most common cases here. Note that we must
9139 not call expand_expr with anything but const0_rtx in case this
9140 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9141
9142 if (ignore)
9143 {
9144 if (! TREE_SIDE_EFFECTS (exp))
9145 return const0_rtx;
9146
9147 /* Ensure we reference a volatile object even if value is ignored, but
9148 don't do this if all we are doing is taking its address. */
9149 if (TREE_THIS_VOLATILE (exp)
9150 && TREE_CODE (exp) != FUNCTION_DECL
9151 && mode != VOIDmode && mode != BLKmode
9152 && modifier != EXPAND_CONST_ADDRESS)
9153 {
9154 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9155 if (MEM_P (temp))
9156 copy_to_reg (temp);
9157 return const0_rtx;
9158 }
9159
9160 if (TREE_CODE_CLASS (code) == tcc_unary
9161 || code == COMPONENT_REF || code == INDIRECT_REF)
9162 return expand_expr (treeop0, const0_rtx, VOIDmode,
9163 modifier);
9164
9165 else if (TREE_CODE_CLASS (code) == tcc_binary
9166 || TREE_CODE_CLASS (code) == tcc_comparison
9167 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9168 {
9169 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9170 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9171 return const0_rtx;
9172 }
9173 else if (code == BIT_FIELD_REF)
9174 {
9175 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9176 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9177 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
9178 return const0_rtx;
9179 }
9180
9181 target = 0;
9182 }
9183
9184 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9185 target = 0;
9186
9187 /* Use subtarget as the target for operand 0 of a binary operation. */
9188 subtarget = get_subtarget (target);
9189 original_target = target;
9190
9191 switch (code)
9192 {
9193 case LABEL_DECL:
9194 {
9195 tree function = decl_function_context (exp);
9196
9197 temp = label_rtx (exp);
9198 temp = gen_rtx_LABEL_REF (Pmode, temp);
9199
9200 if (function != current_function_decl
9201 && function != 0)
9202 LABEL_REF_NONLOCAL_P (temp) = 1;
9203
9204 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9205 return temp;
9206 }
9207
9208 case SSA_NAME:
9209 /* ??? ivopts calls the expander without any preparation from
9210 out-of-ssa. So fake instructions as if this were an access to the
9211 base variable. This unnecessarily allocates a pseudo; see whether
9212 we can reuse it once partition base vars have it set already. */
9213 if (!currently_expanding_to_rtl)
9214 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
9215 NULL);
9216
9217 g = get_gimple_for_ssa_name (exp);
9218 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9219 if (g == NULL
9220 && modifier == EXPAND_INITIALIZER
9221 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9222 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9223 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9224 g = SSA_NAME_DEF_STMT (exp);
9225 if (g)
9226 {
9227 rtx r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9228 tmode, modifier, NULL);
9229 if (REG_P (r) && !REG_EXPR (r))
9230 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9231 return r;
9232 }
9233
9234 ssa_name = exp;
9235 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9236 exp = SSA_NAME_VAR (ssa_name);
9237 goto expand_decl_rtl;
9238
9239 case PARM_DECL:
9240 case VAR_DECL:
9241 /* If a static var's type was incomplete when the decl was written,
9242 but the type is complete now, lay out the decl now. */
9243 if (DECL_SIZE (exp) == 0
9244 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9245 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9246 layout_decl (exp, 0);
9247
9248 /* ... fall through ... */
9249
9250 case FUNCTION_DECL:
9251 case RESULT_DECL:
9252 decl_rtl = DECL_RTL (exp);
9253 expand_decl_rtl:
9254 gcc_assert (decl_rtl);
9255 decl_rtl = copy_rtx (decl_rtl);
9256 /* Record writes to register variables. */
9257 if (modifier == EXPAND_WRITE
9258 && REG_P (decl_rtl)
9259 && HARD_REGISTER_P (decl_rtl))
9260 add_to_hard_reg_set (&crtl->asm_clobbers,
9261 GET_MODE (decl_rtl), REGNO (decl_rtl));
9262
9263 /* Ensure variable marked as used even if it doesn't go through
9264 a parser. If it hasn't been used yet, write out an external
9265 definition. */
9266 TREE_USED (exp) = 1;
9267
9268 /* Show we haven't gotten RTL for this yet. */
9269 temp = 0;
9270
9271 /* Variables inherited from containing functions should have
9272 been lowered by this point. */
9273 context = decl_function_context (exp);
9274 gcc_assert (!context
9275 || context == current_function_decl
9276 || TREE_STATIC (exp)
9277 || DECL_EXTERNAL (exp)
9278 /* ??? C++ creates functions that are not TREE_STATIC. */
9279 || TREE_CODE (exp) == FUNCTION_DECL);
9280
9281 /* This is the case of an array whose size is to be determined
9282 from its initializer, while the initializer is still being parsed.
9283 ??? We aren't parsing while expanding anymore. */
9284
9285 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9286 temp = validize_mem (decl_rtl);
9287
9288 /* If DECL_RTL is memory, we are in the normal case and the
9289 address is not valid, get the address into a register. */
9290
9291 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9292 {
9293 if (alt_rtl)
9294 *alt_rtl = decl_rtl;
9295 decl_rtl = use_anchored_address (decl_rtl);
9296 if (modifier != EXPAND_CONST_ADDRESS
9297 && modifier != EXPAND_SUM
9298 && !memory_address_addr_space_p (DECL_MODE (exp),
9299 XEXP (decl_rtl, 0),
9300 MEM_ADDR_SPACE (decl_rtl)))
9301 temp = replace_equiv_address (decl_rtl,
9302 copy_rtx (XEXP (decl_rtl, 0)));
9303 }
9304
9305 /* If we got something, return it. But first, set the alignment
9306 if the address is a register. */
9307 if (temp != 0)
9308 {
9309 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9310 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9311
9312 return temp;
9313 }
9314
9315 /* If the mode of DECL_RTL does not match that of the decl,
9316 there are two cases: we are dealing with a BLKmode value
9317 that is returned in a register, or we are dealing with
9318 a promoted value. In the latter case, return a SUBREG
9319 of the wanted mode, but mark it so that we know that it
9320 was already extended. */
9321 if (REG_P (decl_rtl)
9322 && DECL_MODE (exp) != BLKmode
9323 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9324 {
9325 enum machine_mode pmode;
9326
9327 /* Get the signedness to be used for this variable. Ensure we get
9328 the same mode we got when the variable was declared. */
9329 if (code == SSA_NAME
9330 && (g = SSA_NAME_DEF_STMT (ssa_name))
9331 && gimple_code (g) == GIMPLE_CALL)
9332 {
9333 gcc_assert (!gimple_call_internal_p (g));
9334 pmode = promote_function_mode (type, mode, &unsignedp,
9335 gimple_call_fntype (g),
9336 2);
9337 }
9338 else
9339 pmode = promote_decl_mode (exp, &unsignedp);
9340 gcc_assert (GET_MODE (decl_rtl) == pmode);
9341
9342 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9343 SUBREG_PROMOTED_VAR_P (temp) = 1;
9344 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9345 return temp;
9346 }
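/* E.g. an SImode variable promoted to a DImode register comes back
   here as a lowpart SUBREG of that DImode register, with
   SUBREG_PROMOTED_VAR_P set so later passes know the upper bits are
   already properly extended.  */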
9347
9348 return decl_rtl;
9349
9350 case INTEGER_CST:
9351 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9352 TREE_INT_CST_HIGH (exp), mode);
9353
9354 return temp;
9355
9356 case VECTOR_CST:
9357 {
9358 tree tmp = NULL_TREE;
9359 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9360 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9361 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9362 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9363 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9364 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9365 return const_vector_from_tree (exp);
9366 if (GET_MODE_CLASS (mode) == MODE_INT)
9367 {
9368 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9369 if (type_for_mode)
9370 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9371 }
9372 if (!tmp)
9373 {
9374 VEC(constructor_elt,gc) *v;
9375 unsigned i;
9376 v = VEC_alloc (constructor_elt, gc, VECTOR_CST_NELTS (exp));
9377 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9378 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9379 tmp = build_constructor (type, v);
9380 }
9381 return expand_expr (tmp, ignore ? const0_rtx : target,
9382 tmode, modifier);
9383 }
9384
9385 case CONST_DECL:
9386 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9387
9388 case REAL_CST:
9389 /* If optimized, generate immediate CONST_DOUBLE
9390 which will be turned into memory by reload if necessary.
9391
9392 We used to force a register so that loop.c could see it. But
9393 this does not allow gen_* patterns to perform optimizations with
9394 the constants. It also produces two insns in cases like "x = 1.0;".
9395 On most machines, floating-point constants are not permitted in
9396 many insns, so we'd end up copying it to a register in any case.
9397
9398 Now, we do the copying in expand_binop, if appropriate. */
9399 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9400 TYPE_MODE (TREE_TYPE (exp)));
9401
9402 case FIXED_CST:
9403 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9404 TYPE_MODE (TREE_TYPE (exp)));
9405
9406 case COMPLEX_CST:
9407 /* Handle evaluating a complex constant in a CONCAT target. */
9408 if (original_target && GET_CODE (original_target) == CONCAT)
9409 {
9410 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9411 rtx rtarg, itarg;
9412
9413 rtarg = XEXP (original_target, 0);
9414 itarg = XEXP (original_target, 1);
9415
9416 /* Move the real and imaginary parts separately. */
9417 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9418 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9419
9420 if (op0 != rtarg)
9421 emit_move_insn (rtarg, op0);
9422 if (op1 != itarg)
9423 emit_move_insn (itarg, op1);
9424
9425 return original_target;
9426 }
9427
9428 /* ... fall through ... */
9429
9430 case STRING_CST:
9431 temp = expand_expr_constant (exp, 1, modifier);
9432
9433 /* temp contains a constant address.
9434 On RISC machines where a constant address isn't valid,
9435 make some insns to get that address into a register. */
9436 if (modifier != EXPAND_CONST_ADDRESS
9437 && modifier != EXPAND_INITIALIZER
9438 && modifier != EXPAND_SUM
9439 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9440 MEM_ADDR_SPACE (temp)))
9441 return replace_equiv_address (temp,
9442 copy_rtx (XEXP (temp, 0)));
9443 return temp;
9444
9445 case SAVE_EXPR:
9446 {
9447 tree val = treeop0;
9448 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9449
9450 if (!SAVE_EXPR_RESOLVED_P (exp))
9451 {
9452 /* We can indeed still hit this case, typically via builtin
9453 expanders calling save_expr immediately before expanding
9454 something. Assume this means that we only have to deal
9455 with non-BLKmode values. */
9456 gcc_assert (GET_MODE (ret) != BLKmode);
9457
9458 val = build_decl (EXPR_LOCATION (exp),
9459 VAR_DECL, NULL, TREE_TYPE (exp));
9460 DECL_ARTIFICIAL (val) = 1;
9461 DECL_IGNORED_P (val) = 1;
9462 treeop0 = val;
9463 TREE_OPERAND (exp, 0) = treeop0;
9464 SAVE_EXPR_RESOLVED_P (exp) = 1;
9465
9466 if (!CONSTANT_P (ret))
9467 ret = copy_to_reg (ret);
9468 SET_DECL_RTL (val, ret);
9469 }
9470
9471 return ret;
9472 }
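/* Net effect: the first expansion of a SAVE_EXPR copies its value into
   a pseudo pinned to an artificial VAR_DECL; later expansions of the
   same node see SAVE_EXPR_RESOLVED_P and simply re-expand that decl,
   returning the saved register.  */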
9473
9474
9475 case CONSTRUCTOR:
9476 /* If we don't need the result, just ensure we evaluate any
9477 subexpressions. */
9478 if (ignore)
9479 {
9480 unsigned HOST_WIDE_INT idx;
9481 tree value;
9482
9483 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9484 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9485
9486 return const0_rtx;
9487 }
9488
9489 return expand_constructor (exp, target, modifier, false);
9490
9491 case TARGET_MEM_REF:
9492 {
9493 addr_space_t as
9494 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9495 struct mem_address addr;
9496 enum insn_code icode;
9497 unsigned int align;
9498
9499 get_address_description (exp, &addr);
9500 op0 = addr_for_mem_ref (&addr, as, true);
9501 op0 = memory_address_addr_space (mode, op0, as);
9502 temp = gen_rtx_MEM (mode, op0);
9503 set_mem_attributes (temp, exp, 0);
9504 set_mem_addr_space (temp, as);
9505 align = get_object_or_type_alignment (exp);
9506 if (modifier != EXPAND_WRITE
9507 && mode != BLKmode
9508 && align < GET_MODE_ALIGNMENT (mode)
9509 /* If the target does not have special handling for unaligned
9510 loads of this mode, then it can use regular moves for them. */
9511 && ((icode = optab_handler (movmisalign_optab, mode))
9512 != CODE_FOR_nothing))
9513 {
9514 struct expand_operand ops[2];
9515
9516 /* We've already validated the memory, and we're creating a
9517 new pseudo destination. The predicates really can't fail,
9518 nor can the generator. */
9519 create_output_operand (&ops[0], NULL_RTX, mode);
9520 create_fixed_operand (&ops[1], temp);
9521 expand_insn (icode, 2, ops);
9522 return ops[0].value;
9523 }
9524 return temp;
9525 }
9526
9527 case MEM_REF:
9528 {
9529 addr_space_t as
9530 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9531 enum machine_mode address_mode;
9532 tree base = TREE_OPERAND (exp, 0);
9533 gimple def_stmt;
9534 enum insn_code icode;
9535 unsigned align;
9536 /* Handle expansion of non-aliased memory with non-BLKmode; such
9537 a value might end up in a register. */
9538 if (mem_ref_refers_to_non_mem_p (exp))
9539 {
9540 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9541 tree bit_offset;
9542 tree bftype;
9543 base = TREE_OPERAND (base, 0);
9544 if (offset == 0
9545 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9546 && (GET_MODE_BITSIZE (DECL_MODE (base))
9547 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9548 return expand_expr (build1 (VIEW_CONVERT_EXPR,
9549 TREE_TYPE (exp), base),
9550 target, tmode, modifier);
9551 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9552 bftype = TREE_TYPE (base);
9553 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9554 bftype = TREE_TYPE (exp);
9555 else
9556 {
9557 temp = assign_stack_temp (DECL_MODE (base),
9558 GET_MODE_SIZE (DECL_MODE (base)));
9559 store_expr (base, temp, 0, false);
9560 temp = adjust_address (temp, BLKmode, offset);
9561 set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9562 return temp;
9563 }
9564 return expand_expr (build3 (BIT_FIELD_REF, bftype,
9565 base,
9566 TYPE_SIZE (TREE_TYPE (exp)),
9567 bit_offset),
9568 target, tmode, modifier);
9569 }
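/* E.g. loading an int from inside a small struct that lives in a
   register: rather than spilling the struct, the access is rewritten
   above as BIT_FIELD_REF <base, 32, offset * 8>, or as a
   VIEW_CONVERT_EXPR when the offset is 0 and the sizes match; only
   the BLKmode case falls back to a stack temporary.  */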
9570 address_mode = targetm.addr_space.address_mode (as);
9571 base = TREE_OPERAND (exp, 0);
9572 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9573 {
9574 tree mask = gimple_assign_rhs2 (def_stmt);
9575 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9576 gimple_assign_rhs1 (def_stmt), mask);
9577 TREE_OPERAND (exp, 0) = base;
9578 }
9579 align = get_object_or_type_alignment (exp);
9580 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9581 op0 = memory_address_addr_space (address_mode, op0, as);
9582 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9583 {
9584 rtx off
9585 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9586 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9587 }
9588 op0 = memory_address_addr_space (mode, op0, as);
9589 temp = gen_rtx_MEM (mode, op0);
9590 set_mem_attributes (temp, exp, 0);
9591 set_mem_addr_space (temp, as);
9592 if (TREE_THIS_VOLATILE (exp))
9593 MEM_VOLATILE_P (temp) = 1;
9594 if (modifier != EXPAND_WRITE
9595 && mode != BLKmode
9596 && align < GET_MODE_ALIGNMENT (mode))
9597 {
9598 if ((icode = optab_handler (movmisalign_optab, mode))
9599 != CODE_FOR_nothing)
9600 {
9601 struct expand_operand ops[2];
9602
9603 /* We've already validated the memory, and we're creating a
9604 new pseudo destination. The predicates really can't fail,
9605 nor can the generator. */
9606 create_output_operand (&ops[0], NULL_RTX, mode);
9607 create_fixed_operand (&ops[1], temp);
9608 expand_insn (icode, 2, ops);
9609 return ops[0].value;
9610 }
9611 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9612 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9613 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9614 true, (modifier == EXPAND_STACK_PARM
9615 ? NULL_RTX : target),
9616 mode, mode);
9617 }
9618 return temp;
9619 }
9620
9621 case ARRAY_REF:
9622
9623 {
9624 tree array = treeop0;
9625 tree index = treeop1;
9626
9627 /* Fold an expression like: "foo"[2].
9628 This is not done in fold so it won't happen inside &.
9629 Don't fold if this is for wide characters since it's too
9630 difficult to do correctly and this is a very rare case. */
9631
9632 if (modifier != EXPAND_CONST_ADDRESS
9633 && modifier != EXPAND_INITIALIZER
9634 && modifier != EXPAND_MEMORY)
9635 {
9636 tree t = fold_read_from_constant_string (exp);
9637
9638 if (t)
9639 return expand_expr (t, target, tmode, modifier);
9640 }
9641
9642 /* If this is a constant index into a constant array,
9643 just get the value from the array. Handle both the cases when
9644 we have an explicit constructor and when our operand is a variable
9645 that was declared const. */
9646
9647 if (modifier != EXPAND_CONST_ADDRESS
9648 && modifier != EXPAND_INITIALIZER
9649 && modifier != EXPAND_MEMORY
9650 && TREE_CODE (array) == CONSTRUCTOR
9651 && ! TREE_SIDE_EFFECTS (array)
9652 && TREE_CODE (index) == INTEGER_CST)
9653 {
9654 unsigned HOST_WIDE_INT ix;
9655 tree field, value;
9656
9657 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9658 field, value)
9659 if (tree_int_cst_equal (field, index))
9660 {
9661 if (!TREE_SIDE_EFFECTS (value))
9662 return expand_expr (fold (value), target, tmode, modifier);
9663 break;
9664 }
9665 }
9666
9667 else if (optimize >= 1
9668 && modifier != EXPAND_CONST_ADDRESS
9669 && modifier != EXPAND_INITIALIZER
9670 && modifier != EXPAND_MEMORY
9671 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9672 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9673 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9674 && const_value_known_p (array))
9675 {
9676 if (TREE_CODE (index) == INTEGER_CST)
9677 {
9678 tree init = DECL_INITIAL (array);
9679
9680 if (TREE_CODE (init) == CONSTRUCTOR)
9681 {
9682 unsigned HOST_WIDE_INT ix;
9683 tree field, value;
9684
9685 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9686 field, value)
9687 if (tree_int_cst_equal (field, index))
9688 {
9689 if (TREE_SIDE_EFFECTS (value))
9690 break;
9691
9692 if (TREE_CODE (value) == CONSTRUCTOR)
9693 {
9694 /* If VALUE is a CONSTRUCTOR, this
9695 optimization is only useful if
9696 this doesn't store the CONSTRUCTOR
9697 into memory. If it does, it is more
9698 efficient to just load the data from
9699 the array directly. */
9700 rtx ret = expand_constructor (value, target,
9701 modifier, true);
9702 if (ret == NULL_RTX)
9703 break;
9704 }
9705
9706 return expand_expr (fold (value), target, tmode,
9707 modifier);
9708 }
9709 }
9710 else if (TREE_CODE (init) == STRING_CST)
9711 {
9712 tree index1 = index;
9713 tree low_bound = array_ref_low_bound (exp);
9714 index1 = fold_convert_loc (loc, sizetype,
9715 treeop1);
9716
9717 /* Optimize the special case of a zero lower bound.
9718
9719 We convert the low_bound to sizetype to avoid some problems
9720 with constant folding. (E.g. suppose the lower bound is 1,
9721 and its mode is QI. Without the conversion, (ARRAY
9722 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9723 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9724
9725 if (! integer_zerop (low_bound))
9726 index1 = size_diffop_loc (loc, index1,
9727 fold_convert_loc (loc, sizetype,
9728 low_bound));
9729
9730 if (0 > compare_tree_int (index1,
9731 TREE_STRING_LENGTH (init)))
9732 {
9733 tree type = TREE_TYPE (TREE_TYPE (init));
9734 enum machine_mode mode = TYPE_MODE (type);
9735
9736 if (GET_MODE_CLASS (mode) == MODE_INT
9737 && GET_MODE_SIZE (mode) == 1)
9738 return gen_int_mode (TREE_STRING_POINTER (init)
9739 [TREE_INT_CST_LOW (index1)],
9740 mode);
9741 }
9742 }
9743 }
9744 }
9745 }
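/* Illustration: for static const char s[] = "abc", the read s[1] is
   folded right here into the QImode immediate 'b' via gen_int_mode,
   provided the index is below TREE_STRING_LENGTH.  */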
9746 goto normal_inner_ref;
9747
9748 case COMPONENT_REF:
9749 /* If the operand is a CONSTRUCTOR, we can just extract the
9750 appropriate field if it is present. */
9751 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9752 {
9753 unsigned HOST_WIDE_INT idx;
9754 tree field, value;
9755
9756 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9757 idx, field, value)
9758 if (field == treeop1
9759 /* We can normally use the value of the field in the
9760 CONSTRUCTOR. However, if this is a bitfield in
9761 an integral mode that we can fit in a HOST_WIDE_INT,
9762 we must mask only the number of bits in the bitfield,
9763 since this is done implicitly by the constructor. If
9764 the bitfield does not meet either of those conditions,
9765 we can't do this optimization. */
9766 && (! DECL_BIT_FIELD (field)
9767 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9768 && (GET_MODE_PRECISION (DECL_MODE (field))
9769 <= HOST_BITS_PER_WIDE_INT))))
9770 {
9771 if (DECL_BIT_FIELD (field)
9772 && modifier == EXPAND_STACK_PARM)
9773 target = 0;
9774 op0 = expand_expr (value, target, tmode, modifier);
9775 if (DECL_BIT_FIELD (field))
9776 {
9777 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9778 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9779
9780 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9781 {
9782 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9783 op0 = expand_and (imode, op0, op1, target);
9784 }
9785 else
9786 {
9787 int count = GET_MODE_PRECISION (imode) - bitsize;
9788
9789 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9790 target, 0);
9791 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9792 target, 0);
9793 }
9794 }
9795
9796 return op0;
9797 }
9798 }
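/* E.g. pulling a 3-bit field out of the CONSTRUCTOR value: unsigned
   fields are masked with (1 << 3) - 1; signed fields are shifted left
   by GET_MODE_PRECISION (imode) - 3 and then arithmetic-shifted back
   right so the sign bit is replicated.  */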
9799 goto normal_inner_ref;
9800
9801 case BIT_FIELD_REF:
9802 case ARRAY_RANGE_REF:
9803 normal_inner_ref:
9804 {
9805 enum machine_mode mode1, mode2;
9806 HOST_WIDE_INT bitsize, bitpos;
9807 tree offset;
9808 int volatilep = 0, must_force_mem;
9809 bool packedp = false;
9810 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9811 &mode1, &unsignedp, &volatilep, true);
9812 rtx orig_op0, memloc;
9813 bool mem_attrs_from_type = false;
9814
9815 /* If we got back the original object, something is wrong. Perhaps
9816 we are evaluating an expression too early. In any event, don't
9817 infinitely recurse. */
9818 gcc_assert (tem != exp);
9819
9820 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9821 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9822 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9823 packedp = true;
9824
9825 /* If TEM's type is a union of variable size, pass TARGET to the inner
9826 computation, since it will need a temporary and TARGET is known
9827 to suffice. This occurs in unchecked conversion in Ada. */
9828 orig_op0 = op0
9829 = expand_expr (tem,
9830 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9831 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9832 != INTEGER_CST)
9833 && modifier != EXPAND_STACK_PARM
9834 ? target : NULL_RTX),
9835 VOIDmode,
9836 (modifier == EXPAND_INITIALIZER
9837 || modifier == EXPAND_CONST_ADDRESS
9838 || modifier == EXPAND_STACK_PARM)
9839 ? modifier : EXPAND_NORMAL);
9840
9841
9842 /* If the bitfield is volatile, we want to access it in the
9843 field's mode, not the computed mode.
9844 If a MEM has VOIDmode (external with incomplete type),
9845 use BLKmode for it instead. */
9846 if (MEM_P (op0))
9847 {
9848 if (volatilep && flag_strict_volatile_bitfields > 0)
9849 op0 = adjust_address (op0, mode1, 0);
9850 else if (GET_MODE (op0) == VOIDmode)
9851 op0 = adjust_address (op0, BLKmode, 0);
9852 }
9853
9854 mode2
9855 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9856
9857 /* If we have either an offset, a BLKmode result, or a reference
9858 outside the underlying object, we must force it to memory.
9859 Such a case can occur in Ada if we have unchecked conversion
9860 of an expression from a scalar type to an aggregate type or
9861 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9862 passed a partially uninitialized object or a view-conversion
9863 to a larger size. */
9864 must_force_mem = (offset
9865 || mode1 == BLKmode
9866 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9867
9868 /* Handle CONCAT first. */
9869 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9870 {
9871 if (bitpos == 0
9872 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9873 return op0;
9874 if (bitpos == 0
9875 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9876 && bitsize)
9877 {
9878 op0 = XEXP (op0, 0);
9879 mode2 = GET_MODE (op0);
9880 }
9881 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9882 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9883 && bitpos
9884 && bitsize)
9885 {
9886 op0 = XEXP (op0, 1);
9887 bitpos = 0;
9888 mode2 = GET_MODE (op0);
9889 }
9890 else
9891 /* Otherwise force into memory. */
9892 must_force_mem = 1;
9893 }
9894
9895 /* If this is a constant, put it in a register if it is a legitimate
9896 constant and we don't need a memory reference. */
9897 if (CONSTANT_P (op0)
9898 && mode2 != BLKmode
9899 && targetm.legitimate_constant_p (mode2, op0)
9900 && !must_force_mem)
9901 op0 = force_reg (mode2, op0);
9902
9903 /* Otherwise, if this is a constant, try to force it to the constant
9904 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9905 is a legitimate constant. */
9906 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9907 op0 = validize_mem (memloc);
9908
9909 /* Otherwise, if this is a constant or the object is not in memory
9910 and needs to be, put it there. */
9911 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9912 {
9913 tree nt = build_qualified_type (TREE_TYPE (tem),
9914 (TYPE_QUALS (TREE_TYPE (tem))
9915 | TYPE_QUAL_CONST));
9916 memloc = assign_temp (nt, 1, 1);
9917 emit_move_insn (memloc, op0);
9918 op0 = memloc;
9919 mem_attrs_from_type = true;
9920 }
9921
9922 if (offset)
9923 {
9924 enum machine_mode address_mode;
9925 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9926 EXPAND_SUM);
9927
9928 gcc_assert (MEM_P (op0));
9929
9930 address_mode = get_address_mode (op0);
9931 if (GET_MODE (offset_rtx) != address_mode)
9932 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9933
9934 if (GET_MODE (op0) == BLKmode
9935 /* A constant address in OP0 can have VOIDmode; we must
9936 not try to call force_reg in that case. */
9937 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9938 && bitsize != 0
9939 && (bitpos % bitsize) == 0
9940 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9941 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9942 {
9943 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9944 bitpos = 0;
9945 }
9946
9947 op0 = offset_address (op0, offset_rtx,
9948 highest_pow2_factor (offset));
9949 }
9950
9951 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9952 record its alignment as BIGGEST_ALIGNMENT. */
9953 if (MEM_P (op0) && bitpos == 0 && offset != 0
9954 && is_aligning_offset (offset, tem))
9955 set_mem_align (op0, BIGGEST_ALIGNMENT);
9956
9957 /* Don't forget about volatility even if this is a bitfield. */
9958 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9959 {
9960 if (op0 == orig_op0)
9961 op0 = copy_rtx (op0);
9962
9963 MEM_VOLATILE_P (op0) = 1;
9964 }
9965
9966 /* In cases where an aligned union has an unaligned object
9967 as a field, we might be extracting a BLKmode value from
9968 an integer-mode (e.g., SImode) object. Handle this case
9969 by doing the extract into an object as wide as the field
9970 (which we know to be the width of a basic mode), then
9971 storing into memory, and changing the mode to BLKmode. */
9972 if (mode1 == VOIDmode
9973 || REG_P (op0) || GET_CODE (op0) == SUBREG
9974 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9975 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9976 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9977 && modifier != EXPAND_CONST_ADDRESS
9978 && modifier != EXPAND_INITIALIZER)
9979 /* If the field is volatile, we always want an aligned
9980 access. Do this in the following two situations:
9981 1. the access is not already naturally
9982 aligned, otherwise "normal" (non-bitfield) volatile fields
9983 become non-addressable.
9984 2. the bitsize is narrower than the access size. Need
9985 to extract bitfields from the access. */
9986 || (volatilep && flag_strict_volatile_bitfields > 0
9987 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9988 || (mode1 != BLKmode
9989 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9990 /* If the field isn't aligned enough to fetch as a memref,
9991 fetch it as a bit field. */
9992 || (mode1 != BLKmode
9993 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9994 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9995 || (MEM_P (op0)
9996 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9997 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9998 && ((modifier == EXPAND_CONST_ADDRESS
9999 || modifier == EXPAND_INITIALIZER)
10000 ? STRICT_ALIGNMENT
10001 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10002 || (bitpos % BITS_PER_UNIT != 0)))
10003 /* If the type and the field are a constant size and the
10004 size of the type isn't the same size as the bitfield,
10005 we must use bitfield operations. */
10006 || (bitsize >= 0
10007 && TYPE_SIZE (TREE_TYPE (exp))
10008 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10009 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10010 bitsize)))
10011 {
10012 enum machine_mode ext_mode = mode;
10013
10014 if (ext_mode == BLKmode
10015 && ! (target != 0 && MEM_P (op0)
10016 && MEM_P (target)
10017 && bitpos % BITS_PER_UNIT == 0))
10018 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10019
10020 if (ext_mode == BLKmode)
10021 {
10022 if (target == 0)
10023 target = assign_temp (type, 1, 1);
10024
10025 if (bitsize == 0)
10026 return target;
10027
10028 /* In this case, BITPOS must start at a byte boundary and
10029 TARGET, if specified, must be a MEM. */
10030 gcc_assert (MEM_P (op0)
10031 && (!target || MEM_P (target))
10032 && !(bitpos % BITS_PER_UNIT));
10033
10034 emit_block_move (target,
10035 adjust_address (op0, VOIDmode,
10036 bitpos / BITS_PER_UNIT),
10037 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10038 / BITS_PER_UNIT),
10039 (modifier == EXPAND_STACK_PARM
10040 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10041
10042 return target;
10043 }
10044
10045 op0 = validize_mem (op0);
10046
10047 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10048 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10049
10050 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10051 (modifier == EXPAND_STACK_PARM
10052 ? NULL_RTX : target),
10053 ext_mode, ext_mode);
10054
10055 /* If the result is a record type and BITSIZE is narrower than
10056 the mode of OP0, an integral mode, and this is a big endian
10057 machine, we must put the field into the high-order bits. */
10058 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10059 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10060 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10061 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10062 GET_MODE_BITSIZE (GET_MODE (op0))
10063 - bitsize, op0, 1);
10064
10065 /* If the result type is BLKmode, store the data into a temporary
10066 of the appropriate type, but with the mode corresponding to the
10067 mode for the data we have (op0's mode). It's tempting to make
10068 this a constant type, since we know it's only being stored once,
10069 but that can cause problems if we are taking the address of this
10070 COMPONENT_REF because the MEM of any reference via that address
10071 will have flags corresponding to the type, which will not
10072 necessarily be constant. */
10073 if (mode == BLKmode)
10074 {
10075 rtx new_rtx;
10076
10077 new_rtx = assign_stack_temp_for_type (ext_mode,
10078 GET_MODE_BITSIZE (ext_mode),
10079 type);
10080 emit_move_insn (new_rtx, op0);
10081 op0 = copy_rtx (new_rtx);
10082 PUT_MODE (op0, BLKmode);
10083 }
10084
10085 return op0;
10086 }
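/* E.g. a 32-bit field of a BLKmode type that cannot be block copied:
   ext_mode becomes mode_for_size (32, MODE_INT, 1), i.e. SImode,
   extract_bit_field pulls the value out in that mode, and the BLKmode
   repackaging just above then parks it in a stack temporary whose
   mode is flipped back to BLKmode.  */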
10087
10088 /* If the result is BLKmode, use that to access the object
10089 now as well. */
10090 if (mode == BLKmode)
10091 mode1 = BLKmode;
10092
10093 /* Get a reference to just this component. */
10094 if (modifier == EXPAND_CONST_ADDRESS
10095 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10096 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10097 else
10098 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10099
10100 if (op0 == orig_op0)
10101 op0 = copy_rtx (op0);
10102
10103 /* If op0 is a temporary because of forcing to memory, pass only the
10104 type to set_mem_attributes so that the original expression is never
10105 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10106 if (mem_attrs_from_type)
10107 set_mem_attributes (op0, type, 0);
10108 else
10109 set_mem_attributes (op0, exp, 0);
10110
10111 if (REG_P (XEXP (op0, 0)))
10112 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10113
10114 MEM_VOLATILE_P (op0) |= volatilep;
10115 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10116 || modifier == EXPAND_CONST_ADDRESS
10117 || modifier == EXPAND_INITIALIZER)
10118 return op0;
10119 else if (target == 0)
10120 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10121
10122 convert_move (target, op0, unsignedp);
10123 return target;
10124 }
10125
10126 case OBJ_TYPE_REF:
10127 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10128
10129 case CALL_EXPR:
10130 /* All valid uses of __builtin_va_arg_pack () are removed during
10131 inlining. */
10132 if (CALL_EXPR_VA_ARG_PACK (exp))
10133 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10134 {
10135 tree fndecl = get_callee_fndecl (exp), attr;
10136
10137 if (fndecl
10138 && (attr = lookup_attribute ("error",
10139 DECL_ATTRIBUTES (fndecl))) != NULL)
10140 error ("%Kcall to %qs declared with attribute error: %s",
10141 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10142 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10143 if (fndecl
10144 && (attr = lookup_attribute ("warning",
10145 DECL_ATTRIBUTES (fndecl))) != NULL)
10146 warning_at (tree_nonartificial_location (exp),
10147 0, "%Kcall to %qs declared with attribute warning: %s",
10148 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10149 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10150
10151 /* Check for a built-in function. */
10152 if (fndecl && DECL_BUILT_IN (fndecl))
10153 {
10154 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10155 return expand_builtin (exp, target, subtarget, tmode, ignore);
10156 }
10157 }
10158 return expand_call (exp, target, ignore);
10159
10160 case VIEW_CONVERT_EXPR:
10161 op0 = NULL_RTX;
10162
10163 /* If we are converting to BLKmode, try to avoid an intermediate
10164 temporary by fetching an inner memory reference. */
10165 if (mode == BLKmode
10166 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10167 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10168 && handled_component_p (treeop0))
10169 {
10170 enum machine_mode mode1;
10171 HOST_WIDE_INT bitsize, bitpos;
10172 tree offset;
10173 int unsignedp;
10174 int volatilep = 0;
10175 tree tem
10176 = get_inner_reference (treeop0, &bitsize, &bitpos,
10177 &offset, &mode1, &unsignedp, &volatilep,
10178 true);
10179 rtx orig_op0;
10180
10181 /* ??? We should work harder and deal with non-zero offsets. */
10182 if (!offset
10183 && (bitpos % BITS_PER_UNIT) == 0
10184 && bitsize >= 0
10185 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10186 {
10187 /* See the normal_inner_ref case for the rationale. */
10188 orig_op0
10189 = expand_expr (tem,
10190 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10191 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10192 != INTEGER_CST)
10193 && modifier != EXPAND_STACK_PARM
10194 ? target : NULL_RTX),
10195 VOIDmode,
10196 (modifier == EXPAND_INITIALIZER
10197 || modifier == EXPAND_CONST_ADDRESS
10198 || modifier == EXPAND_STACK_PARM)
10199 ? modifier : EXPAND_NORMAL);
10200
10201 if (MEM_P (orig_op0))
10202 {
10203 op0 = orig_op0;
10204
10205 /* Get a reference to just this component. */
10206 if (modifier == EXPAND_CONST_ADDRESS
10207 || modifier == EXPAND_SUM
10208 || modifier == EXPAND_INITIALIZER)
10209 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10210 else
10211 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10212
10213 if (op0 == orig_op0)
10214 op0 = copy_rtx (op0);
10215
10216 set_mem_attributes (op0, treeop0, 0);
10217 if (REG_P (XEXP (op0, 0)))
10218 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10219
10220 MEM_VOLATILE_P (op0) |= volatilep;
10221 }
10222 }
10223 }
10224
10225 if (!op0)
10226 op0 = expand_expr (treeop0,
10227 NULL_RTX, VOIDmode, modifier);
10228
10229 /* If the input and output modes are both the same, we are done. */
10230 if (mode == GET_MODE (op0))
10231 ;
10232 /* If neither mode is BLKmode, and both modes are the same size
10233 then we can use gen_lowpart. */
10234 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10235 && (GET_MODE_PRECISION (mode)
10236 == GET_MODE_PRECISION (GET_MODE (op0)))
10237 && !COMPLEX_MODE_P (GET_MODE (op0)))
10238 {
10239 if (GET_CODE (op0) == SUBREG)
10240 op0 = force_reg (GET_MODE (op0), op0);
10241 temp = gen_lowpart_common (mode, op0);
10242 if (temp)
10243 op0 = temp;
10244 else
10245 {
10246 if (!REG_P (op0) && !MEM_P (op0))
10247 op0 = force_reg (GET_MODE (op0), op0);
10248 op0 = gen_lowpart (mode, op0);
10249 }
10250 }
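/* E.g. VIEW_CONVERT_EXPR <int> applied to a float on a 32-bit target:
   SImode and SFmode have equal precision, so gen_lowpart simply
   reinterprets the bits without emitting a conversion.  */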
10251 /* If both types are integral, convert from one mode to the other. */
10252 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10253 op0 = convert_modes (mode, GET_MODE (op0), op0,
10254 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10255 /* As a last resort, spill op0 to memory, and reload it in a
10256 different mode. */
10257 else if (!MEM_P (op0))
10258 {
10259 /* If the operand is not a MEM, force it into memory. Since we
10260 are going to be changing the mode of the MEM, don't call
10261 force_const_mem for constants because we don't allow pool
10262 constants to change mode. */
10263 tree inner_type = TREE_TYPE (treeop0);
10264
10265 gcc_assert (!TREE_ADDRESSABLE (exp));
10266
10267 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10268 target
10269 = assign_stack_temp_for_type
10270 (TYPE_MODE (inner_type),
10271 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10272
10273 emit_move_insn (target, op0);
10274 op0 = target;
10275 }
10276
10277 /* At this point, OP0 is in the correct mode. If the output type is
10278 such that the operand is known to be aligned, indicate that it is.
10279 Otherwise, we need only be concerned about alignment for non-BLKmode
10280 results. */
10281 if (MEM_P (op0))
10282 {
10283 enum insn_code icode;
10284
10285 op0 = copy_rtx (op0);
10286
10287 if (TYPE_ALIGN_OK (type))
10288 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10289 else if (mode != BLKmode
10290 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10291 /* If the target does have special handling for unaligned
10292 loads of this mode, then use them. */
10293 && ((icode = optab_handler (movmisalign_optab, mode))
10294 != CODE_FOR_nothing))
10295 {
10296 rtx reg, insn;
10297
10298 op0 = adjust_address (op0, mode, 0);
10299 /* We've already validated the memory, and we're creating a
10300 new pseudo destination. The predicates really can't
10301 fail. */
10302 reg = gen_reg_rtx (mode);
10303
10304 /* Nor can the insn generator. */
10305 insn = GEN_FCN (icode) (reg, op0);
10306 emit_insn (insn);
10307 return reg;
10308 }
10309 else if (STRICT_ALIGNMENT
10310 && mode != BLKmode
10311 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10312 {
10313 tree inner_type = TREE_TYPE (treeop0);
10314 HOST_WIDE_INT temp_size
10315 = MAX (int_size_in_bytes (inner_type),
10316 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10317 rtx new_rtx
10318 = assign_stack_temp_for_type (mode, temp_size, type);
10319 rtx new_with_op0_mode
10320 = adjust_address (new_rtx, GET_MODE (op0), 0);
10321
10322 gcc_assert (!TREE_ADDRESSABLE (exp));
10323
10324 if (GET_MODE (op0) == BLKmode)
10325 emit_block_move (new_with_op0_mode, op0,
10326 GEN_INT (GET_MODE_SIZE (mode)),
10327 (modifier == EXPAND_STACK_PARM
10328 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10329 else
10330 emit_move_insn (new_with_op0_mode, op0);
10331
10332 op0 = new_rtx;
10333 }
10334
10335 op0 = adjust_address (op0, mode, 0);
10336 }
10337
10338 return op0;
10339
10340 case MODIFY_EXPR:
10341 {
10342 tree lhs = treeop0;
10343 tree rhs = treeop1;
10344 gcc_assert (ignore);
10345
10346 /* Check for |= or &= of a bitfield of size one into another bitfield
10347 of size 1. In this case, (unless we need the result of the
10348 assignment) we can do this more efficiently with a
10349 test followed by an assignment, if necessary.
10350
10351 ??? At this point we can't get a BIT_FIELD_REF here. But if
10352 things change so that we do, this code should be enhanced to
10353 support it. */
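/* Sketch of the transformation: for one-bit fields, "s.a |= s.b"
   becomes "if (s.b) s.a = 1;" and "s.a &= s.b" becomes
   "if (!s.b) s.a = 0;", i.e. a test plus a conditional store instead
   of a read-modify-write.  */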
10354 if (TREE_CODE (lhs) == COMPONENT_REF
10355 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10356 || TREE_CODE (rhs) == BIT_AND_EXPR)
10357 && TREE_OPERAND (rhs, 0) == lhs
10358 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10359 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10360 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10361 {
10362 rtx label = gen_label_rtx ();
10363 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10364 do_jump (TREE_OPERAND (rhs, 1),
10365 value ? label : 0,
10366 value ? 0 : label, -1);
10367 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10368 MOVE_NONTEMPORAL (exp));
10369 do_pending_stack_adjust ();
10370 emit_label (label);
10371 return const0_rtx;
10372 }
10373
10374 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
10375 return const0_rtx;
10376 }
10377
10378 case ADDR_EXPR:
10379 return expand_expr_addr_expr (exp, target, tmode, modifier);
10380
10381 case REALPART_EXPR:
10382 op0 = expand_normal (treeop0);
10383 return read_complex_part (op0, false);
10384
10385 case IMAGPART_EXPR:
10386 op0 = expand_normal (treeop0);
10387 return read_complex_part (op0, true);
10388
10389 case RETURN_EXPR:
10390 case LABEL_EXPR:
10391 case GOTO_EXPR:
10392 case SWITCH_EXPR:
10393 case ASM_EXPR:
10394 /* Expanded in cfgexpand.c. */
10395 gcc_unreachable ();
10396
10397 case TRY_CATCH_EXPR:
10398 case CATCH_EXPR:
10399 case EH_FILTER_EXPR:
10400 case TRY_FINALLY_EXPR:
10401 /* Lowered by tree-eh.c. */
10402 gcc_unreachable ();
10403
10404 case WITH_CLEANUP_EXPR:
10405 case CLEANUP_POINT_EXPR:
10406 case TARGET_EXPR:
10407 case CASE_LABEL_EXPR:
10408 case VA_ARG_EXPR:
10409 case BIND_EXPR:
10410 case INIT_EXPR:
10411 case CONJ_EXPR:
10412 case COMPOUND_EXPR:
10413 case PREINCREMENT_EXPR:
10414 case PREDECREMENT_EXPR:
10415 case POSTINCREMENT_EXPR:
10416 case POSTDECREMENT_EXPR:
10417 case LOOP_EXPR:
10418 case EXIT_EXPR:
10419 case COMPOUND_LITERAL_EXPR:
10420 /* Lowered by gimplify.c. */
10421 gcc_unreachable ();
10422
10423 case FDESC_EXPR:
10424 /* Function descriptors are not valid except as
10425 initialization constants, and should not be expanded. */
10426 gcc_unreachable ();
10427
10428 case WITH_SIZE_EXPR:
10429 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10430 have pulled out the size to use in whatever context it needed. */
10431 return expand_expr_real (treeop0, original_target, tmode,
10432 modifier, alt_rtl);
10433
10434 default:
10435 return expand_expr_real_2 (&ops, target, tmode, modifier);
10436 }
10437 }
10438 \f
10439 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10440 signedness of TYPE), possibly returning the result in TARGET. */
10441 static rtx
10442 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10443 {
10444 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10445 if (target && GET_MODE (target) != GET_MODE (exp))
10446 target = 0;
10447 /* For constant values, reduce using build_int_cst_type. */
10448 if (CONST_INT_P (exp))
10449 {
10450 HOST_WIDE_INT value = INTVAL (exp);
10451 tree t = build_int_cst_type (type, value);
10452 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10453 }
10454 else if (TYPE_UNSIGNED (type))
10455 {
10456 rtx mask = immed_double_int_const (double_int_mask (prec),
10457 GET_MODE (exp));
10458 return expand_and (GET_MODE (exp), exp, mask, target);
10459 }
10460 else
10461 {
10462 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10463 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10464 exp, count, target, 0);
10465 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10466 exp, count, target, 0);
10467 }
10468 }
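/* Worked example (illustrative): reducing a QImode value X to a 3-bit
   type. Unsigned: X & 7, via the mask branch. Signed: X shifted left
   by 5 and then arithmetic-shifted right by 5, which replicates bit 2,
   the field's sign bit, through the upper bits.  */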
10469 \f
10470 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10471 when applied to the address of EXP produces an address known to be
10472 aligned more than BIGGEST_ALIGNMENT. */
10473
10474 static int
10475 is_aligning_offset (const_tree offset, const_tree exp)
10476 {
10477 /* Strip off any conversions. */
10478 while (CONVERT_EXPR_P (offset))
10479 offset = TREE_OPERAND (offset, 0);
10480
10481   /* We must now have a BIT_AND_EXPR with a constant that is one less than
10482      a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
10483 if (TREE_CODE (offset) != BIT_AND_EXPR
10484 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10485 || compare_tree_int (TREE_OPERAND (offset, 1),
10486 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10487       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10488 return 0;
10489
10490 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10491 It must be NEGATE_EXPR. Then strip any more conversions. */
10492 offset = TREE_OPERAND (offset, 0);
10493 while (CONVERT_EXPR_P (offset))
10494 offset = TREE_OPERAND (offset, 0);
10495
10496 if (TREE_CODE (offset) != NEGATE_EXPR)
10497 return 0;
10498
10499 offset = TREE_OPERAND (offset, 0);
10500 while (CONVERT_EXPR_P (offset))
10501 offset = TREE_OPERAND (offset, 0);
10502
10503 /* This must now be the address of EXP. */
10504 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10505 }
10506 \f
10507 /* Return the tree node if ARG corresponds to a string constant or zero
10508 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10509 in bytes within the string that ARG is accessing. The type of the
10510 offset will be `sizetype'. */
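/* (Illustrative, source expressions hypothetical: given
   ARG == &"hello"[2], or the equivalent pointer form "hello" + 2,
   this returns the STRING_CST "hello" and sets *PTR_OFFSET to a
   sizetype constant 2.)  */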
10511
10512 tree
10513 string_constant (tree arg, tree *ptr_offset)
10514 {
10515 tree array, offset, lower_bound;
10516 STRIP_NOPS (arg);
10517
10518 if (TREE_CODE (arg) == ADDR_EXPR)
10519 {
10520 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10521 {
10522 *ptr_offset = size_zero_node;
10523 return TREE_OPERAND (arg, 0);
10524 }
10525 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10526 {
10527 array = TREE_OPERAND (arg, 0);
10528 offset = size_zero_node;
10529 }
10530 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10531 {
10532 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10533 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10534 if (TREE_CODE (array) != STRING_CST
10535 && TREE_CODE (array) != VAR_DECL)
10536 return 0;
10537
10538 /* Check if the array has a nonzero lower bound. */
10539 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10540 if (!integer_zerop (lower_bound))
10541 {
10542 /* If the offset and base aren't both constants, return 0. */
10543 if (TREE_CODE (lower_bound) != INTEGER_CST)
10544 return 0;
10545 if (TREE_CODE (offset) != INTEGER_CST)
10546 return 0;
10547 /* Adjust offset by the lower bound. */
10548 offset = size_diffop (fold_convert (sizetype, offset),
10549 fold_convert (sizetype, lower_bound));
10550 }
10551 }
10552 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10553 {
10554 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10555 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10556 if (TREE_CODE (array) != ADDR_EXPR)
10557 return 0;
10558 array = TREE_OPERAND (array, 0);
10559 if (TREE_CODE (array) != STRING_CST
10560 && TREE_CODE (array) != VAR_DECL)
10561 return 0;
10562 }
10563 else
10564 return 0;
10565 }
10566 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10567 {
10568 tree arg0 = TREE_OPERAND (arg, 0);
10569 tree arg1 = TREE_OPERAND (arg, 1);
10570
10571 STRIP_NOPS (arg0);
10572 STRIP_NOPS (arg1);
10573
10574 if (TREE_CODE (arg0) == ADDR_EXPR
10575 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10576 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10577 {
10578 array = TREE_OPERAND (arg0, 0);
10579 offset = arg1;
10580 }
10581 else if (TREE_CODE (arg1) == ADDR_EXPR
10582 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10583 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10584 {
10585 array = TREE_OPERAND (arg1, 0);
10586 offset = arg0;
10587 }
10588 else
10589 return 0;
10590 }
10591 else
10592 return 0;
10593
10594 if (TREE_CODE (array) == STRING_CST)
10595 {
10596 *ptr_offset = fold_convert (sizetype, offset);
10597 return array;
10598 }
10599 else if (TREE_CODE (array) == VAR_DECL
10600 || TREE_CODE (array) == CONST_DECL)
10601 {
10602 int length;
10603
10604 /* Variables initialized to string literals can be handled too. */
10605 if (!const_value_known_p (array)
10606 || !DECL_INITIAL (array)
10607 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10608 return 0;
10609
10610 /* Avoid const char foo[4] = "abcde"; */
10611 if (DECL_SIZE_UNIT (array) == NULL_TREE
10612 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10613 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10614 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10615 return 0;
10616
10617       /* If the variable is bigger than the string literal, OFFSET must be
10618 	 constant and within the bounds of the string literal.  */
10619 offset = fold_convert (sizetype, offset);
10620 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10621 && (! host_integerp (offset, 1)
10622 || compare_tree_int (offset, length) >= 0))
10623 return 0;
10624
10625 *ptr_offset = offset;
10626 return DECL_INITIAL (array);
10627 }
10628
10629 return 0;
10630 }
10631 \f
10632 /* Generate code to calculate the exploded expression described by OPS
10633    using a store-flag instruction, and return an rtx for the result.
10634    OPS reflects a comparison.
10635
10636 If TARGET is nonzero, store the result there if convenient.
10637
10638 Return zero if there is no suitable set-flag instruction
10639 available on this machine.
10640
10641 Once expand_expr has been called on the arguments of the comparison,
10642 we are committed to doing the store flag, since it is not safe to
10643 re-evaluate the expression. We emit the store-flag insn by calling
10644 emit_store_flag, but only expand the arguments if we have a reason
10645 to believe that emit_store_flag will be successful. If we think that
10646 it will, but it isn't, we have to simulate the store-flag with a
10647 set/jump/set sequence. */
10648
10649 static rtx
10650 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10651 {
10652 enum rtx_code code;
10653 tree arg0, arg1, type;
10654 tree tem;
10655 enum machine_mode operand_mode;
10656 int unsignedp;
10657 rtx op0, op1;
10658 rtx subtarget = target;
10659 location_t loc = ops->location;
10660
10661 arg0 = ops->op0;
10662 arg1 = ops->op1;
10663
10664 /* Don't crash if the comparison was erroneous. */
10665 if (arg0 == error_mark_node || arg1 == error_mark_node)
10666 return const0_rtx;
10667
10668 type = TREE_TYPE (arg0);
10669 operand_mode = TYPE_MODE (type);
10670 unsignedp = TYPE_UNSIGNED (type);
10671
10672 /* We won't bother with BLKmode store-flag operations because it would mean
10673 passing a lot of information to emit_store_flag. */
10674 if (operand_mode == BLKmode)
10675 return 0;
10676
10677 /* We won't bother with store-flag operations involving function pointers
10678 when function pointers must be canonicalized before comparisons. */
10679 #ifdef HAVE_canonicalize_funcptr_for_compare
10680 if (HAVE_canonicalize_funcptr_for_compare
10681 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10682 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10683 == FUNCTION_TYPE))
10684 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10685 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10686 == FUNCTION_TYPE))))
10687 return 0;
10688 #endif
10689
10690 STRIP_NOPS (arg0);
10691 STRIP_NOPS (arg1);
10692
10693 /* For vector typed comparisons emit code to generate the desired
10694 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10695 expander for this. */
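  /* (Illustrative: for a V4SImode comparison a < b this expands
     VEC_COND_EXPR <a < b, {-1,-1,-1,-1}, {0,0,0,0}>, producing an
     all-ones or all-zeros mask in each element.)  */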
10696 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10697 {
10698 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10699 tree if_true = constant_boolean_node (true, ops->type);
10700 tree if_false = constant_boolean_node (false, ops->type);
10701 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10702 }
10714
10715 /* Get the rtx comparison code to use. We know that EXP is a comparison
10716 operation of some type. Some comparisons against 1 and -1 can be
10717 converted to comparisons with zero. Do so here so that the tests
10718 below will be aware that we have a comparison with zero. These
10719 tests will not catch constants in the first operand, but constants
10720 are rarely passed as the first operand. */
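  /* (Illustrative: x < 1 is rewritten below as x <= 0 and x >= 1 as
     x > 0; for signed operands, x > -1 becomes x >= 0 and x <= -1
     becomes x < 0.)  */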
10721
10722 switch (ops->code)
10723 {
10724 case EQ_EXPR:
10725 code = EQ;
10726 break;
10727 case NE_EXPR:
10728 code = NE;
10729 break;
10730 case LT_EXPR:
10731 if (integer_onep (arg1))
10732 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10733 else
10734 code = unsignedp ? LTU : LT;
10735 break;
10736 case LE_EXPR:
10737 if (! unsignedp && integer_all_onesp (arg1))
10738 arg1 = integer_zero_node, code = LT;
10739 else
10740 code = unsignedp ? LEU : LE;
10741 break;
10742 case GT_EXPR:
10743 if (! unsignedp && integer_all_onesp (arg1))
10744 arg1 = integer_zero_node, code = GE;
10745 else
10746 code = unsignedp ? GTU : GT;
10747 break;
10748 case GE_EXPR:
10749 if (integer_onep (arg1))
10750 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10751 else
10752 code = unsignedp ? GEU : GE;
10753 break;
10754
10755 case UNORDERED_EXPR:
10756 code = UNORDERED;
10757 break;
10758 case ORDERED_EXPR:
10759 code = ORDERED;
10760 break;
10761 case UNLT_EXPR:
10762 code = UNLT;
10763 break;
10764 case UNLE_EXPR:
10765 code = UNLE;
10766 break;
10767 case UNGT_EXPR:
10768 code = UNGT;
10769 break;
10770 case UNGE_EXPR:
10771 code = UNGE;
10772 break;
10773 case UNEQ_EXPR:
10774 code = UNEQ;
10775 break;
10776 case LTGT_EXPR:
10777 code = LTGT;
10778 break;
10779
10780 default:
10781 gcc_unreachable ();
10782 }
10783
10784 /* Put a constant second. */
10785 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10786 || TREE_CODE (arg0) == FIXED_CST)
10787 {
10788 tem = arg0; arg0 = arg1; arg1 = tem;
10789 code = swap_condition (code);
10790 }
10791
10792 /* If this is an equality or inequality test of a single bit, we can
10793 do this by shifting the bit being tested to the low-order bit and
10794 masking the result with the constant 1. If the condition was EQ,
10795 we xor it with 1. This does not require an scc insn and is faster
10796 than an scc insn even if we have it.
10797
10798 The code to make this transformation was moved into fold_single_bit_test,
10799 so we just call into the folder and expand its result. */
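  /* (Illustrative: a test such as (x & 8) != 0 is rewritten as
     (x >> 3) & 1, and (x & 8) == 0 as ((x >> 3) & 1) ^ 1.)  */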
10800
10801 if ((code == NE || code == EQ)
10802 && integer_zerop (arg1)
10803 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10804 {
10805 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10806 if (srcstmt
10807 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10808 {
10809 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10810 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10811 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10812 gimple_assign_rhs1 (srcstmt),
10813 gimple_assign_rhs2 (srcstmt));
10814 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10815 if (temp)
10816 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10817 }
10818 }
10819
10820 if (! get_subtarget (target)
10821 || GET_MODE (subtarget) != operand_mode)
10822 subtarget = 0;
10823
10824 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10825
10826 if (target == 0)
10827 target = gen_reg_rtx (mode);
10828
10829 /* Try a cstore if possible. */
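  /* The last argument asks emit_store_flag_force for a -1/0 rather
     than a 1/0 result when the result type is a 1-bit signed type,
     whose "true" value is represented as -1.  */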
10830 return emit_store_flag_force (target, code, op0, op1,
10831 operand_mode, unsignedp,
10832 (TYPE_PRECISION (ops->type) == 1
10833 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10834 }
10835 \f
10836
10837 /* Stubs in case we haven't got a casesi insn. */
10838 #ifndef HAVE_casesi
10839 # define HAVE_casesi 0
10840 # define gen_casesi(a, b, c, d, e) (0)
10841 # define CODE_FOR_casesi CODE_FOR_nothing
10842 #endif
10843
10844 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10845 0 otherwise (i.e. if there is no casesi instruction). */
10846 int
10847 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10848 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10849 rtx fallback_label ATTRIBUTE_UNUSED)
10850 {
10851 struct expand_operand ops[5];
10852 enum machine_mode index_mode = SImode;
10853 rtx op1, op2, index;
10854
10855 if (! HAVE_casesi)
10856 return 0;
10857
10858 /* Convert the index to SImode. */
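  /* (Illustrative: if INDEX_TYPE is a DImode type on a target whose
     casesi pattern takes an SImode index, we compute
     INDEX_EXPR - MINVAL in DImode, jump to DEFAULT_LABEL if that
     exceeds RANGE, and only then truncate the in-range result to
     SImode.)  */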
10859 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10860 {
10861 enum machine_mode omode = TYPE_MODE (index_type);
10862 rtx rangertx = expand_normal (range);
10863
10864 /* We must handle the endpoints in the original mode. */
10865 index_expr = build2 (MINUS_EXPR, index_type,
10866 index_expr, minval);
10867 minval = integer_zero_node;
10868 index = expand_normal (index_expr);
10869 if (default_label)
10870 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10871 omode, 1, default_label);
10872 /* Now we can safely truncate. */
10873 index = convert_to_mode (index_mode, index, 0);
10874 }
10875 else
10876 {
10877 if (TYPE_MODE (index_type) != index_mode)
10878 {
10879 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10880 index_expr = fold_convert (index_type, index_expr);
10881 }
10882
10883 index = expand_normal (index_expr);
10884 }
10885
10886 do_pending_stack_adjust ();
10887
10888 op1 = expand_normal (minval);
10889 op2 = expand_normal (range);
10890
10891 create_input_operand (&ops[0], index, index_mode);
10892 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10893 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10894 create_fixed_operand (&ops[3], table_label);
10895 create_fixed_operand (&ops[4], (default_label
10896 ? default_label
10897 : fallback_label));
10898 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10899 return 1;
10900 }
10901
10902 /* Attempt to generate a tablejump instruction; same concept as try_casesi
10902    above.  */
10903 #ifndef HAVE_tablejump
10904 #define HAVE_tablejump 0
10905 #define gen_tablejump(x, y) (0)
10906 #endif
10907
10908 /* Subroutine of the next function.
10909
10910 INDEX is the value being switched on, with the lowest value
10911 in the table already subtracted.
10912 MODE is its expected mode (needed if INDEX is constant).
10913 RANGE is the length of the jump table.
10914 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10915
10916 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10917 index value is out of range. */
10918
10919 static void
10920 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10921 rtx default_label)
10922 {
10923 rtx temp, vector;
10924
10925 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10926 cfun->cfg->max_jumptable_ents = INTVAL (range);
10927
10928 /* Do an unsigned comparison (in the proper mode) between the index
10929 expression and the value which represents the length of the range.
10930 Since we just finished subtracting the lower bound of the range
10931 from the index expression, this comparison allows us to simultaneously
10932 check that the original index expression value is both greater than
10933 or equal to the minimum value of the range and less than or equal to
10934 the maximum value of the range. */
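  /* (Illustrative: for case values 100 ... 103, RANGE is 3 and INDEX
     has already had 100 subtracted; an original index of 99 yields
     (unsigned) -1, which compares GTU against 3 and so jumps to the
     default label.)  */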
10935
10936 if (default_label)
10937 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10938 default_label);
10939
10940 /* If index is in range, it must fit in Pmode.
10941 Convert to Pmode so we can index with it. */
10942 if (mode != Pmode)
10943 index = convert_to_mode (Pmode, index, 1);
10944
10945 /* Don't let a MEM slip through, because then INDEX that comes
10946 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10947 and break_out_memory_refs will go to work on it and mess it up. */
10948 #ifdef PIC_CASE_VECTOR_ADDRESS
10949 if (flag_pic && !REG_P (index))
10950 index = copy_to_mode_reg (Pmode, index);
10951 #endif
10952
10953 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10954 GET_MODE_SIZE, because this indicates how large insns are. The other
10955 uses should all be Pmode, because they are addresses. This code
10956 could fail if addresses and insns are not the same size. */
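  /* (Illustrative: with a 4-byte CASE_VECTOR_MODE this forms the
     address (plus:P (mult:P index 4) (label_ref table_label)).)  */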
10957 index = gen_rtx_PLUS (Pmode,
10958 gen_rtx_MULT (Pmode, index,
10959 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10960 gen_rtx_LABEL_REF (Pmode, table_label));
10961 #ifdef PIC_CASE_VECTOR_ADDRESS
10962 if (flag_pic)
10963 index = PIC_CASE_VECTOR_ADDRESS (index);
10964 else
10965 #endif
10966 index = memory_address (CASE_VECTOR_MODE, index);
10967 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10968 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10969 convert_move (temp, vector, 0);
10970
10971 emit_jump_insn (gen_tablejump (temp, table_label));
10972
10973 /* If we are generating PIC code or if the table is PC-relative, the
10974 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10975 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10976 emit_barrier ();
10977 }
10978
10979 int
10980 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10981 rtx table_label, rtx default_label)
10982 {
10983 rtx index;
10984
10985 if (! HAVE_tablejump)
10986 return 0;
10987
10988 index_expr = fold_build2 (MINUS_EXPR, index_type,
10989 fold_convert (index_type, index_expr),
10990 fold_convert (index_type, minval));
10991 index = expand_normal (index_expr);
10992 do_pending_stack_adjust ();
10993
10994 do_tablejump (index, TYPE_MODE (index_type),
10995 convert_modes (TYPE_MODE (index_type),
10996 TYPE_MODE (TREE_TYPE (range)),
10997 expand_normal (range),
10998 TYPE_UNSIGNED (TREE_TYPE (range))),
10999 table_label, default_label);
11000 return 1;
11001 }
11002
11003 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
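/* (Illustrative: a V4SImode VECTOR_CST {0, 1, 2, 3} becomes a
   CONST_VECTOR with four CONST_INT elements, and an all-zero
   initializer is returned directly as CONST0_RTX of the vector
   mode.)  */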
11004 static rtx
11005 const_vector_from_tree (tree exp)
11006 {
11007 rtvec v;
11008 unsigned i;
11009 int units;
11010 tree elt;
11011 enum machine_mode inner, mode;
11012
11013 mode = TYPE_MODE (TREE_TYPE (exp));
11014
11015 if (initializer_zerop (exp))
11016 return CONST0_RTX (mode);
11017
11018 units = GET_MODE_NUNITS (mode);
11019 inner = GET_MODE_INNER (mode);
11020
11021 v = rtvec_alloc (units);
11022
11023 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11024 {
11025 elt = VECTOR_CST_ELT (exp, i);
11026
11027 if (TREE_CODE (elt) == REAL_CST)
11028 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11029 inner);
11030 else if (TREE_CODE (elt) == FIXED_CST)
11031 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11032 inner);
11033 else
11034 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11035 inner);
11036 }
11037
11038 return gen_rtx_CONST_VECTOR (mode, v);
11039 }
11040
11041 /* Build a decl for a personality function given a language prefix. */
11042
11043 tree
11044 build_personality_function (const char *lang)
11045 {
11046 const char *unwind_and_version;
11047 tree decl, type;
11048 char *name;
11049
11050 switch (targetm_common.except_unwind_info (&global_options))
11051 {
11052 case UI_NONE:
11053 return NULL;
11054 case UI_SJLJ:
11055 unwind_and_version = "_sj0";
11056 break;
11057 case UI_DWARF2:
11058 case UI_TARGET:
11059 unwind_and_version = "_v0";
11060 break;
11061 default:
11062 gcc_unreachable ();
11063 }
11064
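  /* E.g. for LANG "gxx" (the C++ front end) this yields
     "__gxx_personality_v0" with DWARF-2 unwinding, or
     "__gxx_personality_sj0" with setjmp/longjmp unwinding.  */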
11065 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11066
11067 type = build_function_type_list (integer_type_node, integer_type_node,
11068 long_long_unsigned_type_node,
11069 ptr_type_node, ptr_type_node, NULL_TREE);
11070 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11071 get_identifier (name), type);
11072 DECL_ARTIFICIAL (decl) = 1;
11073 DECL_EXTERNAL (decl) = 1;
11074 TREE_PUBLIC (decl) = 1;
11075
11076 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11077 are the flags assigned by targetm.encode_section_info. */
11078 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11079
11080 return decl;
11081 }
11082
11083 /* Extracts the personality function of DECL and returns the corresponding
11084 libfunc. */
11085
11086 rtx
11087 get_personality_function (tree decl)
11088 {
11089 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11090 enum eh_personality_kind pk;
11091
11092 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11093 if (pk == eh_personality_none)
11094 return NULL;
11095
11096 if (!personality
11097 && pk == eh_personality_any)
11098 personality = lang_hooks.eh_personality ();
11099
11100 if (pk == eh_personality_lang)
11101 gcc_assert (personality != NULL_TREE);
11102
11103 return XEXP (DECL_RTL (personality), 0);
11104 }
11105
11106 #include "gt-expr.h"