1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70 #include "builtins.h"
71 #include "tree-ssa.h"
72
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
80
81
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
89
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces_d
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
106
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
109
110 struct store_by_pieces_d
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
122
123 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
124 struct move_by_pieces_d *);
125 static bool block_move_libcall_safe_for_call_parm (void);
126 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
127 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
128 unsigned HOST_WIDE_INT);
129 static tree emit_block_move_libcall_fn (int);
130 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
131 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
132 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
133 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
134 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
135 struct store_by_pieces_d *);
136 static tree clear_storage_libcall_fn (int);
137 static rtx_insn *compress_float_constant (rtx, rtx);
138 static rtx get_subtarget (rtx);
139 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
140 HOST_WIDE_INT, enum machine_mode,
141 tree, int, alias_set_type);
142 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
143 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
144 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
145 enum machine_mode, tree, alias_set_type, bool);
146
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
148
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (sepops, rtx, enum machine_mode);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
160
161 /* This macro is used to determine whether move_by_pieces should be called
162 to perform a structure copy. */
163 #ifndef MOVE_BY_PIECES_P
164 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
165 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
166 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
167 #endif
168
169 /* This macro is used to determine whether clear_by_pieces should be
170 called to clear storage. */
171 #ifndef CLEAR_BY_PIECES_P
172 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
173 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
174 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
175 #endif
176
177 /* This macro is used to determine whether store_by_pieces should be
178 called to "memset" storage with byte values other than zero. */
179 #ifndef SET_BY_PIECES_P
180 #define SET_BY_PIECES_P(SIZE, ALIGN) \
181 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
182 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
183 #endif
184
185 /* This macro is used to determine whether store_by_pieces should be
186 called to "memcpy" storage when the source is a constant string. */
187 #ifndef STORE_BY_PIECES_P
188 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
189 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
190 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
191 #endif
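/* Illustrative sketch: these macros gate the inline expansion
   strategies.  For instance, emit_block_move_hints below tests
   MOVE_BY_PIECES_P roughly as

     if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   and otherwise falls back to a movmem pattern or a memcpy libcall,
   so targets tune MOVE_RATIO, CLEAR_RATIO and SET_RATIO to control
   how large a block operation is still expanded piecewise.  */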
192 \f
193 /* This is run to set up which modes can be used
194 directly in memory and to initialize the block move optab. It is run
195 at the beginning of compilation and when the target is reinitialized. */
196
197 void
198 init_expr_target (void)
199 {
200 rtx insn, pat;
201 enum machine_mode mode;
202 int num_clobbers;
203 rtx mem, mem1;
204 rtx reg;
205
206 /* Try indexing by frame ptr and try by stack ptr.
207 It is known that on the Convex the stack ptr isn't a valid index.
208 With luck, one or the other is valid on any machine. */
209 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
210 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
211
212 /* A scratch register we can modify in-place below to avoid
213 useless RTL allocations. */
214 reg = gen_rtx_REG (VOIDmode, -1);
215
216 insn = rtx_alloc (INSN);
217 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
218 PATTERN (insn) = pat;
219
220 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
221 mode = (enum machine_mode) ((int) mode + 1))
222 {
223 int regno;
224
225 direct_load[(int) mode] = direct_store[(int) mode] = 0;
226 PUT_MODE (mem, mode);
227 PUT_MODE (mem1, mode);
228 PUT_MODE (reg, mode);
229
230 /* See if there is some register that can be used in this mode and
231 directly loaded or stored from memory. */
232
233 if (mode != VOIDmode && mode != BLKmode)
234 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
235 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
236 regno++)
237 {
238 if (! HARD_REGNO_MODE_OK (regno, mode))
239 continue;
240
241 SET_REGNO (reg, regno);
242
243 SET_SRC (pat) = mem;
244 SET_DEST (pat) = reg;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_load[(int) mode] = 1;
247
248 SET_SRC (pat) = mem1;
249 SET_DEST (pat) = reg;
250 if (recog (pat, insn, &num_clobbers) >= 0)
251 direct_load[(int) mode] = 1;
252
253 SET_SRC (pat) = reg;
254 SET_DEST (pat) = mem;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_store[(int) mode] = 1;
257
258 SET_SRC (pat) = reg;
259 SET_DEST (pat) = mem1;
260 if (recog (pat, insn, &num_clobbers) >= 0)
261 direct_store[(int) mode] = 1;
262 }
263 }
264
265 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
266
267 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
268 mode = GET_MODE_WIDER_MODE (mode))
269 {
270 enum machine_mode srcmode;
271 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
272 srcmode = GET_MODE_WIDER_MODE (srcmode))
273 {
274 enum insn_code ic;
275
276 ic = can_extend_p (mode, srcmode, 0);
277 if (ic == CODE_FOR_nothing)
278 continue;
279
280 PUT_MODE (mem, srcmode);
281
282 if (insn_operand_matches (ic, 1, mem))
283 float_extend_from_mem[mode][srcmode] = true;
284 }
285 }
286 }
287
288 /* This is run at the start of compiling a function. */
289
290 void
291 init_expr (void)
292 {
293 memset (&crtl->expr, 0, sizeof (crtl->expr));
294 }
295 \f
296 /* Copy data from FROM to TO, where the machine modes are not the same.
297 Both modes may be integer, or both may be floating, or both may be
298 fixed-point.
299 UNSIGNEDP should be nonzero if FROM is an unsigned type.
300 This causes zero-extension instead of sign-extension. */
301
302 void
303 convert_move (rtx to, rtx from, int unsignedp)
304 {
305 enum machine_mode to_mode = GET_MODE (to);
306 enum machine_mode from_mode = GET_MODE (from);
307 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
308 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
309 enum insn_code code;
310 rtx libcall;
311
312 /* rtx code for making an equivalent value. */
313 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
314 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
315
316
317 gcc_assert (to_real == from_real);
318 gcc_assert (to_mode != BLKmode);
319 gcc_assert (from_mode != BLKmode);
320
321 /* If the source and destination are already the same, then there's
322 nothing to do. */
323 if (to == from)
324 return;
325
326 /* If FROM is a SUBREG that indicates that we have already done at least
327 the required extension, strip it. We don't handle such SUBREGs as
328 TO here. */
329
330 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
331 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
332 >= GET_MODE_PRECISION (to_mode))
333 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
334 from = gen_lowpart (to_mode, from), from_mode = to_mode;
335
336 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
337
338 if (to_mode == from_mode
339 || (from_mode == VOIDmode && CONSTANT_P (from)))
340 {
341 emit_move_insn (to, from);
342 return;
343 }
344
345 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
346 {
347 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
348
349 if (VECTOR_MODE_P (to_mode))
350 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
351 else
352 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
353
354 emit_move_insn (to, from);
355 return;
356 }
357
358 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
359 {
360 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
361 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
362 return;
363 }
364
365 if (to_real)
366 {
367 rtx value, insns;
368 convert_optab tab;
369
370 gcc_assert ((GET_MODE_PRECISION (from_mode)
371 != GET_MODE_PRECISION (to_mode))
372 || (DECIMAL_FLOAT_MODE_P (from_mode)
373 != DECIMAL_FLOAT_MODE_P (to_mode)));
374
375 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
376 /* Conversion between decimal float and binary float, same size. */
377 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
378 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
379 tab = sext_optab;
380 else
381 tab = trunc_optab;
382
383 /* Try converting directly if the insn is supported. */
384
385 code = convert_optab_handler (tab, to_mode, from_mode);
386 if (code != CODE_FOR_nothing)
387 {
388 emit_unop_insn (code, to, from,
389 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
390 return;
391 }
392
393 /* Otherwise use a libcall. */
394 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
395
396 /* Is this conversion implemented yet? */
397 gcc_assert (libcall);
398
399 start_sequence ();
400 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
401 1, from, from_mode);
402 insns = get_insns ();
403 end_sequence ();
404 emit_libcall_block (insns, to, value,
405 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
406 from)
407 : gen_rtx_FLOAT_EXTEND (to_mode, from));
408 return;
409 }
410
411 /* Handle pointer conversion. */ /* SPEE 900220. */
412 /* Targets are expected to provide conversion insns between PxImode and
413 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
414 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
415 {
416 enum machine_mode full_mode
417 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
418
419 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
420 != CODE_FOR_nothing);
421
422 if (full_mode != from_mode)
423 from = convert_to_mode (full_mode, from, unsignedp);
424 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
425 to, from, UNKNOWN);
426 return;
427 }
428 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
429 {
430 rtx new_from;
431 enum machine_mode full_mode
432 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
433 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
434 enum insn_code icode;
435
436 icode = convert_optab_handler (ctab, full_mode, from_mode);
437 gcc_assert (icode != CODE_FOR_nothing);
438
439 if (to_mode == full_mode)
440 {
441 emit_unop_insn (icode, to, from, UNKNOWN);
442 return;
443 }
444
445 new_from = gen_reg_rtx (full_mode);
446 emit_unop_insn (icode, new_from, from, UNKNOWN);
447
448 /* else proceed to integer conversions below. */
449 from_mode = full_mode;
450 from = new_from;
451 }
452
453 /* Make sure both are fixed-point modes or both are not. */
454 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
455 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
456 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
457 {
458 /* If we widen from_mode to to_mode and they are in the same class,
459 we won't saturate the result.
460 Otherwise, always saturate the result to play safe. */
461 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
462 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
463 expand_fixed_convert (to, from, 0, 0);
464 else
465 expand_fixed_convert (to, from, 0, 1);
466 return;
467 }
468
469 /* Now both modes are integers. */
470
471 /* Handle expanding beyond a word. */
472 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
473 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
474 {
475 rtx insns;
476 rtx lowpart;
477 rtx fill_value;
478 rtx lowfrom;
479 int i;
480 enum machine_mode lowpart_mode;
481 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
482
483 /* Try converting directly if the insn is supported. */
484 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
485 != CODE_FOR_nothing)
486 {
487 /* If FROM is a SUBREG, put it into a register. Do this
488 so that we always generate the same set of insns for
489 better cse'ing; if an intermediate assignment occurred,
490 we won't be doing the operation directly on the SUBREG. */
491 if (optimize > 0 && GET_CODE (from) == SUBREG)
492 from = force_reg (from_mode, from);
493 emit_unop_insn (code, to, from, equiv_code);
494 return;
495 }
496 /* Next, try converting via full word. */
497 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
498 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
499 != CODE_FOR_nothing))
500 {
501 rtx word_to = gen_reg_rtx (word_mode);
502 if (REG_P (to))
503 {
504 if (reg_overlap_mentioned_p (to, from))
505 from = force_reg (from_mode, from);
506 emit_clobber (to);
507 }
508 convert_move (word_to, from, unsignedp);
509 emit_unop_insn (code, to, word_to, equiv_code);
510 return;
511 }
512
513 /* No special multiword conversion insn; do it by hand. */
514 start_sequence ();
515
 516 /* Since we will turn this into a no conflict block, we must ensure
 517 that the source does not overlap the target, so force it into an
 518 isolated register when it might. Likewise for any MEM input, since
 519 the conversion sequence might require several references to it and
 520 we must ensure we're getting the same value every time. */
521
522 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
523 from = force_reg (from_mode, from);
524
525 /* Get a copy of FROM widened to a word, if necessary. */
526 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
527 lowpart_mode = word_mode;
528 else
529 lowpart_mode = from_mode;
530
531 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
532
533 lowpart = gen_lowpart (lowpart_mode, to);
534 emit_move_insn (lowpart, lowfrom);
535
536 /* Compute the value to put in each remaining word. */
537 if (unsignedp)
538 fill_value = const0_rtx;
539 else
540 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
541 LT, lowfrom, const0_rtx,
542 lowpart_mode, 0, -1);
543
544 /* Fill the remaining words. */
545 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
546 {
547 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
548 rtx subword = operand_subword (to, index, 1, to_mode);
549
550 gcc_assert (subword);
551
552 if (fill_value != subword)
553 emit_move_insn (subword, fill_value);
554 }
555
556 insns = get_insns ();
557 end_sequence ();
558
559 emit_insn (insns);
560 return;
561 }
562
563 /* Truncating multi-word to a word or less. */
564 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
565 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
566 {
567 if (!((MEM_P (from)
568 && ! MEM_VOLATILE_P (from)
569 && direct_load[(int) to_mode]
570 && ! mode_dependent_address_p (XEXP (from, 0),
571 MEM_ADDR_SPACE (from)))
572 || REG_P (from)
573 || GET_CODE (from) == SUBREG))
574 from = force_reg (from_mode, from);
575 convert_move (to, gen_lowpart (word_mode, from), 0);
576 return;
577 }
578
579 /* Now follow all the conversions between integers
580 no more than a word long. */
581
582 /* For truncation, usually we can just refer to FROM in a narrower mode. */
583 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
584 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
585 {
586 if (!((MEM_P (from)
587 && ! MEM_VOLATILE_P (from)
588 && direct_load[(int) to_mode]
589 && ! mode_dependent_address_p (XEXP (from, 0),
590 MEM_ADDR_SPACE (from)))
591 || REG_P (from)
592 || GET_CODE (from) == SUBREG))
593 from = force_reg (from_mode, from);
594 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
595 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
596 from = copy_to_reg (from);
597 emit_move_insn (to, gen_lowpart (to_mode, from));
598 return;
599 }
600
601 /* Handle extension. */
602 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
603 {
604 /* Convert directly if that works. */
605 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
606 != CODE_FOR_nothing)
607 {
608 emit_unop_insn (code, to, from, equiv_code);
609 return;
610 }
611 else
612 {
613 enum machine_mode intermediate;
614 rtx tmp;
615 int shift_amount;
616
617 /* Search for a mode to convert via. */
618 for (intermediate = from_mode; intermediate != VOIDmode;
619 intermediate = GET_MODE_WIDER_MODE (intermediate))
620 if (((can_extend_p (to_mode, intermediate, unsignedp)
621 != CODE_FOR_nothing)
622 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
623 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
624 && (can_extend_p (intermediate, from_mode, unsignedp)
625 != CODE_FOR_nothing))
626 {
627 convert_move (to, convert_to_mode (intermediate, from,
628 unsignedp), unsignedp);
629 return;
630 }
631
632 /* No suitable intermediate mode.
633 Generate what we need with shifts. */
634 shift_amount = (GET_MODE_PRECISION (to_mode)
635 - GET_MODE_PRECISION (from_mode));
636 from = gen_lowpart (to_mode, force_reg (from_mode, from));
637 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
638 to, unsignedp);
639 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
640 to, unsignedp);
641 if (tmp != to)
642 emit_move_insn (to, tmp);
643 return;
644 }
645 }
646
647 /* Support special truncate insns for certain modes. */
648 if (convert_optab_handler (trunc_optab, to_mode,
649 from_mode) != CODE_FOR_nothing)
650 {
651 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
652 to, from, UNKNOWN);
653 return;
654 }
655
656 /* Handle truncation of volatile memrefs, and so on;
657 the things that couldn't be truncated directly,
658 and for which there was no special instruction.
659
660 ??? Code above formerly short-circuited this, for most integer
661 mode pairs, with a force_reg in from_mode followed by a recursive
662 call to this routine. Appears always to have been wrong. */
663 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
664 {
665 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
666 emit_move_insn (to, temp);
667 return;
668 }
669
670 /* Mode combination is not recognized. */
671 gcc_unreachable ();
672 }
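/* A minimal usage sketch for convert_move, assuming SI_REG and HI_REG
   are existing SImode and HImode registers:

     convert_move (si_reg, hi_reg, /*unsignedp=*/0);

   sign-extends the HImode value into SI_REG; a nonzero UNSIGNEDP
   requests zero-extension instead.  */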
673
674 /* Return an rtx for a value that would result
675 from converting X to mode MODE.
676 Both X and MODE may be floating, or both integer.
677 UNSIGNEDP is nonzero if X is an unsigned value.
678 This can be done by referring to a part of X in place
679 or by copying to a new temporary with conversion. */
680
681 rtx
682 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
683 {
684 return convert_modes (mode, VOIDmode, x, unsignedp);
685 }
686
687 /* Return an rtx for a value that would result
688 from converting X from mode OLDMODE to mode MODE.
689 Both modes may be floating, or both integer.
690 UNSIGNEDP is nonzero if X is an unsigned value.
691
692 This can be done by referring to a part of X in place
693 or by copying to a new temporary with conversion.
694
695 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
696
697 rtx
698 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
699 {
700 rtx temp;
701
702 /* If FROM is a SUBREG that indicates that we have already done at least
703 the required extension, strip it. */
704
705 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
706 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
707 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
708 x = gen_lowpart (mode, SUBREG_REG (x));
709
710 if (GET_MODE (x) != VOIDmode)
711 oldmode = GET_MODE (x);
712
713 if (mode == oldmode)
714 return x;
715
716 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
717 {
718 /* If the caller did not tell us the old mode, then there is not
719 much to do with respect to canonicalization. We have to
720 assume that all the bits are significant. */
721 if (GET_MODE_CLASS (oldmode) != MODE_INT)
722 oldmode = MAX_MODE_INT;
723 wide_int w = wide_int::from (std::make_pair (x, oldmode),
724 GET_MODE_PRECISION (mode),
725 unsignedp ? UNSIGNED : SIGNED);
726 return immed_wide_int_const (w, mode);
727 }
728
729 /* We can do this with a gen_lowpart if both desired and current modes
730 are integer, and this is either a constant integer, a register, or a
731 non-volatile MEM. */
732 if (GET_MODE_CLASS (mode) == MODE_INT
733 && GET_MODE_CLASS (oldmode) == MODE_INT
734 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
735 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
736 || (REG_P (x)
737 && (!HARD_REGISTER_P (x)
738 || HARD_REGNO_MODE_OK (REGNO (x), mode))
739 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
740
741 return gen_lowpart (mode, x);
742
 743 /* Converting an integer constant into the target mode is always
 744 equivalent to a subreg operation. */
745 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
746 {
747 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
748 return simplify_gen_subreg (mode, x, oldmode, 0);
749 }
750
751 temp = gen_reg_rtx (mode);
752 convert_move (temp, x, unsignedp);
753 return temp;
754 }
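/* A minimal usage sketch, assuming X is a QImode rtx:

     rtx wide  = convert_to_mode (SImode, x, /*unsignedp=*/1);
     rtx wide2 = convert_modes (SImode, QImode, GEN_INT (0xff), 1);

   The second form supplies OLDMODE explicitly, which matters when the
   operand is a VOIDmode constant.  Both return either the operand
   reinterpreted in the new mode or a fresh pseudo holding the
   converted value.  */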
755 \f
756 /* Return the largest alignment we can use for doing a move (or store)
757 of MAX_PIECES. ALIGN is the largest alignment we could use. */
758
759 static unsigned int
760 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
761 {
762 enum machine_mode tmode;
763
764 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
765 if (align >= GET_MODE_ALIGNMENT (tmode))
766 align = GET_MODE_ALIGNMENT (tmode);
767 else
768 {
769 enum machine_mode tmode, xmode;
770
771 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
772 tmode != VOIDmode;
773 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
774 if (GET_MODE_SIZE (tmode) > max_pieces
775 || SLOW_UNALIGNED_ACCESS (tmode, align))
776 break;
777
778 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
779 }
780
781 return align;
782 }
783
784 /* Return the widest integer mode no wider than SIZE. If no such mode
785 can be found, return VOIDmode. */
786
787 static enum machine_mode
788 widest_int_mode_for_size (unsigned int size)
789 {
790 enum machine_mode tmode, mode = VOIDmode;
791
792 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
793 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
794 if (GET_MODE_SIZE (tmode) < size)
795 mode = tmode;
796
797 return mode;
798 }
799
800 /* STORE_MAX_PIECES is the number of bytes at a time that we can
801 store efficiently. Due to internal GCC limitations, this is
802 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
803 for an immediate constant. */
804
805 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
806
807 /* Determine whether the LEN bytes can be moved by using several move
808 instructions. Return nonzero if a call to move_by_pieces should
809 succeed. */
810
811 int
812 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
813 unsigned int align ATTRIBUTE_UNUSED)
814 {
815 return MOVE_BY_PIECES_P (len, align);
816 }
817
818 /* Generate several move instructions to copy LEN bytes from block FROM to
819 block TO. (These are MEM rtx's with BLKmode).
820
821 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
822 used to push FROM to the stack.
823
824 ALIGN is maximum stack alignment we can assume.
825
 826 If ENDP is 0, return TO; if ENDP is 1, return memory at the end a la
 827 mempcpy; and if ENDP is 2, return memory at the end minus one byte a
 828 la stpcpy. */
829
830 rtx
831 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
832 unsigned int align, int endp)
833 {
834 struct move_by_pieces_d data;
835 enum machine_mode to_addr_mode;
836 enum machine_mode from_addr_mode = get_address_mode (from);
837 rtx to_addr, from_addr = XEXP (from, 0);
838 unsigned int max_size = MOVE_MAX_PIECES + 1;
839 enum insn_code icode;
840
841 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
842
843 data.offset = 0;
844 data.from_addr = from_addr;
845 if (to)
846 {
847 to_addr_mode = get_address_mode (to);
848 to_addr = XEXP (to, 0);
849 data.to = to;
850 data.autinc_to
851 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
852 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
853 data.reverse
854 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
855 }
856 else
857 {
858 to_addr_mode = VOIDmode;
859 to_addr = NULL_RTX;
860 data.to = NULL_RTX;
861 data.autinc_to = 1;
862 #ifdef STACK_GROWS_DOWNWARD
863 data.reverse = 1;
864 #else
865 data.reverse = 0;
866 #endif
867 }
868 data.to_addr = to_addr;
869 data.from = from;
870 data.autinc_from
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
874
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
878 data.len = len;
879
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
885 {
886 /* Find the mode of the largest move...
887 MODE might not be used depending on the definitions of the
888 USE_* macros below. */
889 enum machine_mode mode ATTRIBUTE_UNUSED
890 = widest_int_mode_for_size (max_size);
891
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
893 {
894 data.from_addr = copy_to_mode_reg (from_addr_mode,
895 plus_constant (from_addr_mode,
896 from_addr, len));
897 data.autinc_from = 1;
898 data.explicit_inc_from = -1;
899 }
900 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 {
902 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
903 data.autinc_from = 1;
904 data.explicit_inc_from = 1;
905 }
906 if (!data.autinc_from && CONSTANT_P (from_addr))
907 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
908 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 {
910 data.to_addr = copy_to_mode_reg (to_addr_mode,
911 plus_constant (to_addr_mode,
912 to_addr, len));
913 data.autinc_to = 1;
914 data.explicit_inc_to = -1;
915 }
916 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
917 {
918 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
919 data.autinc_to = 1;
920 data.explicit_inc_to = 1;
921 }
922 if (!data.autinc_to && CONSTANT_P (to_addr))
923 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
924 }
925
926 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
927
928 /* First move what we can in the largest integer mode, then go to
929 successively smaller modes. */
930
931 while (max_size > 1 && data.len > 0)
932 {
933 enum machine_mode mode = widest_int_mode_for_size (max_size);
934
935 if (mode == VOIDmode)
936 break;
937
938 icode = optab_handler (mov_optab, mode);
939 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
940 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
941
942 max_size = GET_MODE_SIZE (mode);
943 }
944
945 /* The code above should have handled everything. */
946 gcc_assert (!data.len);
947
948 if (endp)
949 {
950 rtx to1;
951
952 gcc_assert (!data.reverse);
953 if (data.autinc_to)
954 {
955 if (endp == 2)
956 {
957 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
958 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
959 else
960 data.to_addr = copy_to_mode_reg (to_addr_mode,
961 plus_constant (to_addr_mode,
962 data.to_addr,
963 -1));
964 }
965 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
966 data.offset);
967 }
968 else
969 {
970 if (endp == 2)
971 --data.offset;
972 to1 = adjust_address (data.to, QImode, data.offset);
973 }
974 return to1;
975 }
976 else
977 return data.to;
978 }
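/* A minimal usage sketch, assuming DST and SRC are BLKmode MEMs for a
   16-byte copy that already passed the MOVE_BY_PIECES_P check:

     rtx end = move_by_pieces (dst, src, 16, align, /*endp=*/1);

   With ENDP == 1 the returned QImode MEM addresses the byte just past
   the copied block (mempcpy style); with ENDP == 0 the function simply
   returns DST.  */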
979
980 /* Return number of insns required to move L bytes by pieces.
981 ALIGN (in bits) is maximum alignment we can assume. */
982
983 unsigned HOST_WIDE_INT
984 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
985 unsigned int max_size)
986 {
987 unsigned HOST_WIDE_INT n_insns = 0;
988
989 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
990
991 while (max_size > 1 && l > 0)
992 {
993 enum machine_mode mode;
994 enum insn_code icode;
995
996 mode = widest_int_mode_for_size (max_size);
997
998 if (mode == VOIDmode)
999 break;
1000
1001 icode = optab_handler (mov_optab, mode);
1002 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1003 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1004
1005 max_size = GET_MODE_SIZE (mode);
1006 }
1007
1008 gcc_assert (!l);
1009 return n_insns;
1010 }
1011
1012 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1013 with move instructions for mode MODE. GENFUN is the gen_... function
1014 to make a move insn for that mode. DATA has all the other info. */
1015
1016 static void
1017 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1018 struct move_by_pieces_d *data)
1019 {
1020 unsigned int size = GET_MODE_SIZE (mode);
1021 rtx to1 = NULL_RTX, from1;
1022
1023 while (data->len >= size)
1024 {
1025 if (data->reverse)
1026 data->offset -= size;
1027
1028 if (data->to)
1029 {
1030 if (data->autinc_to)
1031 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1032 data->offset);
1033 else
1034 to1 = adjust_address (data->to, mode, data->offset);
1035 }
1036
1037 if (data->autinc_from)
1038 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1039 data->offset);
1040 else
1041 from1 = adjust_address (data->from, mode, data->offset);
1042
1043 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1044 emit_insn (gen_add2_insn (data->to_addr,
1045 gen_int_mode (-(HOST_WIDE_INT) size,
1046 GET_MODE (data->to_addr))));
1047 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1048 emit_insn (gen_add2_insn (data->from_addr,
1049 gen_int_mode (-(HOST_WIDE_INT) size,
1050 GET_MODE (data->from_addr))));
1051
1052 if (data->to)
1053 emit_insn ((*genfun) (to1, from1));
1054 else
1055 {
1056 #ifdef PUSH_ROUNDING
1057 emit_single_push_insn (mode, from1, NULL);
1058 #else
1059 gcc_unreachable ();
1060 #endif
1061 }
1062
1063 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1064 emit_insn (gen_add2_insn (data->to_addr,
1065 gen_int_mode (size,
1066 GET_MODE (data->to_addr))));
1067 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1068 emit_insn (gen_add2_insn (data->from_addr,
1069 gen_int_mode (size,
1070 GET_MODE (data->from_addr))));
1071
1072 if (! data->reverse)
1073 data->offset += size;
1074
1075 data->len -= size;
1076 }
1077 }
1078 \f
1079 /* Emit code to move a block Y to a block X. This may be done with
1080 string-move instructions, with multiple scalar move instructions,
1081 or with a library call.
1082
1083 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1084 SIZE is an rtx that says how long they are.
1085 ALIGN is the maximum alignment we can assume they have.
1086 METHOD describes what kind of copy this is, and what mechanisms may be used.
 1087 MIN_SIZE is the minimal size of the block to move.
 1088 MAX_SIZE is the maximal size of the block to move; if it cannot be
 1089 represented in unsigned HOST_WIDE_INT, it is a mask of all ones.
1090
1091 Return the address of the new block, if memcpy is called and returns it,
1092 0 otherwise. */
1093
1094 rtx
1095 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1096 unsigned int expected_align, HOST_WIDE_INT expected_size,
1097 unsigned HOST_WIDE_INT min_size,
1098 unsigned HOST_WIDE_INT max_size,
1099 unsigned HOST_WIDE_INT probable_max_size)
1100 {
1101 bool may_use_call;
1102 rtx retval = 0;
1103 unsigned int align;
1104
1105 gcc_assert (size);
1106 if (CONST_INT_P (size)
1107 && INTVAL (size) == 0)
1108 return 0;
1109
1110 switch (method)
1111 {
1112 case BLOCK_OP_NORMAL:
1113 case BLOCK_OP_TAILCALL:
1114 may_use_call = true;
1115 break;
1116
1117 case BLOCK_OP_CALL_PARM:
1118 may_use_call = block_move_libcall_safe_for_call_parm ();
1119
1120 /* Make inhibit_defer_pop nonzero around the library call
1121 to force it to pop the arguments right away. */
1122 NO_DEFER_POP;
1123 break;
1124
1125 case BLOCK_OP_NO_LIBCALL:
1126 may_use_call = false;
1127 break;
1128
1129 default:
1130 gcc_unreachable ();
1131 }
1132
1133 gcc_assert (MEM_P (x) && MEM_P (y));
1134 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1135 gcc_assert (align >= BITS_PER_UNIT);
1136
1137 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1138 block copy is more efficient for other large modes, e.g. DCmode. */
1139 x = adjust_address (x, BLKmode, 0);
1140 y = adjust_address (y, BLKmode, 0);
1141
1142 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1143 can be incorrect is coming from __builtin_memcpy. */
1144 if (CONST_INT_P (size))
1145 {
1146 x = shallow_copy_rtx (x);
1147 y = shallow_copy_rtx (y);
1148 set_mem_size (x, INTVAL (size));
1149 set_mem_size (y, INTVAL (size));
1150 }
1151
1152 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1153 move_by_pieces (x, y, INTVAL (size), align, 0);
1154 else if (emit_block_move_via_movmem (x, y, size, align,
1155 expected_align, expected_size,
1156 min_size, max_size, probable_max_size))
1157 ;
1158 else if (may_use_call
1159 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1160 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1161 {
1162 /* Since x and y are passed to a libcall, mark the corresponding
1163 tree EXPR as addressable. */
1164 tree y_expr = MEM_EXPR (y);
1165 tree x_expr = MEM_EXPR (x);
1166 if (y_expr)
1167 mark_addressable (y_expr);
1168 if (x_expr)
1169 mark_addressable (x_expr);
1170 retval = emit_block_move_via_libcall (x, y, size,
1171 method == BLOCK_OP_TAILCALL);
1172 }
1173
1174 else
1175 emit_block_move_via_loop (x, y, size, align);
1176
1177 if (method == BLOCK_OP_CALL_PARM)
1178 OK_DEFER_POP;
1179
1180 return retval;
1181 }
1182
1183 rtx
1184 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1185 {
1186 unsigned HOST_WIDE_INT max, min = 0;
1187 if (GET_CODE (size) == CONST_INT)
1188 min = max = UINTVAL (size);
1189 else
1190 max = GET_MODE_MASK (GET_MODE (size));
1191 return emit_block_move_hints (x, y, size, method, 0, -1,
1192 min, max, max);
1193 }
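/* A minimal usage sketch, assuming X and Y are BLKmode MEMs for the
   destination and source of an N-byte aggregate copy:

     rtx ret = emit_block_move (x, y, GEN_INT (n), BLOCK_OP_NORMAL);

   RET is the value returned by memcpy when a libcall was emitted and
   0 otherwise, as documented for emit_block_move_hints above.  */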
1194
1195 /* A subroutine of emit_block_move. Returns true if calling the
1196 block move libcall will not clobber any parameters which may have
1197 already been placed on the stack. */
1198
1199 static bool
1200 block_move_libcall_safe_for_call_parm (void)
1201 {
1202 #if defined (REG_PARM_STACK_SPACE)
1203 tree fn;
1204 #endif
1205
1206 /* If arguments are pushed on the stack, then they're safe. */
1207 if (PUSH_ARGS)
1208 return true;
1209
1210 /* If registers go on the stack anyway, any argument is sure to clobber
1211 an outgoing argument. */
1212 #if defined (REG_PARM_STACK_SPACE)
1213 fn = emit_block_move_libcall_fn (false);
1214 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1215 depend on its argument. */
1216 (void) fn;
1217 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1218 && REG_PARM_STACK_SPACE (fn) != 0)
1219 return false;
1220 #endif
1221
1222 /* If any argument goes in memory, then it might clobber an outgoing
1223 argument. */
1224 {
1225 CUMULATIVE_ARGS args_so_far_v;
1226 cumulative_args_t args_so_far;
1227 tree fn, arg;
1228
1229 fn = emit_block_move_libcall_fn (false);
1230 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1231 args_so_far = pack_cumulative_args (&args_so_far_v);
1232
1233 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1234 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1235 {
1236 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1237 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1238 NULL_TREE, true);
1239 if (!tmp || !REG_P (tmp))
1240 return false;
1241 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1242 return false;
1243 targetm.calls.function_arg_advance (args_so_far, mode,
1244 NULL_TREE, true);
1245 }
1246 }
1247 return true;
1248 }
1249
1250 /* A subroutine of emit_block_move. Expand a movmem pattern;
1251 return true if successful. */
1252
1253 static bool
1254 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1255 unsigned int expected_align, HOST_WIDE_INT expected_size,
1256 unsigned HOST_WIDE_INT min_size,
1257 unsigned HOST_WIDE_INT max_size,
1258 unsigned HOST_WIDE_INT probable_max_size)
1259 {
1260 int save_volatile_ok = volatile_ok;
1261 enum machine_mode mode;
1262
1263 if (expected_align < align)
1264 expected_align = align;
1265 if (expected_size != -1)
1266 {
1267 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1268 expected_size = probable_max_size;
1269 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1270 expected_size = min_size;
1271 }
1272
1273 /* Since this is a move insn, we don't care about volatility. */
1274 volatile_ok = 1;
1275
1276 /* Try the most limited insn first, because there's no point
1277 including more than one in the machine description unless
1278 the more limited one has some advantage. */
1279
1280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1281 mode = GET_MODE_WIDER_MODE (mode))
1282 {
1283 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1284
1285 if (code != CODE_FOR_nothing
1286 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1287 here because if SIZE is less than the mode mask, as it is
1288 returned by the macro, it will definitely be less than the
1289 actual mode mask. Since SIZE is within the Pmode address
1290 space, we limit MODE to Pmode. */
1291 && ((CONST_INT_P (size)
1292 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1293 <= (GET_MODE_MASK (mode) >> 1)))
1294 || max_size <= (GET_MODE_MASK (mode) >> 1)
1295 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1296 {
1297 struct expand_operand ops[9];
1298 unsigned int nops;
1299
1300 /* ??? When called via emit_block_move_for_call, it'd be
1301 nice if there were some way to inform the backend, so
1302 that it doesn't fail the expansion because it thinks
1303 emitting the libcall would be more efficient. */
1304 nops = insn_data[(int) code].n_generator_args;
1305 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1306
1307 create_fixed_operand (&ops[0], x);
1308 create_fixed_operand (&ops[1], y);
1309 /* The check above guarantees that this size conversion is valid. */
1310 create_convert_operand_to (&ops[2], size, mode, true);
1311 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1312 if (nops >= 6)
1313 {
1314 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1315 create_integer_operand (&ops[5], expected_size);
1316 }
1317 if (nops >= 8)
1318 {
1319 create_integer_operand (&ops[6], min_size);
 1320 /* If we cannot represent the maximal size,
 1321 make the parameter NULL. */
1322 if ((HOST_WIDE_INT) max_size != -1)
1323 create_integer_operand (&ops[7], max_size);
1324 else
1325 create_fixed_operand (&ops[7], NULL);
1326 }
1327 if (nops == 9)
1328 {
 1329 /* If we cannot represent the maximal size,
 1330 make the parameter NULL. */
1331 if ((HOST_WIDE_INT) probable_max_size != -1)
1332 create_integer_operand (&ops[8], probable_max_size);
1333 else
1334 create_fixed_operand (&ops[8], NULL);
1335 }
1336 if (maybe_expand_insn (code, nops, ops))
1337 {
1338 volatile_ok = save_volatile_ok;
1339 return true;
1340 }
1341 }
1342 }
1343
1344 volatile_ok = save_volatile_ok;
1345 return false;
1346 }
1347
1348 /* A subroutine of emit_block_move. Expand a call to memcpy.
1349 Return the return value from memcpy, 0 otherwise. */
1350
1351 rtx
1352 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1353 {
1354 rtx dst_addr, src_addr;
1355 tree call_expr, fn, src_tree, dst_tree, size_tree;
1356 enum machine_mode size_mode;
1357 rtx retval;
1358
1359 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1360 pseudos. We can then place those new pseudos into a VAR_DECL and
1361 use them later. */
1362
1363 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1364 src_addr = copy_addr_to_reg (XEXP (src, 0));
1365
1366 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1367 src_addr = convert_memory_address (ptr_mode, src_addr);
1368
1369 dst_tree = make_tree (ptr_type_node, dst_addr);
1370 src_tree = make_tree (ptr_type_node, src_addr);
1371
1372 size_mode = TYPE_MODE (sizetype);
1373
1374 size = convert_to_mode (size_mode, size, 1);
1375 size = copy_to_mode_reg (size_mode, size);
1376
1377 /* It is incorrect to use the libcall calling conventions to call
1378 memcpy in this context. This could be a user call to memcpy and
1379 the user may wish to examine the return value from memcpy. For
1380 targets where libcalls and normal calls have different conventions
1381 for returning pointers, we could end up generating incorrect code. */
1382
1383 size_tree = make_tree (sizetype, size);
1384
1385 fn = emit_block_move_libcall_fn (true);
1386 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1387 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1388
1389 retval = expand_normal (call_expr);
1390
1391 return retval;
1392 }
1393
1394 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1395 for the function we use for block copies. */
1396
1397 static GTY(()) tree block_move_fn;
1398
1399 void
1400 init_block_move_fn (const char *asmspec)
1401 {
1402 if (!block_move_fn)
1403 {
1404 tree args, fn, attrs, attr_args;
1405
1406 fn = get_identifier ("memcpy");
1407 args = build_function_type_list (ptr_type_node, ptr_type_node,
1408 const_ptr_type_node, sizetype,
1409 NULL_TREE);
1410
1411 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1412 DECL_EXTERNAL (fn) = 1;
1413 TREE_PUBLIC (fn) = 1;
1414 DECL_ARTIFICIAL (fn) = 1;
1415 TREE_NOTHROW (fn) = 1;
1416 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1417 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1418
1419 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1420 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1421
1422 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1423
1424 block_move_fn = fn;
1425 }
1426
1427 if (asmspec)
1428 set_user_assembler_name (block_move_fn, asmspec);
1429 }
1430
1431 static tree
1432 emit_block_move_libcall_fn (int for_call)
1433 {
1434 static bool emitted_extern;
1435
1436 if (!block_move_fn)
1437 init_block_move_fn (NULL);
1438
1439 if (for_call && !emitted_extern)
1440 {
1441 emitted_extern = true;
1442 make_decl_rtl (block_move_fn);
1443 }
1444
1445 return block_move_fn;
1446 }
1447
1448 /* A subroutine of emit_block_move. Copy the data via an explicit
1449 loop. This is used only when libcalls are forbidden. */
1450 /* ??? It'd be nice to copy in hunks larger than QImode. */
1451
1452 static void
1453 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1454 unsigned int align ATTRIBUTE_UNUSED)
1455 {
1456 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1457 enum machine_mode x_addr_mode = get_address_mode (x);
1458 enum machine_mode y_addr_mode = get_address_mode (y);
1459 enum machine_mode iter_mode;
1460
1461 iter_mode = GET_MODE (size);
1462 if (iter_mode == VOIDmode)
1463 iter_mode = word_mode;
1464
1465 top_label = gen_label_rtx ();
1466 cmp_label = gen_label_rtx ();
1467 iter = gen_reg_rtx (iter_mode);
1468
1469 emit_move_insn (iter, const0_rtx);
1470
1471 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1472 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1473 do_pending_stack_adjust ();
1474
1475 emit_jump (cmp_label);
1476 emit_label (top_label);
1477
1478 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1479 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1480
1481 if (x_addr_mode != y_addr_mode)
1482 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1483 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1484
1485 x = change_address (x, QImode, x_addr);
1486 y = change_address (y, QImode, y_addr);
1487
1488 emit_move_insn (x, y);
1489
1490 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1491 true, OPTAB_LIB_WIDEN);
1492 if (tmp != iter)
1493 emit_move_insn (iter, tmp);
1494
1495 emit_label (cmp_label);
1496
1497 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1498 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1499 }
1500 \f
1501 /* Copy all or part of a value X into registers starting at REGNO.
1502 The number of registers to be filled is NREGS. */
1503
1504 void
1505 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1506 {
1507 int i;
1508 #ifdef HAVE_load_multiple
1509 rtx pat;
1510 rtx last;
1511 #endif
1512
1513 if (nregs == 0)
1514 return;
1515
1516 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1517 x = validize_mem (force_const_mem (mode, x));
1518
1519 /* See if the machine can do this with a load multiple insn. */
1520 #ifdef HAVE_load_multiple
1521 if (HAVE_load_multiple)
1522 {
1523 last = get_last_insn ();
1524 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1525 GEN_INT (nregs));
1526 if (pat)
1527 {
1528 emit_insn (pat);
1529 return;
1530 }
1531 else
1532 delete_insns_since (last);
1533 }
1534 #endif
1535
1536 for (i = 0; i < nregs; i++)
1537 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1538 operand_subword_force (x, i, mode));
1539 }
1540
1541 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1542 The number of registers to be filled is NREGS. */
1543
1544 void
1545 move_block_from_reg (int regno, rtx x, int nregs)
1546 {
1547 int i;
1548
1549 if (nregs == 0)
1550 return;
1551
1552 /* See if the machine can do this with a store multiple insn. */
1553 #ifdef HAVE_store_multiple
1554 if (HAVE_store_multiple)
1555 {
1556 rtx last = get_last_insn ();
1557 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1558 GEN_INT (nregs));
1559 if (pat)
1560 {
1561 emit_insn (pat);
1562 return;
1563 }
1564 else
1565 delete_insns_since (last);
1566 }
1567 #endif
1568
1569 for (i = 0; i < nregs; i++)
1570 {
1571 rtx tem = operand_subword (x, i, 1, BLKmode);
1572
1573 gcc_assert (tem);
1574
1575 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1576 }
1577 }
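/* A minimal usage sketch, assuming a two-word value X in mode MODE and
   a BLKmode location BLK:

     move_block_to_reg (regno, x, 2, mode);
     move_block_from_reg (regno, blk, 2);

   Both routines work one word_mode subword at a time, via
   operand_subword_force and operand_subword respectively.  */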
1578
1579 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1580 ORIG, where ORIG is a non-consecutive group of registers represented by
1581 a PARALLEL. The clone is identical to the original except in that the
1582 original set of registers is replaced by a new set of pseudo registers.
1583 The new set has the same modes as the original set. */
1584
1585 rtx
1586 gen_group_rtx (rtx orig)
1587 {
1588 int i, length;
1589 rtx *tmps;
1590
1591 gcc_assert (GET_CODE (orig) == PARALLEL);
1592
1593 length = XVECLEN (orig, 0);
1594 tmps = XALLOCAVEC (rtx, length);
1595
1596 /* Skip a NULL entry in first slot. */
1597 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1598
1599 if (i)
1600 tmps[0] = 0;
1601
1602 for (; i < length; i++)
1603 {
1604 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1605 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1606
1607 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1608 }
1609
1610 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1611 }
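/* For reference, such a group is a PARALLEL whose elements are
   EXPR_LISTs pairing a register with its byte offset in the containing
   object, e.g. (sketch, register numbers invented):

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   A NULL rtx in the first slot means the value also lives partly on
   the stack, which is why the routines here skip slot 0 when it is
   NULL.  */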
1612
1613 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1614 except that values are placed in TMPS[i], and must later be moved
1615 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1616
1617 static void
1618 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1619 {
1620 rtx src;
1621 int start, i;
1622 enum machine_mode m = GET_MODE (orig_src);
1623
1624 gcc_assert (GET_CODE (dst) == PARALLEL);
1625
1626 if (m != VOIDmode
1627 && !SCALAR_INT_MODE_P (m)
1628 && !MEM_P (orig_src)
1629 && GET_CODE (orig_src) != CONCAT)
1630 {
1631 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1632 if (imode == BLKmode)
1633 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1634 else
1635 src = gen_reg_rtx (imode);
1636 if (imode != BLKmode)
1637 src = gen_lowpart (GET_MODE (orig_src), src);
1638 emit_move_insn (src, orig_src);
1639 /* ...and back again. */
1640 if (imode != BLKmode)
1641 src = gen_lowpart (imode, src);
1642 emit_group_load_1 (tmps, dst, src, type, ssize);
1643 return;
1644 }
1645
1646 /* Check for a NULL entry, used to indicate that the parameter goes
1647 both on the stack and in registers. */
1648 if (XEXP (XVECEXP (dst, 0, 0), 0))
1649 start = 0;
1650 else
1651 start = 1;
1652
1653 /* Process the pieces. */
1654 for (i = start; i < XVECLEN (dst, 0); i++)
1655 {
1656 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1657 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1658 unsigned int bytelen = GET_MODE_SIZE (mode);
1659 int shift = 0;
1660
1661 /* Handle trailing fragments that run over the size of the struct. */
1662 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1663 {
1664 /* Arrange to shift the fragment to where it belongs.
1665 extract_bit_field loads to the lsb of the reg. */
1666 if (
1667 #ifdef BLOCK_REG_PADDING
1668 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1669 == (BYTES_BIG_ENDIAN ? upward : downward)
1670 #else
1671 BYTES_BIG_ENDIAN
1672 #endif
1673 )
1674 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1675 bytelen = ssize - bytepos;
1676 gcc_assert (bytelen > 0);
1677 }
1678
1679 /* If we won't be loading directly from memory, protect the real source
1680 from strange tricks we might play; but make sure that the source can
1681 be loaded directly into the destination. */
1682 src = orig_src;
1683 if (!MEM_P (orig_src)
1684 && (!CONSTANT_P (orig_src)
1685 || (GET_MODE (orig_src) != mode
1686 && GET_MODE (orig_src) != VOIDmode)))
1687 {
1688 if (GET_MODE (orig_src) == VOIDmode)
1689 src = gen_reg_rtx (mode);
1690 else
1691 src = gen_reg_rtx (GET_MODE (orig_src));
1692
1693 emit_move_insn (src, orig_src);
1694 }
1695
1696 /* Optimize the access just a bit. */
1697 if (MEM_P (src)
1698 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1699 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1700 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1701 && bytelen == GET_MODE_SIZE (mode))
1702 {
1703 tmps[i] = gen_reg_rtx (mode);
1704 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1705 }
1706 else if (COMPLEX_MODE_P (mode)
1707 && GET_MODE (src) == mode
1708 && bytelen == GET_MODE_SIZE (mode))
1709 /* Let emit_move_complex do the bulk of the work. */
1710 tmps[i] = src;
1711 else if (GET_CODE (src) == CONCAT)
1712 {
1713 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1714 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1715
1716 if ((bytepos == 0 && bytelen == slen0)
1717 || (bytepos != 0 && bytepos + bytelen <= slen))
1718 {
1719 /* The following assumes that the concatenated objects all
1720 have the same size. In this case, a simple calculation
1721 can be used to determine the object and the bit field
1722 to be extracted. */
1723 tmps[i] = XEXP (src, bytepos / slen0);
1724 if (! CONSTANT_P (tmps[i])
1725 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1726 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1727 (bytepos % slen0) * BITS_PER_UNIT,
1728 1, NULL_RTX, mode, mode);
1729 }
1730 else
1731 {
1732 rtx mem;
1733
1734 gcc_assert (!bytepos);
1735 mem = assign_stack_temp (GET_MODE (src), slen);
1736 emit_move_insn (mem, src);
1737 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1738 0, 1, NULL_RTX, mode, mode);
1739 }
1740 }
1741 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1742 SIMD register, which is currently broken. While we get GCC
1743 to emit proper RTL for these cases, let's dump to memory. */
1744 else if (VECTOR_MODE_P (GET_MODE (dst))
1745 && REG_P (src))
1746 {
1747 int slen = GET_MODE_SIZE (GET_MODE (src));
1748 rtx mem;
1749
1750 mem = assign_stack_temp (GET_MODE (src), slen);
1751 emit_move_insn (mem, src);
1752 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1753 }
1754 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1755 && XVECLEN (dst, 0) > 1)
1756 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1757 else if (CONSTANT_P (src))
1758 {
1759 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1760
1761 if (len == ssize)
1762 tmps[i] = src;
1763 else
1764 {
1765 rtx first, second;
1766
1767 /* TODO: const_wide_int can have sizes other than this... */
1768 gcc_assert (2 * len == ssize);
1769 split_double (src, &first, &second);
1770 if (i)
1771 tmps[i] = second;
1772 else
1773 tmps[i] = first;
1774 }
1775 }
1776 else if (REG_P (src) && GET_MODE (src) == mode)
1777 tmps[i] = src;
1778 else
1779 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1780 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1781 mode, mode);
1782
1783 if (shift)
1784 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1785 shift, tmps[i], 0);
1786 }
1787 }
1788
1789 /* Emit code to move a block SRC of type TYPE to a block DST,
1790 where DST is non-consecutive registers represented by a PARALLEL.
1791 SSIZE represents the total size of block SRC in bytes, or -1
1792 if not known. */
1793
1794 void
1795 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1796 {
1797 rtx *tmps;
1798 int i;
1799
1800 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1801 emit_group_load_1 (tmps, dst, src, type, ssize);
1802
1803 /* Copy the extracted pieces into the proper (probable) hard regs. */
1804 for (i = 0; i < XVECLEN (dst, 0); i++)
1805 {
1806 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1807 if (d == NULL)
1808 continue;
1809 emit_move_insn (d, tmps[i]);
1810 }
1811 }
1812
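/* Usage sketch (illustrative only; the PARALLEL and sizes below are
   hypothetical, not taken from any particular target):

     a two-register destination described by

       dst = (parallel [(expr_list (reg:DI 3) (const_int 0))
                        (expr_list (reg:DI 4) (const_int 8))])

     is filled from a 16-byte BLKmode MEM with

       emit_group_load (dst, mem, type, 16);

   which extracts each eight-byte piece of MEM into a temporary and
   then copies the temporaries into the hard registers. */
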
1813 /* Similar, but load SRC into new pseudos in a format that looks like
1814 PARALLEL. This can later be fed to emit_group_move to get things
1815 in the right place. */
1816
1817 rtx
1818 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1819 {
1820 rtvec vec;
1821 int i;
1822
1823 vec = rtvec_alloc (XVECLEN (parallel, 0));
1824 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1825
1826 /* Convert the vector to look just like the original PARALLEL, except
1827 with the computed values. */
1828 for (i = 0; i < XVECLEN (parallel, 0); i++)
1829 {
1830 rtx e = XVECEXP (parallel, 0, i);
1831 rtx d = XEXP (e, 0);
1832
1833 if (d)
1834 {
1835 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1836 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1837 }
1838 RTVEC_ELT (vec, i) = e;
1839 }
1840
1841 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1842 }
1843
1844 /* Emit code to move a block SRC to block DST, where SRC and DST are
1845 non-consecutive groups of registers, each represented by a PARALLEL. */
1846
1847 void
1848 emit_group_move (rtx dst, rtx src)
1849 {
1850 int i;
1851
1852 gcc_assert (GET_CODE (src) == PARALLEL
1853 && GET_CODE (dst) == PARALLEL
1854 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1855
1856 /* Skip first entry if NULL. */
1857 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1858 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1859 XEXP (XVECEXP (src, 0, i), 0));
1860 }
1861
1862 /* Move a group of registers represented by a PARALLEL into pseudos. */
1863
1864 rtx
1865 emit_group_move_into_temps (rtx src)
1866 {
1867 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1868 int i;
1869
1870 for (i = 0; i < XVECLEN (src, 0); i++)
1871 {
1872 rtx e = XVECEXP (src, 0, i);
1873 rtx d = XEXP (e, 0);
1874
1875 if (d)
1876 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1877 RTVEC_ELT (vec, i) = e;
1878 }
1879
1880 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1881 }
1882
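/* Usage sketch (illustrative only): when the hard registers named in a
   PARALLEL might be clobbered before their final destination is ready,
   a caller can split the copy into two steps:

     rtx tmp = emit_group_move_into_temps (src);
     ... emit intervening code ...
     emit_group_move (dst, tmp);

   The first call copies every register of SRC into a fresh pseudo; the
   second moves the pseudos into the registers of DST. */
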
1883 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1884 where SRC is non-consecutive registers represented by a PARALLEL.
1885 SSIZE represents the total size of block ORIG_DST, or -1 if not
1886 known. */
1887
1888 void
1889 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1890 {
1891 rtx *tmps, dst;
1892 int start, finish, i;
1893 enum machine_mode m = GET_MODE (orig_dst);
1894
1895 gcc_assert (GET_CODE (src) == PARALLEL);
1896
1897 if (!SCALAR_INT_MODE_P (m)
1898 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1899 {
1900 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1901 if (imode == BLKmode)
1902 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1903 else
1904 dst = gen_reg_rtx (imode);
1905 emit_group_store (dst, src, type, ssize);
1906 if (imode != BLKmode)
1907 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1908 emit_move_insn (orig_dst, dst);
1909 return;
1910 }
1911
1912 /* Check for a NULL entry, used to indicate that the parameter goes
1913 both on the stack and in registers. */
1914 if (XEXP (XVECEXP (src, 0, 0), 0))
1915 start = 0;
1916 else
1917 start = 1;
1918 finish = XVECLEN (src, 0);
1919
1920 tmps = XALLOCAVEC (rtx, finish);
1921
1922 /* Copy the (probable) hard regs into pseudos. */
1923 for (i = start; i < finish; i++)
1924 {
1925 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1926 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1927 {
1928 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1929 emit_move_insn (tmps[i], reg);
1930 }
1931 else
1932 tmps[i] = reg;
1933 }
1934
1935 /* If we won't be storing directly into memory, protect the real destination
1936 from strange tricks we might play. */
1937 dst = orig_dst;
1938 if (GET_CODE (dst) == PARALLEL)
1939 {
1940 rtx temp;
1941
1942 /* We can get a PARALLEL dst if there is a conditional expression in
1943 a return statement. In that case, the dst and src are the same,
1944 so no action is necessary. */
1945 if (rtx_equal_p (dst, src))
1946 return;
1947
1948 /* It is unclear if we can ever reach here, but we may as well handle
1949 it. Allocate a temporary, and split this into a store/load to/from
1950 the temporary. */
1951 temp = assign_stack_temp (GET_MODE (dst), ssize);
1952 emit_group_store (temp, src, type, ssize);
1953 emit_group_load (dst, temp, type, ssize);
1954 return;
1955 }
1956 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1957 {
1958 enum machine_mode outer = GET_MODE (dst);
1959 enum machine_mode inner;
1960 HOST_WIDE_INT bytepos;
1961 bool done = false;
1962 rtx temp;
1963
1964 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1965 dst = gen_reg_rtx (outer);
1966
1967 /* Make life a bit easier for combine. */
1968 /* If the first element of the vector is the low part
1969 of the destination mode, use a paradoxical subreg to
1970 initialize the destination. */
1971 if (start < finish)
1972 {
1973 inner = GET_MODE (tmps[start]);
1974 bytepos = subreg_lowpart_offset (inner, outer);
1975 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1976 {
1977 temp = simplify_gen_subreg (outer, tmps[start],
1978 inner, 0);
1979 if (temp)
1980 {
1981 emit_move_insn (dst, temp);
1982 done = true;
1983 start++;
1984 }
1985 }
1986 }
1987
1988 /* If the first element wasn't the low part, try the last. */
1989 if (!done
1990 && start < finish - 1)
1991 {
1992 inner = GET_MODE (tmps[finish - 1]);
1993 bytepos = subreg_lowpart_offset (inner, outer);
1994 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1995 {
1996 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1997 inner, 0);
1998 if (temp)
1999 {
2000 emit_move_insn (dst, temp);
2001 done = true;
2002 finish--;
2003 }
2004 }
2005 }
2006
2007 /* Otherwise, simply initialize the result to zero. */
2008 if (!done)
2009 emit_move_insn (dst, CONST0_RTX (outer));
2010 }
2011
2012 /* Process the pieces. */
2013 for (i = start; i < finish; i++)
2014 {
2015 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2016 enum machine_mode mode = GET_MODE (tmps[i]);
2017 unsigned int bytelen = GET_MODE_SIZE (mode);
2018 unsigned int adj_bytelen;
2019 rtx dest = dst;
2020
2021 /* Handle trailing fragments that run over the size of the struct. */
2022 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2023 adj_bytelen = ssize - bytepos;
2024 else
2025 adj_bytelen = bytelen;
2026
2027 if (GET_CODE (dst) == CONCAT)
2028 {
2029 if (bytepos + adj_bytelen
2030 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2031 dest = XEXP (dst, 0);
2032 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2033 {
2034 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2035 dest = XEXP (dst, 1);
2036 }
2037 else
2038 {
2039 enum machine_mode dest_mode = GET_MODE (dest);
2040 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2041
2042 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2043
2044 if (GET_MODE_ALIGNMENT (dest_mode)
2045 >= GET_MODE_ALIGNMENT (tmp_mode))
2046 {
2047 dest = assign_stack_temp (dest_mode,
2048 GET_MODE_SIZE (dest_mode));
2049 emit_move_insn (adjust_address (dest,
2050 tmp_mode,
2051 bytepos),
2052 tmps[i]);
2053 dst = dest;
2054 }
2055 else
2056 {
2057 dest = assign_stack_temp (tmp_mode,
2058 GET_MODE_SIZE (tmp_mode));
2059 emit_move_insn (dest, tmps[i]);
2060 dst = adjust_address (dest, dest_mode, bytepos);
2061 }
2062 break;
2063 }
2064 }
2065
2066 /* Handle trailing fragments that run over the size of the struct. */
2067 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2068 {
2069 /* store_bit_field always takes its value from the lsb.
2070 Move the fragment to the lsb if it's not already there. */
2071 if (
2072 #ifdef BLOCK_REG_PADDING
2073 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2074 == (BYTES_BIG_ENDIAN ? upward : downward)
2075 #else
2076 BYTES_BIG_ENDIAN
2077 #endif
2078 )
2079 {
2080 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2081 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2082 shift, tmps[i], 0);
2083 }
2084
2085 /* Make sure not to write past the end of the struct. */
2086 store_bit_field (dest,
2087 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2088 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2089 VOIDmode, tmps[i]);
2090 }
2091
2092 /* Optimize the access just a bit. */
2093 else if (MEM_P (dest)
2094 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2095 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2096 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2097 && bytelen == GET_MODE_SIZE (mode))
2098 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2099
2100 else
2101 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2102 0, 0, mode, tmps[i]);
2103 }
2104
2105 /* Copy from the pseudo into the (probable) hard reg. */
2106 if (orig_dst != dst)
2107 emit_move_insn (orig_dst, dst);
2108 }
2109
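/* Usage sketch (illustrative only; the 12-byte size is hypothetical):
   spilling a value returned in two 8-byte registers into the stack
   slot of a 12-byte struct could be written as

     emit_group_store (stack_mem, src_parallel, type, 12);

   The second register then covers only a 4-byte fragment, which is
   shifted if the padding rules above require it and stored with
   store_bit_field so nothing is written past the end of the struct. */
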
2110 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2111 of the value stored in X. */
2112
2113 rtx
2114 maybe_emit_group_store (rtx x, tree type)
2115 {
2116 enum machine_mode mode = TYPE_MODE (type);
2117 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2118 if (GET_CODE (x) == PARALLEL)
2119 {
2120 rtx result = gen_reg_rtx (mode);
2121 emit_group_store (result, x, type, int_size_in_bytes (type));
2122 return result;
2123 }
2124 return x;
2125 }
2126
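/* Usage sketch (illustrative only): a caller that needs a single rtx,
   for instance after expanding a call whose value came back as a
   PARALLEL, can simply write

     x = maybe_emit_group_store (x, type);

   and afterwards treat X as an ordinary object of TYPE_MODE (type). */
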
2127 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2128
2129 This is used on targets that return BLKmode values in registers. */
2130
2131 void
2132 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2133 {
2134 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2135 rtx src = NULL, dst = NULL;
2136 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2137 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2138 enum machine_mode mode = GET_MODE (srcreg);
2139 enum machine_mode tmode = GET_MODE (target);
2140 enum machine_mode copy_mode;
2141
2142 /* BLKmode registers created in the back-end shouldn't have survived. */
2143 gcc_assert (mode != BLKmode);
2144
2145 /* If the structure doesn't take up a whole number of words, see whether
2146 SRCREG is padded on the left or on the right. If it's on the left,
2147 set PADDING_CORRECTION to the number of bits to skip.
2148
2149 In most ABIs, the structure will be returned at the least significant end of
2150 the register, which translates to right padding on little-endian
2151 targets and left padding on big-endian targets. The opposite
2152 holds if the structure is returned at the most significant
2153 end of the register. */
2154 if (bytes % UNITS_PER_WORD != 0
2155 && (targetm.calls.return_in_msb (type)
2156 ? !BYTES_BIG_ENDIAN
2157 : BYTES_BIG_ENDIAN))
2158 padding_correction
2159 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2160
2161 /* We can use a single move if we have an exact mode for the size. */
2162 else if (MEM_P (target)
2163 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2164 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2165 && bytes == GET_MODE_SIZE (mode))
2166 {
2167 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2168 return;
2169 }
2170
2171 /* And if we additionally have the same mode for a register. */
2172 else if (REG_P (target)
2173 && GET_MODE (target) == mode
2174 && bytes == GET_MODE_SIZE (mode))
2175 {
2176 emit_move_insn (target, srcreg);
2177 return;
2178 }
2179
2180 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2181 into a new pseudo which is a full word. */
2182 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2183 {
2184 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2185 mode = word_mode;
2186 }
2187
2188 /* Copy the structure BITSIZE bits at a time. If the target lives in
2189 memory, take care of not reading/writing past its end by selecting
2190 a copy mode suited to BITSIZE. This should always be possible given
2191 how it is computed.
2192
2193 If the target lives in register, make sure not to select a copy mode
2194 larger than the mode of the register.
2195
2196 We could probably emit more efficient code for machines which do not use
2197 strict alignment, but it doesn't seem worth the effort at the current
2198 time. */
2199
2200 copy_mode = word_mode;
2201 if (MEM_P (target))
2202 {
2203 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2204 if (mem_mode != BLKmode)
2205 copy_mode = mem_mode;
2206 }
2207 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2208 copy_mode = tmode;
2209
2210 for (bitpos = 0, xbitpos = padding_correction;
2211 bitpos < bytes * BITS_PER_UNIT;
2212 bitpos += bitsize, xbitpos += bitsize)
2213 {
2214 /* We need a new source operand each time xbitpos is on a
2215 word boundary and when xbitpos == padding_correction
2216 (the first time through). */
2217 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2218 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2219
2220 /* We need a new destination operand each time bitpos is on
2221 a word boundary. */
2222 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2223 dst = target;
2224 else if (bitpos % BITS_PER_WORD == 0)
2225 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2226
2227 /* Use xbitpos for the source extraction (right justified) and
2228 bitpos for the destination store (left justified). */
2229 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2230 extract_bit_field (src, bitsize,
2231 xbitpos % BITS_PER_WORD, 1,
2232 NULL_RTX, copy_mode, copy_mode));
2233 }
2234 }
2235
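/* Usage sketch (illustrative only): on a target that returns small
   BLKmode aggregates in a register, the caller side might copy the
   value of a 6-byte struct out of the return register with

     copy_blkmode_from_reg (target_mem, hard_return_reg, type);

   which moves BITSIZE bits at a time and applies the padding
   correction computed above where the ABI requires it. */
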
2236 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2237 register if it contains any data, otherwise return null.
2238
2239 This is used on targets that return BLKmode values in registers. */
2240
2241 rtx
2242 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2243 {
2244 int i, n_regs;
2245 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2246 unsigned int bitsize;
2247 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2248 enum machine_mode dst_mode;
2249
2250 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2251
2252 x = expand_normal (src);
2253
2254 bytes = int_size_in_bytes (TREE_TYPE (src));
2255 if (bytes == 0)
2256 return NULL_RTX;
2257
2258 /* If the structure doesn't take up a whole number of words, see
2259 whether the register value should be padded on the left or on
2260 the right. Set PADDING_CORRECTION to the number of padding
2261 bits needed on the left side.
2262
2263 In most ABIs, the structure will be returned at the least significant end of
2264 the register, which translates to right padding on little-endian
2265 targets and left padding on big-endian targets. The opposite
2266 holds if the structure is returned at the most significant
2267 end of the register. */
2268 if (bytes % UNITS_PER_WORD != 0
2269 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2270 ? !BYTES_BIG_ENDIAN
2271 : BYTES_BIG_ENDIAN))
2272 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2273 * BITS_PER_UNIT));
2274
2275 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2276 dst_words = XALLOCAVEC (rtx, n_regs);
2277 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2278
2279 /* Copy the structure BITSIZE bits at a time. */
2280 for (bitpos = 0, xbitpos = padding_correction;
2281 bitpos < bytes * BITS_PER_UNIT;
2282 bitpos += bitsize, xbitpos += bitsize)
2283 {
2284 /* We need a new destination pseudo each time xbitpos is
2285 on a word boundary and when xbitpos == padding_correction
2286 (the first time through). */
2287 if (xbitpos % BITS_PER_WORD == 0
2288 || xbitpos == padding_correction)
2289 {
2290 /* Generate an appropriate register. */
2291 dst_word = gen_reg_rtx (word_mode);
2292 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2293
2294 /* Clear the destination before we move anything into it. */
2295 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2296 }
2297
2298 /* We need a new source operand each time bitpos is on a word
2299 boundary. */
2300 if (bitpos % BITS_PER_WORD == 0)
2301 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2302
2303 /* Use bitpos for the source extraction (left justified) and
2304 xbitpos for the destination store (right justified). */
2305 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2306 0, 0, word_mode,
2307 extract_bit_field (src_word, bitsize,
2308 bitpos % BITS_PER_WORD, 1,
2309 NULL_RTX, word_mode, word_mode));
2310 }
2311
2312 if (mode == BLKmode)
2313 {
2314 /* Find the smallest integer mode large enough to hold the
2315 entire structure. */
2316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2317 mode != VOIDmode;
2318 mode = GET_MODE_WIDER_MODE (mode))
2319 /* Have we found a large enough mode? */
2320 if (GET_MODE_SIZE (mode) >= bytes)
2321 break;
2322
2323 /* A suitable mode should have been found. */
2324 gcc_assert (mode != VOIDmode);
2325 }
2326
2327 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2328 dst_mode = word_mode;
2329 else
2330 dst_mode = mode;
2331 dst = gen_reg_rtx (dst_mode);
2332
2333 for (i = 0; i < n_regs; i++)
2334 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2335
2336 if (mode != dst_mode)
2337 dst = gen_lowpart (mode, dst);
2338
2339 return dst;
2340 }
2341
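/* Usage sketch (illustrative only; RESULT_MODE, RETVAL and RETURN_REG
   are hypothetical names): when expanding a return statement of a
   BLKmode value on such a target, the expander can do

     rtx reg = copy_blkmode_to_reg (result_mode, retval);
     if (reg)
       emit_move_insn (return_reg, reg);

   A null result only occurs when the returned type has zero size. */
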
2342 /* Add a USE expression for REG to the (possibly empty) list pointed
2343 to by CALL_FUSAGE. REG must denote a hard register. */
2344
2345 void
2346 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2347 {
2348 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2349
2350 *call_fusage
2351 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2352 }
2353
2354 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2355 to by CALL_FUSAGE. REG must denote a hard register. */
2356
2357 void
2358 clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2359 {
2360 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2361
2362 *call_fusage
2363 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2364 }
2365
2366 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2367 starting at REGNO. All of these registers must be hard registers. */
2368
2369 void
2370 use_regs (rtx *call_fusage, int regno, int nregs)
2371 {
2372 int i;
2373
2374 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2375
2376 for (i = 0; i < nregs; i++)
2377 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2378 }
2379
2380 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2381 PARALLEL REGS. This is for calls that pass values in multiple
2382 non-contiguous locations. The Irix 6 ABI has examples of this. */
2383
2384 void
2385 use_group_regs (rtx *call_fusage, rtx regs)
2386 {
2387 int i;
2388
2389 for (i = 0; i < XVECLEN (regs, 0); i++)
2390 {
2391 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2392
2393 /* A NULL entry means the parameter goes both on the stack and in
2394 registers. This can also be a MEM for targets that pass values
2395 partially on the stack and partially in registers. */
2396 if (reg != 0 && REG_P (reg))
2397 use_reg (call_fusage, reg);
2398 }
2399 }
2400
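/* Usage sketch (illustrative only; the register numbers are made up):
   a call expander collects the argument registers into a CALL_FUSAGE
   list, e.g.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 4));
     use_regs (&call_fusage, 5, 2);

   and later attaches the list to the CALL_INSN so that data-flow
   passes know those hard registers are read by the call. */
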
2401 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2402 assignment and the code of the expression on the RHS is CODE. Return
2403 NULL otherwise. */
2404
2405 static gimple
2406 get_def_for_expr (tree name, enum tree_code code)
2407 {
2408 gimple def_stmt;
2409
2410 if (TREE_CODE (name) != SSA_NAME)
2411 return NULL;
2412
2413 def_stmt = get_gimple_for_ssa_name (name);
2414 if (!def_stmt
2415 || gimple_assign_rhs_code (def_stmt) != code)
2416 return NULL;
2417
2418 return def_stmt;
2419 }
2420
2421 #ifdef HAVE_conditional_move
2422 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2423 assignment and the class of the expression on the RHS is CLASS. Return
2424 NULL otherwise. */
2425
2426 static gimple
2427 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2428 {
2429 gimple def_stmt;
2430
2431 if (TREE_CODE (name) != SSA_NAME)
2432 return NULL;
2433
2434 def_stmt = get_gimple_for_ssa_name (name);
2435 if (!def_stmt
2436 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2437 return NULL;
2438
2439 return def_stmt;
2440 }
2441 #endif
2442 \f
2443
2444 /* Determine whether the LEN bytes generated by CONSTFUN can be
2445 stored to memory using several move instructions. CONSTFUNDATA is
2446 a pointer which will be passed as argument in every CONSTFUN call.
2447 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2448 a memset operation and false if it's a copy of a constant string.
2449 Return nonzero if a call to store_by_pieces should succeed. */
2450
2451 int
2452 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2453 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2454 void *constfundata, unsigned int align, bool memsetp)
2455 {
2456 unsigned HOST_WIDE_INT l;
2457 unsigned int max_size;
2458 HOST_WIDE_INT offset = 0;
2459 enum machine_mode mode;
2460 enum insn_code icode;
2461 int reverse;
2462 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2463 rtx cst ATTRIBUTE_UNUSED;
2464
2465 if (len == 0)
2466 return 1;
2467
2468 if (! (memsetp
2469 ? SET_BY_PIECES_P (len, align)
2470 : STORE_BY_PIECES_P (len, align)))
2471 return 0;
2472
2473 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2474
2475 /* We would first store what we can in the largest integer mode, then go to
2476 successively smaller modes. */
2477
2478 for (reverse = 0;
2479 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2480 reverse++)
2481 {
2482 l = len;
2483 max_size = STORE_MAX_PIECES + 1;
2484 while (max_size > 1 && l > 0)
2485 {
2486 mode = widest_int_mode_for_size (max_size);
2487
2488 if (mode == VOIDmode)
2489 break;
2490
2491 icode = optab_handler (mov_optab, mode);
2492 if (icode != CODE_FOR_nothing
2493 && align >= GET_MODE_ALIGNMENT (mode))
2494 {
2495 unsigned int size = GET_MODE_SIZE (mode);
2496
2497 while (l >= size)
2498 {
2499 if (reverse)
2500 offset -= size;
2501
2502 cst = (*constfun) (constfundata, offset, mode);
2503 if (!targetm.legitimate_constant_p (mode, cst))
2504 return 0;
2505
2506 if (!reverse)
2507 offset += size;
2508
2509 l -= size;
2510 }
2511 }
2512
2513 max_size = GET_MODE_SIZE (mode);
2514 }
2515
2516 /* The code above should have handled everything. */
2517 gcc_assert (!l);
2518 }
2519
2520 return 1;
2521 }
2522
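/* Usage sketch (illustrative only; zero_constfun is a hypothetical
   callback mirroring clear_by_pieces_1 below):

     static rtx
     zero_constfun (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     ...
     if (can_store_by_pieces (len, zero_constfun, NULL, align, true))
       store_by_pieces (to, len, zero_constfun, NULL, align, true, 0);

   The same CONSTFUN and CONSTFUNDATA must be passed to both calls so
   that the two functions agree on the constants being stored. */
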
2523 /* Generate several move instructions to store LEN bytes generated by
2524 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2525 pointer which will be passed as argument in every CONSTFUN call.
2526 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2527 a memset operation and false if it's a copy of a constant string.
2528 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2529 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2530 stpcpy. */
2531
2532 rtx
2533 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2534 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2535 void *constfundata, unsigned int align, bool memsetp, int endp)
2536 {
2537 enum machine_mode to_addr_mode = get_address_mode (to);
2538 struct store_by_pieces_d data;
2539
2540 if (len == 0)
2541 {
2542 gcc_assert (endp != 2);
2543 return to;
2544 }
2545
2546 gcc_assert (memsetp
2547 ? SET_BY_PIECES_P (len, align)
2548 : STORE_BY_PIECES_P (len, align));
2549 data.constfun = constfun;
2550 data.constfundata = constfundata;
2551 data.len = len;
2552 data.to = to;
2553 store_by_pieces_1 (&data, align);
2554 if (endp)
2555 {
2556 rtx to1;
2557
2558 gcc_assert (!data.reverse);
2559 if (data.autinc_to)
2560 {
2561 if (endp == 2)
2562 {
2563 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2564 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2565 else
2566 data.to_addr = copy_to_mode_reg (to_addr_mode,
2567 plus_constant (to_addr_mode,
2568 data.to_addr,
2569 -1));
2570 }
2571 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2572 data.offset);
2573 }
2574 else
2575 {
2576 if (endp == 2)
2577 --data.offset;
2578 to1 = adjust_address (data.to, QImode, data.offset);
2579 }
2580 return to1;
2581 }
2582 else
2583 return data.to;
2584 }
2585
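/* Usage sketch (illustrative only): the builtin expanders use the ENDP
   argument to implement mempcpy-style semantics, roughly

     dest = store_by_pieces (dest_mem, len, constfun, constfundata,
                             align, false, 1);

   so that the returned MEM addresses the byte just past the last one
   stored, while ENDP == 0 would simply return DEST_MEM itself. */
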
2586 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2587 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2588
2589 static void
2590 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2591 {
2592 struct store_by_pieces_d data;
2593
2594 if (len == 0)
2595 return;
2596
2597 data.constfun = clear_by_pieces_1;
2598 data.constfundata = NULL;
2599 data.len = len;
2600 data.to = to;
2601 store_by_pieces_1 (&data, align);
2602 }
2603
2604 /* Callback routine for clear_by_pieces.
2605 Return const0_rtx unconditionally. */
2606
2607 static rtx
2608 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2609 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2610 enum machine_mode mode ATTRIBUTE_UNUSED)
2611 {
2612 return const0_rtx;
2613 }
2614
2615 /* Subroutine of clear_by_pieces and store_by_pieces.
2616 Generate several move instructions to store LEN bytes of block TO. (A MEM
2617 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2618
2619 static void
2620 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2621 unsigned int align ATTRIBUTE_UNUSED)
2622 {
2623 enum machine_mode to_addr_mode = get_address_mode (data->to);
2624 rtx to_addr = XEXP (data->to, 0);
2625 unsigned int max_size = STORE_MAX_PIECES + 1;
2626 enum insn_code icode;
2627
2628 data->offset = 0;
2629 data->to_addr = to_addr;
2630 data->autinc_to
2631 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2632 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2633
2634 data->explicit_inc_to = 0;
2635 data->reverse
2636 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2637 if (data->reverse)
2638 data->offset = data->len;
2639
2640 /* If storing requires more than two move insns,
2641 copy addresses to registers (to make displacements shorter)
2642 and use post-increment if available. */
2643 if (!data->autinc_to
2644 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2645 {
2646 /* Determine the main mode we'll be using.
2647 MODE might not be used depending on the definitions of the
2648 USE_* macros below. */
2649 enum machine_mode mode ATTRIBUTE_UNUSED
2650 = widest_int_mode_for_size (max_size);
2651
2652 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2653 {
2654 data->to_addr = copy_to_mode_reg (to_addr_mode,
2655 plus_constant (to_addr_mode,
2656 to_addr,
2657 data->len));
2658 data->autinc_to = 1;
2659 data->explicit_inc_to = -1;
2660 }
2661
2662 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2663 && ! data->autinc_to)
2664 {
2665 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2666 data->autinc_to = 1;
2667 data->explicit_inc_to = 1;
2668 }
2669
2670 if ( !data->autinc_to && CONSTANT_P (to_addr))
2671 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2672 }
2673
2674 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2675
2676 /* First store what we can in the largest integer mode, then go to
2677 successively smaller modes. */
2678
2679 while (max_size > 1 && data->len > 0)
2680 {
2681 enum machine_mode mode = widest_int_mode_for_size (max_size);
2682
2683 if (mode == VOIDmode)
2684 break;
2685
2686 icode = optab_handler (mov_optab, mode);
2687 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2688 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2689
2690 max_size = GET_MODE_SIZE (mode);
2691 }
2692
2693 /* The code above should have handled everything. */
2694 gcc_assert (!data->len);
2695 }
2696
2697 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2698 with move instructions for mode MODE. GENFUN is the gen_... function
2699 to make a move insn for that mode. DATA has all the other info. */
2700
2701 static void
2702 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2703 struct store_by_pieces_d *data)
2704 {
2705 unsigned int size = GET_MODE_SIZE (mode);
2706 rtx to1, cst;
2707
2708 while (data->len >= size)
2709 {
2710 if (data->reverse)
2711 data->offset -= size;
2712
2713 if (data->autinc_to)
2714 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2715 data->offset);
2716 else
2717 to1 = adjust_address (data->to, mode, data->offset);
2718
2719 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2720 emit_insn (gen_add2_insn (data->to_addr,
2721 gen_int_mode (-(HOST_WIDE_INT) size,
2722 GET_MODE (data->to_addr))));
2723
2724 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2725 emit_insn ((*genfun) (to1, cst));
2726
2727 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2728 emit_insn (gen_add2_insn (data->to_addr,
2729 gen_int_mode (size,
2730 GET_MODE (data->to_addr))));
2731
2732 if (! data->reverse)
2733 data->offset += size;
2734
2735 data->len -= size;
2736 }
2737 }
2738 \f
2739 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2740 its length in bytes. */
2741
2742 rtx
2743 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2744 unsigned int expected_align, HOST_WIDE_INT expected_size,
2745 unsigned HOST_WIDE_INT min_size,
2746 unsigned HOST_WIDE_INT max_size,
2747 unsigned HOST_WIDE_INT probable_max_size)
2748 {
2749 enum machine_mode mode = GET_MODE (object);
2750 unsigned int align;
2751
2752 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2753
2754 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2755 just move a zero. Otherwise, do this a piece at a time. */
2756 if (mode != BLKmode
2757 && CONST_INT_P (size)
2758 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2759 {
2760 rtx zero = CONST0_RTX (mode);
2761 if (zero != NULL)
2762 {
2763 emit_move_insn (object, zero);
2764 return NULL;
2765 }
2766
2767 if (COMPLEX_MODE_P (mode))
2768 {
2769 zero = CONST0_RTX (GET_MODE_INNER (mode));
2770 if (zero != NULL)
2771 {
2772 write_complex_part (object, zero, 0);
2773 write_complex_part (object, zero, 1);
2774 return NULL;
2775 }
2776 }
2777 }
2778
2779 if (size == const0_rtx)
2780 return NULL;
2781
2782 align = MEM_ALIGN (object);
2783
2784 if (CONST_INT_P (size)
2785 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2786 clear_by_pieces (object, INTVAL (size), align);
2787 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2788 expected_align, expected_size,
2789 min_size, max_size, probable_max_size))
2790 ;
2791 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2792 return set_storage_via_libcall (object, size, const0_rtx,
2793 method == BLOCK_OP_TAILCALL);
2794 else
2795 gcc_unreachable ();
2796
2797 return NULL;
2798 }
2799
2800 rtx
2801 clear_storage (rtx object, rtx size, enum block_op_methods method)
2802 {
2803 unsigned HOST_WIDE_INT max, min = 0;
2804 if (GET_CODE (size) == CONST_INT)
2805 min = max = UINTVAL (size);
2806 else
2807 max = GET_MODE_MASK (GET_MODE (size));
2808 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2809 }
2810
2811
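/* Usage sketch (illustrative only): zeroing a 32-byte BLKmode stack
   temporary might be written as

     clear_storage (stack_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   which tries clear_by_pieces first, then a setmem pattern, and
   finally falls back to a library call to memset. */
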
2812 /* A subroutine of clear_storage. Expand a call to memset.
2813 Return the return value of memset, 0 otherwise. */
2814
2815 rtx
2816 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2817 {
2818 tree call_expr, fn, object_tree, size_tree, val_tree;
2819 enum machine_mode size_mode;
2820 rtx retval;
2821
2822 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2823 place those new pseudos into a VAR_DECL and use them later. */
2824
2825 object = copy_addr_to_reg (XEXP (object, 0));
2826
2827 size_mode = TYPE_MODE (sizetype);
2828 size = convert_to_mode (size_mode, size, 1);
2829 size = copy_to_mode_reg (size_mode, size);
2830
2831 /* It is incorrect to use the libcall calling conventions to call
2832 memset in this context. This could be a user call to memset and
2833 the user may wish to examine the return value from memset. For
2834 targets where libcalls and normal calls have different conventions
2835 for returning pointers, we could end up generating incorrect code. */
2836
2837 object_tree = make_tree (ptr_type_node, object);
2838 if (!CONST_INT_P (val))
2839 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2840 size_tree = make_tree (sizetype, size);
2841 val_tree = make_tree (integer_type_node, val);
2842
2843 fn = clear_storage_libcall_fn (true);
2844 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2845 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2846
2847 retval = expand_normal (call_expr);
2848
2849 return retval;
2850 }
2851
2852 /* A subroutine of set_storage_via_libcall. Create the tree node
2853 for the function we use for block clears. */
2854
2855 tree block_clear_fn;
2856
2857 void
2858 init_block_clear_fn (const char *asmspec)
2859 {
2860 if (!block_clear_fn)
2861 {
2862 tree fn, args;
2863
2864 fn = get_identifier ("memset");
2865 args = build_function_type_list (ptr_type_node, ptr_type_node,
2866 integer_type_node, sizetype,
2867 NULL_TREE);
2868
2869 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2870 DECL_EXTERNAL (fn) = 1;
2871 TREE_PUBLIC (fn) = 1;
2872 DECL_ARTIFICIAL (fn) = 1;
2873 TREE_NOTHROW (fn) = 1;
2874 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2875 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2876
2877 block_clear_fn = fn;
2878 }
2879
2880 if (asmspec)
2881 set_user_assembler_name (block_clear_fn, asmspec);
2882 }
2883
2884 static tree
2885 clear_storage_libcall_fn (int for_call)
2886 {
2887 static bool emitted_extern;
2888
2889 if (!block_clear_fn)
2890 init_block_clear_fn (NULL);
2891
2892 if (for_call && !emitted_extern)
2893 {
2894 emitted_extern = true;
2895 make_decl_rtl (block_clear_fn);
2896 }
2897
2898 return block_clear_fn;
2899 }
2900 \f
2901 /* Expand a setmem pattern; return true if successful. */
2902
2903 bool
2904 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2905 unsigned int expected_align, HOST_WIDE_INT expected_size,
2906 unsigned HOST_WIDE_INT min_size,
2907 unsigned HOST_WIDE_INT max_size,
2908 unsigned HOST_WIDE_INT probable_max_size)
2909 {
2910 /* Try the most limited insn first, because there's no point
2911 including more than one in the machine description unless
2912 the more limited one has some advantage. */
2913
2914 enum machine_mode mode;
2915
2916 if (expected_align < align)
2917 expected_align = align;
2918 if (expected_size != -1)
2919 {
2920 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2921 expected_size = max_size;
2922 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2923 expected_size = min_size;
2924 }
2925
2926 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2927 mode = GET_MODE_WIDER_MODE (mode))
2928 {
2929 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2930
2931 if (code != CODE_FOR_nothing
2932 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2933 here because if SIZE is less than the mode mask, as it is
2934 returned by the macro, it will definitely be less than the
2935 actual mode mask. Since SIZE is within the Pmode address
2936 space, we limit MODE to Pmode. */
2937 && ((CONST_INT_P (size)
2938 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2939 <= (GET_MODE_MASK (mode) >> 1)))
2940 || max_size <= (GET_MODE_MASK (mode) >> 1)
2941 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2942 {
2943 struct expand_operand ops[9];
2944 unsigned int nops;
2945
2946 nops = insn_data[(int) code].n_generator_args;
2947 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2948
2949 create_fixed_operand (&ops[0], object);
2950 /* The check above guarantees that this size conversion is valid. */
2951 create_convert_operand_to (&ops[1], size, mode, true);
2952 create_convert_operand_from (&ops[2], val, byte_mode, true);
2953 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2954 if (nops >= 6)
2955 {
2956 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2957 create_integer_operand (&ops[5], expected_size);
2958 }
2959 if (nops >= 8)
2960 {
2961 create_integer_operand (&ops[6], min_size);
2962 /* If we cannot represent the maximal size,
2963 make parameter NULL. */
2964 if ((HOST_WIDE_INT) max_size != -1)
2965 create_integer_operand (&ops[7], max_size);
2966 else
2967 create_fixed_operand (&ops[7], NULL);
2968 }
2969 if (nops == 9)
2970 {
2971 /* If we cannot represent the maximal size,
2972 make parameter NULL. */
2973 if ((HOST_WIDE_INT) probable_max_size != -1)
2974 create_integer_operand (&ops[8], probable_max_size);
2975 else
2976 create_fixed_operand (&ops[8], NULL);
2977 }
2978 if (maybe_expand_insn (code, nops, ops))
2979 return true;
2980 }
2981 }
2982
2983 return false;
2984 }
2985
2986 \f
2987 /* Write to one of the components of the complex value CPLX. Write VAL to
2988 the real part if IMAG_P is false, and the imaginary part if its true. */
2989
2990 static void
2991 write_complex_part (rtx cplx, rtx val, bool imag_p)
2992 {
2993 enum machine_mode cmode;
2994 enum machine_mode imode;
2995 unsigned ibitsize;
2996
2997 if (GET_CODE (cplx) == CONCAT)
2998 {
2999 emit_move_insn (XEXP (cplx, imag_p), val);
3000 return;
3001 }
3002
3003 cmode = GET_MODE (cplx);
3004 imode = GET_MODE_INNER (cmode);
3005 ibitsize = GET_MODE_BITSIZE (imode);
3006
3007 /* For MEMs simplify_gen_subreg may generate an invalid new address
3008 because, e.g., the original address is considered mode-dependent
3009 by the target, which restricts simplify_subreg from invoking
3010 adjust_address_nv. Instead of preparing fallback support for an
3011 invalid address, we call adjust_address_nv directly. */
3012 if (MEM_P (cplx))
3013 {
3014 emit_move_insn (adjust_address_nv (cplx, imode,
3015 imag_p ? GET_MODE_SIZE (imode) : 0),
3016 val);
3017 return;
3018 }
3019
3020 /* If the sub-object is at least word sized, then we know that subregging
3021 will work. This special case is important, since store_bit_field
3022 wants to operate on integer modes, and there's rarely an OImode to
3023 correspond to TCmode. */
3024 if (ibitsize >= BITS_PER_WORD
3025 /* For hard regs we have exact predicates. Assume we can split
3026 the original object if it spans an even number of hard regs.
3027 This special case is important for SCmode on 64-bit platforms
3028 where the natural size of floating-point regs is 32-bit. */
3029 || (REG_P (cplx)
3030 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3031 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3032 {
3033 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3034 imag_p ? GET_MODE_SIZE (imode) : 0);
3035 if (part)
3036 {
3037 emit_move_insn (part, val);
3038 return;
3039 }
3040 else
3041 /* simplify_gen_subreg may fail for sub-word MEMs. */
3042 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3043 }
3044
3045 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3046 }
3047
3048 /* Extract one of the components of the complex value CPLX. Extract the
3049 real part if IMAG_P is false, and the imaginary part if it's true. */
3050
3051 static rtx
3052 read_complex_part (rtx cplx, bool imag_p)
3053 {
3054 enum machine_mode cmode, imode;
3055 unsigned ibitsize;
3056
3057 if (GET_CODE (cplx) == CONCAT)
3058 return XEXP (cplx, imag_p);
3059
3060 cmode = GET_MODE (cplx);
3061 imode = GET_MODE_INNER (cmode);
3062 ibitsize = GET_MODE_BITSIZE (imode);
3063
3064 /* Special case reads from complex constants that got spilled to memory. */
3065 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3066 {
3067 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3068 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3069 {
3070 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3071 if (CONSTANT_CLASS_P (part))
3072 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3073 }
3074 }
3075
3076 /* For MEMs simplify_gen_subreg may generate an invalid new address
3077 because, e.g., the original address is considered mode-dependent
3078 by the target, which restricts simplify_subreg from invoking
3079 adjust_address_nv. Instead of preparing fallback support for an
3080 invalid address, we call adjust_address_nv directly. */
3081 if (MEM_P (cplx))
3082 return adjust_address_nv (cplx, imode,
3083 imag_p ? GET_MODE_SIZE (imode) : 0);
3084
3085 /* If the sub-object is at least word sized, then we know that subregging
3086 will work. This special case is important, since extract_bit_field
3087 wants to operate on integer modes, and there's rarely an OImode to
3088 correspond to TCmode. */
3089 if (ibitsize >= BITS_PER_WORD
3090 /* For hard regs we have exact predicates. Assume we can split
3091 the original object if it spans an even number of hard regs.
3092 This special case is important for SCmode on 64-bit platforms
3093 where the natural size of floating-point regs is 32-bit. */
3094 || (REG_P (cplx)
3095 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3096 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3097 {
3098 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3099 imag_p ? GET_MODE_SIZE (imode) : 0);
3100 if (ret)
3101 return ret;
3102 else
3103 /* simplify_gen_subreg may fail for sub-word MEMs. */
3104 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3105 }
3106
3107 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3108 true, NULL_RTX, imode, imode);
3109 }
3110 \f
3111 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3112 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3113 represented in NEW_MODE. If FORCE is true, this will never happen, as
3114 we'll force-create a SUBREG if needed. */
3115
3116 static rtx
3117 emit_move_change_mode (enum machine_mode new_mode,
3118 enum machine_mode old_mode, rtx x, bool force)
3119 {
3120 rtx ret;
3121
3122 if (push_operand (x, GET_MODE (x)))
3123 {
3124 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3125 MEM_COPY_ATTRIBUTES (ret, x);
3126 }
3127 else if (MEM_P (x))
3128 {
3129 /* We don't have to worry about changing the address since the
3130 size in bytes is supposed to be the same. */
3131 if (reload_in_progress)
3132 {
3133 /* Copy the MEM to change the mode and move any
3134 substitutions from the old MEM to the new one. */
3135 ret = adjust_address_nv (x, new_mode, 0);
3136 copy_replacements (x, ret);
3137 }
3138 else
3139 ret = adjust_address (x, new_mode, 0);
3140 }
3141 else
3142 {
3143 /* Note that we do want simplify_subreg's behavior of validating
3144 that the new mode is ok for a hard register. If we were to use
3145 simplify_gen_subreg, we would create the subreg, but would
3146 probably run into the target not being able to implement it. */
3147 /* Except, of course, when FORCE is true, in which case this is exactly
3148 what we want; that is needed for CCmodes on some targets. */
3149 if (force)
3150 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3151 else
3152 ret = simplify_subreg (new_mode, x, old_mode, 0);
3153 }
3154
3155 return ret;
3156 }
3157
3158 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3159 an integer mode of the same size as MODE. Returns the instruction
3160 emitted, or NULL if such a move could not be generated. */
3161
3162 static rtx_insn *
3163 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3164 {
3165 enum machine_mode imode;
3166 enum insn_code code;
3167
3168 /* There must exist a mode of the exact size we require. */
3169 imode = int_mode_for_mode (mode);
3170 if (imode == BLKmode)
3171 return NULL;
3172
3173 /* The target must support moves in this mode. */
3174 code = optab_handler (mov_optab, imode);
3175 if (code == CODE_FOR_nothing)
3176 return NULL;
3177
3178 x = emit_move_change_mode (imode, mode, x, force);
3179 if (x == NULL_RTX)
3180 return NULL;
3181 y = emit_move_change_mode (imode, mode, y, force);
3182 if (y == NULL_RTX)
3183 return NULL;
3184 return emit_insn (GEN_FCN (code) (x, y));
3185 }
3186
3187 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3188 Return an equivalent MEM that does not use an auto-increment. */
3189
3190 rtx
3191 emit_move_resolve_push (enum machine_mode mode, rtx x)
3192 {
3193 enum rtx_code code = GET_CODE (XEXP (x, 0));
3194 HOST_WIDE_INT adjust;
3195 rtx temp;
3196
3197 adjust = GET_MODE_SIZE (mode);
3198 #ifdef PUSH_ROUNDING
3199 adjust = PUSH_ROUNDING (adjust);
3200 #endif
3201 if (code == PRE_DEC || code == POST_DEC)
3202 adjust = -adjust;
3203 else if (code == PRE_MODIFY || code == POST_MODIFY)
3204 {
3205 rtx expr = XEXP (XEXP (x, 0), 1);
3206 HOST_WIDE_INT val;
3207
3208 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3209 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3210 val = INTVAL (XEXP (expr, 1));
3211 if (GET_CODE (expr) == MINUS)
3212 val = -val;
3213 gcc_assert (adjust == val || adjust == -val);
3214 adjust = val;
3215 }
3216
3217 /* Do not use anti_adjust_stack, since we don't want to update
3218 stack_pointer_delta. */
3219 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3220 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3221 0, OPTAB_LIB_WIDEN);
3222 if (temp != stack_pointer_rtx)
3223 emit_move_insn (stack_pointer_rtx, temp);
3224
3225 switch (code)
3226 {
3227 case PRE_INC:
3228 case PRE_DEC:
3229 case PRE_MODIFY:
3230 temp = stack_pointer_rtx;
3231 break;
3232 case POST_INC:
3233 case POST_DEC:
3234 case POST_MODIFY:
3235 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3236 break;
3237 default:
3238 gcc_unreachable ();
3239 }
3240
3241 return replace_equiv_address (x, temp);
3242 }
3243
3244 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3245 X is known to satisfy push_operand, and MODE is known to be complex.
3246 Returns the last instruction emitted. */
3247
3248 rtx_insn *
3249 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3250 {
3251 enum machine_mode submode = GET_MODE_INNER (mode);
3252 bool imag_first;
3253
3254 #ifdef PUSH_ROUNDING
3255 unsigned int submodesize = GET_MODE_SIZE (submode);
3256
3257 /* In case we output to the stack, but the size is smaller than the
3258 machine can push exactly, we need to use move instructions. */
3259 if (PUSH_ROUNDING (submodesize) != submodesize)
3260 {
3261 x = emit_move_resolve_push (mode, x);
3262 return emit_move_insn (x, y);
3263 }
3264 #endif
3265
3266 /* Note that the real part always precedes the imag part in memory
3267 regardless of the machine's endianness. */
3268 switch (GET_CODE (XEXP (x, 0)))
3269 {
3270 case PRE_DEC:
3271 case POST_DEC:
3272 imag_first = true;
3273 break;
3274 case PRE_INC:
3275 case POST_INC:
3276 imag_first = false;
3277 break;
3278 default:
3279 gcc_unreachable ();
3280 }
3281
3282 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3283 read_complex_part (y, imag_first));
3284 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3285 read_complex_part (y, !imag_first));
3286 }
3287
3288 /* A subroutine of emit_move_complex. Perform the move from Y to X
3289 via two moves of the parts. Returns the last instruction emitted. */
3290
3291 rtx_insn *
3292 emit_move_complex_parts (rtx x, rtx y)
3293 {
3294 /* Show the output dies here. This is necessary for SUBREGs
3295 of pseudos since we cannot track their lifetimes correctly;
3296 hard regs shouldn't appear here except as return values. */
3297 if (!reload_completed && !reload_in_progress
3298 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3299 emit_clobber (x);
3300
3301 write_complex_part (x, read_complex_part (y, false), false);
3302 write_complex_part (x, read_complex_part (y, true), true);
3303
3304 return get_last_insn ();
3305 }
3306
3307 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3308 MODE is known to be complex. Returns the last instruction emitted. */
3309
3310 static rtx_insn *
3311 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3312 {
3313 bool try_int;
3314
3315 /* Need to take special care for pushes, to maintain proper ordering
3316 of the data, and possibly extra padding. */
3317 if (push_operand (x, mode))
3318 return emit_move_complex_push (mode, x, y);
3319
3320 /* See if we can coerce the target into moving both values at once, except
3321 for floating point where we favor moving as parts if this is easy. */
3322 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3323 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3324 && !(REG_P (x)
3325 && HARD_REGISTER_P (x)
3326 && hard_regno_nregs[REGNO (x)][mode] == 1)
3327 && !(REG_P (y)
3328 && HARD_REGISTER_P (y)
3329 && hard_regno_nregs[REGNO (y)][mode] == 1))
3330 try_int = false;
3331 /* Not possible if the values are inherently not adjacent. */
3332 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3333 try_int = false;
3334 /* Is possible if both are registers (or subregs of registers). */
3335 else if (register_operand (x, mode) && register_operand (y, mode))
3336 try_int = true;
3337 /* If one of the operands is a memory, and alignment constraints
3338 are friendly enough, we may be able to do combined memory operations.
3339 We do not attempt this if Y is a constant because that combination is
3340 usually better with the by-parts thing below. */
3341 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3342 && (!STRICT_ALIGNMENT
3343 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3344 try_int = true;
3345 else
3346 try_int = false;
3347
3348 if (try_int)
3349 {
3350 rtx_insn *ret;
3351
3352 /* For memory to memory moves, optimal behavior can be had with the
3353 existing block move logic. */
3354 if (MEM_P (x) && MEM_P (y))
3355 {
3356 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3357 BLOCK_OP_NO_LIBCALL);
3358 return get_last_insn ();
3359 }
3360
3361 ret = emit_move_via_integer (mode, x, y, true);
3362 if (ret)
3363 return ret;
3364 }
3365
3366 return emit_move_complex_parts (x, y);
3367 }
3368
3369 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3370 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3371
3372 static rtx_insn *
3373 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3374 {
3375 rtx_insn *ret;
3376
3377 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3378 if (mode != CCmode)
3379 {
3380 enum insn_code code = optab_handler (mov_optab, CCmode);
3381 if (code != CODE_FOR_nothing)
3382 {
3383 x = emit_move_change_mode (CCmode, mode, x, true);
3384 y = emit_move_change_mode (CCmode, mode, y, true);
3385 return emit_insn (GEN_FCN (code) (x, y));
3386 }
3387 }
3388
3389 /* Otherwise, find the MODE_INT mode of the same width. */
3390 ret = emit_move_via_integer (mode, x, y, false);
3391 gcc_assert (ret != NULL);
3392 return ret;
3393 }
3394
3395 /* Return true if word I of OP lies entirely in the
3396 undefined bits of a paradoxical subreg. */
3397
3398 static bool
3399 undefined_operand_subword_p (const_rtx op, int i)
3400 {
3401 enum machine_mode innermode, innermostmode;
3402 int offset;
3403 if (GET_CODE (op) != SUBREG)
3404 return false;
3405 innermode = GET_MODE (op);
3406 innermostmode = GET_MODE (SUBREG_REG (op));
3407 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3408 /* The SUBREG_BYTE represents offset, as if the value were stored in
3409 memory, except for a paradoxical subreg where we define
3410 SUBREG_BYTE to be 0; undo this exception as in
3411 simplify_subreg. */
3412 if (SUBREG_BYTE (op) == 0
3413 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3414 {
3415 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3416 if (WORDS_BIG_ENDIAN)
3417 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3418 if (BYTES_BIG_ENDIAN)
3419 offset += difference % UNITS_PER_WORD;
3420 }
3421 if (offset >= GET_MODE_SIZE (innermostmode)
3422 || offset <= -GET_MODE_SIZE (word_mode))
3423 return true;
3424 return false;
3425 }
3426
3427 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3428 MODE is any multi-word or full-word mode that lacks a move_insn
3429 pattern. Note that you will get better code if you define such
3430 patterns, even if they must turn into multiple assembler instructions. */
3431
3432 static rtx_insn *
3433 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3434 {
3435 rtx_insn *last_insn = 0;
3436 rtx_insn *seq;
3437 rtx inner;
3438 bool need_clobber;
3439 int i;
3440
3441 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3442
3443 /* If X is a push on the stack, do the push now and replace
3444 X with a reference to the stack pointer. */
3445 if (push_operand (x, mode))
3446 x = emit_move_resolve_push (mode, x);
3447
3448 /* If we are in reload, see if either operand is a MEM whose address
3449 is scheduled for replacement. */
3450 if (reload_in_progress && MEM_P (x)
3451 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3452 x = replace_equiv_address_nv (x, inner);
3453 if (reload_in_progress && MEM_P (y)
3454 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3455 y = replace_equiv_address_nv (y, inner);
3456
3457 start_sequence ();
3458
3459 need_clobber = false;
3460 for (i = 0;
3461 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3462 i++)
3463 {
3464 rtx xpart = operand_subword (x, i, 1, mode);
3465 rtx ypart;
3466
3467 /* Do not generate code for a move if it would come entirely
3468 from the undefined bits of a paradoxical subreg. */
3469 if (undefined_operand_subword_p (y, i))
3470 continue;
3471
3472 ypart = operand_subword (y, i, 1, mode);
3473
3474 /* If we can't get a part of Y, put Y into memory if it is a
3475 constant. Otherwise, force it into a register. Then we must
3476 be able to get a part of Y. */
3477 if (ypart == 0 && CONSTANT_P (y))
3478 {
3479 y = use_anchored_address (force_const_mem (mode, y));
3480 ypart = operand_subword (y, i, 1, mode);
3481 }
3482 else if (ypart == 0)
3483 ypart = operand_subword_force (y, i, mode);
3484
3485 gcc_assert (xpart && ypart);
3486
3487 need_clobber |= (GET_CODE (xpart) == SUBREG);
3488
3489 last_insn = emit_move_insn (xpart, ypart);
3490 }
3491
3492 seq = get_insns ();
3493 end_sequence ();
3494
3495 /* Show the output dies here. This is necessary for SUBREGs
3496 of pseudos since we cannot track their lifetimes correctly;
3497 hard regs shouldn't appear here except as return values.
3498 We never want to emit such a clobber after reload. */
3499 if (x != y
3500 && ! (reload_in_progress || reload_completed)
3501 && need_clobber != 0)
3502 emit_clobber (x);
3503
3504 emit_insn (seq);
3505
3506 return last_insn;
3507 }
3508
3509 /* Low level part of emit_move_insn.
3510 Called just like emit_move_insn, but assumes X and Y
3511 are basically valid. */
3512
3513 rtx_insn *
3514 emit_move_insn_1 (rtx x, rtx y)
3515 {
3516 enum machine_mode mode = GET_MODE (x);
3517 enum insn_code code;
3518
3519 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3520
3521 code = optab_handler (mov_optab, mode);
3522 if (code != CODE_FOR_nothing)
3523 return emit_insn (GEN_FCN (code) (x, y));
3524
3525 /* Expand complex moves by moving real part and imag part. */
3526 if (COMPLEX_MODE_P (mode))
3527 return emit_move_complex (mode, x, y);
3528
3529 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3530 || ALL_FIXED_POINT_MODE_P (mode))
3531 {
3532 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3533
3534 /* If we can't find an integer mode, use multi words. */
3535 if (result)
3536 return result;
3537 else
3538 return emit_move_multi_word (mode, x, y);
3539 }
3540
3541 if (GET_MODE_CLASS (mode) == MODE_CC)
3542 return emit_move_ccmode (mode, x, y);
3543
3544 /* Try using a move pattern for the corresponding integer mode. This is
3545 only safe when simplify_subreg can convert MODE constants into integer
3546 constants. At present, it can only do this reliably if the value
3547 fits within a HOST_WIDE_INT. */
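/* For illustration: on a hypothetical target without a movsf pattern,
   (set (reg:SF 100) (reg:SF 101)) would be retried here as the SImode move
   (set (subreg:SI (reg:SF 100) 0) (subreg:SI (reg:SF 101) 0)), which is why
   a constant source must be representable as an integer constant of at
   most HOST_BITS_PER_WIDE_INT bits.  */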
3548 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3549 {
3550 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3551
3552 if (ret)
3553 {
3554 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3555 return ret;
3556 }
3557 }
3558
3559 return emit_move_multi_word (mode, x, y);
3560 }
3561
3562 /* Generate code to copy Y into X.
3563 Both Y and X must have the same mode, except that
3564 Y can be a constant with VOIDmode.
3565 This mode cannot be BLKmode; use emit_block_move for that.
3566
3567 Return the last instruction emitted. */
3568
3569 rtx_insn *
3570 emit_move_insn (rtx x, rtx y)
3571 {
3572 enum machine_mode mode = GET_MODE (x);
3573 rtx y_cst = NULL_RTX;
3574 rtx_insn *last_insn;
3575 rtx set;
3576
3577 gcc_assert (mode != BLKmode
3578 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3579
3580 if (CONSTANT_P (y))
3581 {
3582 if (optimize
3583 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3584 && (last_insn = compress_float_constant (x, y)))
3585 return last_insn;
3586
3587 y_cst = y;
3588
3589 if (!targetm.legitimate_constant_p (mode, y))
3590 {
3591 y = force_const_mem (mode, y);
3592
3593 /* If the target's cannot_force_const_mem prevented the spill,
3594 assume that the target's move expanders will also take care
3595 of the non-legitimate constant. */
3596 if (!y)
3597 y = y_cst;
3598 else
3599 y = use_anchored_address (y);
3600 }
3601 }
3602
3603 /* If X or Y are memory references, verify that their addresses are valid
3604 for the machine. */
3605 if (MEM_P (x)
3606 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3607 MEM_ADDR_SPACE (x))
3608 && ! push_operand (x, GET_MODE (x))))
3609 x = validize_mem (x);
3610
3611 if (MEM_P (y)
3612 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3613 MEM_ADDR_SPACE (y)))
3614 y = validize_mem (y);
3615
3616 gcc_assert (mode != BLKmode);
3617
3618 last_insn = emit_move_insn_1 (x, y);
3619
3620 if (y_cst && REG_P (x)
3621 && (set = single_set (last_insn)) != NULL_RTX
3622 && SET_DEST (set) == x
3623 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3624 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3625
3626 return last_insn;
3627 }
3628
3629 /* If Y is representable exactly in a narrower mode, and the target can
3630 perform the extension directly from constant or memory, then emit the
3631 move as an extension. */
3632
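/* For illustration: if X has DFmode and Y is (const_double:DF 1.5), the
   value 1.5 is exact in SFmode, so on a target whose extendsfdf2 pattern
   accepts a memory operand the constant can be placed in an SFmode
   constant pool entry and extended, halving the size of the pool entry
   compared with storing it in DFmode.  */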
3633 static rtx_insn *
3634 compress_float_constant (rtx x, rtx y)
3635 {
3636 enum machine_mode dstmode = GET_MODE (x);
3637 enum machine_mode orig_srcmode = GET_MODE (y);
3638 enum machine_mode srcmode;
3639 REAL_VALUE_TYPE r;
3640 int oldcost, newcost;
3641 bool speed = optimize_insn_for_speed_p ();
3642
3643 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3644
3645 if (targetm.legitimate_constant_p (dstmode, y))
3646 oldcost = set_src_cost (y, speed);
3647 else
3648 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3649
3650 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3651 srcmode != orig_srcmode;
3652 srcmode = GET_MODE_WIDER_MODE (srcmode))
3653 {
3654 enum insn_code ic;
3655 rtx trunc_y;
3656 rtx_insn *last_insn;
3657
3658 /* Skip if the target can't extend this way. */
3659 ic = can_extend_p (dstmode, srcmode, 0);
3660 if (ic == CODE_FOR_nothing)
3661 continue;
3662
3663 /* Skip if the narrowed value isn't exact. */
3664 if (! exact_real_truncate (srcmode, &r))
3665 continue;
3666
3667 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3668
3669 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3670 {
3671 /* Skip if the target needs extra instructions to perform
3672 the extension. */
3673 if (!insn_operand_matches (ic, 1, trunc_y))
3674 continue;
3675 /* This is valid, but may not be cheaper than the original. */
3676 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3677 speed);
3678 if (oldcost < newcost)
3679 continue;
3680 }
3681 else if (float_extend_from_mem[dstmode][srcmode])
3682 {
3683 trunc_y = force_const_mem (srcmode, trunc_y);
3684 /* This is valid, but may not be cheaper than the original. */
3685 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3686 speed);
3687 if (oldcost < newcost)
3688 continue;
3689 trunc_y = validize_mem (trunc_y);
3690 }
3691 else
3692 continue;
3693
3694 /* For CSE's benefit, force the compressed constant pool entry
3695 into a new pseudo. This constant may be used in different modes,
3696 and if not, combine will put things back together for us. */
3697 trunc_y = force_reg (srcmode, trunc_y);
3698
3699 /* If x is a hard register, perform the extension into a pseudo,
3700 so that e.g. stack realignment code is aware of it. */
3701 rtx target = x;
3702 if (REG_P (x) && HARD_REGISTER_P (x))
3703 target = gen_reg_rtx (dstmode);
3704
3705 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3706 last_insn = get_last_insn ();
3707
3708 if (REG_P (target))
3709 set_unique_reg_note (last_insn, REG_EQUAL, y);
3710
3711 if (target != x)
3712 return emit_move_insn (x, target);
3713 return last_insn;
3714 }
3715
3716 return NULL;
3717 }
3718 \f
3719 /* Pushing data onto the stack. */
3720
3721 /* Push a block of length SIZE (perhaps variable)
3722 and return an rtx to address the beginning of the block.
3723 The value may be virtual_outgoing_args_rtx.
3724
3725 EXTRA is the number of bytes of padding to push in addition to SIZE.
3726 BELOW nonzero means this padding comes at low addresses;
3727 otherwise, the padding comes at high addresses. */
3728
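/* For illustration: on a downward-growing stack, push_block (GEN_INT (16),
   0, 0) emits a single anti_adjust_stack of 16 bytes and returns
   virtual_outgoing_args_rtx as the address of the new block.  */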
3729 rtx
3730 push_block (rtx size, int extra, int below)
3731 {
3732 rtx temp;
3733
3734 size = convert_modes (Pmode, ptr_mode, size, 1);
3735 if (CONSTANT_P (size))
3736 anti_adjust_stack (plus_constant (Pmode, size, extra));
3737 else if (REG_P (size) && extra == 0)
3738 anti_adjust_stack (size);
3739 else
3740 {
3741 temp = copy_to_mode_reg (Pmode, size);
3742 if (extra != 0)
3743 temp = expand_binop (Pmode, add_optab, temp,
3744 gen_int_mode (extra, Pmode),
3745 temp, 0, OPTAB_LIB_WIDEN);
3746 anti_adjust_stack (temp);
3747 }
3748
3749 #ifndef STACK_GROWS_DOWNWARD
3750 if (0)
3751 #else
3752 if (1)
3753 #endif
3754 {
3755 temp = virtual_outgoing_args_rtx;
3756 if (extra != 0 && below)
3757 temp = plus_constant (Pmode, temp, extra);
3758 }
3759 else
3760 {
3761 if (CONST_INT_P (size))
3762 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3763 -INTVAL (size) - (below ? 0 : extra));
3764 else if (extra != 0 && !below)
3765 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3766 negate_rtx (Pmode, plus_constant (Pmode, size,
3767 extra)));
3768 else
3769 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3770 negate_rtx (Pmode, size));
3771 }
3772
3773 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3774 }
3775
3776 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3777
3778 static rtx
3779 mem_autoinc_base (rtx mem)
3780 {
3781 if (MEM_P (mem))
3782 {
3783 rtx addr = XEXP (mem, 0);
3784 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3785 return XEXP (addr, 0);
3786 }
3787 return NULL;
3788 }
3789
3790 /* A utility routine used here, in reload, and in try_split. The insns
3791 after PREV up to and including LAST are known to adjust the stack,
3792 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3793 placing notes as appropriate. PREV may be NULL, indicating the
3794 entire insn sequence prior to LAST should be scanned.
3795
3796 The set of allowed stack pointer modifications is small:
3797 (1) One or more auto-inc style memory references (aka pushes),
3798 (2) One or more addition/subtraction with the SP as destination,
3799 (3) A single move insn with the SP as destination,
3800 (4) A call_pop insn,
3801 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3802
3803 Insns in the sequence that do not modify the SP are ignored,
3804 except for noreturn calls.
3805
3806 The return value is the amount of adjustment that can be trivially
3807 verified, via immediate operand or auto-inc. If the adjustment
3808 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3809
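/* For illustration, assuming SImode is four bytes wide:

     (set (mem:SI (pre_dec (reg sp))) (reg:SI 0))     returns -4
     (set (reg sp) (plus (reg sp) (const_int -12)))   returns -12
     (set (reg sp) (reg fp))                          returns HOST_WIDE_INT_MIN

   since a plain copy into the stack pointer is not a trivially
   extractable adjustment.  */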
3810 HOST_WIDE_INT
3811 find_args_size_adjust (rtx insn)
3812 {
3813 rtx dest, set, pat;
3814 int i;
3815
3816 pat = PATTERN (insn);
3817 set = NULL;
3818
3819 /* Look for a call_pop pattern. */
3820 if (CALL_P (insn))
3821 {
3822 /* We have to allow non-call_pop patterns for the case
3823 of emit_single_push_insn of a TLS address. */
3824 if (GET_CODE (pat) != PARALLEL)
3825 return 0;
3826
3827 /* All call_pop patterns have a stack pointer adjust in the parallel.
3828 The call itself is always first, and the stack adjust is
3829 usually last, so search from the end. */
3830 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3831 {
3832 set = XVECEXP (pat, 0, i);
3833 if (GET_CODE (set) != SET)
3834 continue;
3835 dest = SET_DEST (set);
3836 if (dest == stack_pointer_rtx)
3837 break;
3838 }
3839 /* We'd better have found the stack pointer adjust. */
3840 if (i == 0)
3841 return 0;
3842 /* Fall through to process the extracted SET and DEST
3843 as if it were a standalone insn. */
3844 }
3845 else if (GET_CODE (pat) == SET)
3846 set = pat;
3847 else if ((set = single_set (insn)) != NULL)
3848 ;
3849 else if (GET_CODE (pat) == PARALLEL)
3850 {
3851 /* ??? Some older ports use a parallel with a stack adjust
3852 and a store for a PUSH_ROUNDING pattern, rather than a
3853 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3854 /* ??? See h8300 and m68k, pushqi1. */
3855 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3856 {
3857 set = XVECEXP (pat, 0, i);
3858 if (GET_CODE (set) != SET)
3859 continue;
3860 dest = SET_DEST (set);
3861 if (dest == stack_pointer_rtx)
3862 break;
3863
3864 /* We do not expect an auto-inc of the sp in the parallel. */
3865 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3866 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3867 != stack_pointer_rtx);
3868 }
3869 if (i < 0)
3870 return 0;
3871 }
3872 else
3873 return 0;
3874
3875 dest = SET_DEST (set);
3876
3877 /* Look for direct modifications of the stack pointer. */
3878 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3879 {
3880 /* Look for a trivial adjustment, otherwise assume nothing. */
3881 /* Note that the SPU restore_stack_block pattern refers to
3882 the stack pointer in V4SImode. Consider that non-trivial. */
3883 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3884 && GET_CODE (SET_SRC (set)) == PLUS
3885 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3886 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3887 return INTVAL (XEXP (SET_SRC (set), 1));
3888 /* ??? Reload can generate no-op moves, which will be cleaned
3889 up later. Recognize it and continue searching. */
3890 else if (rtx_equal_p (dest, SET_SRC (set)))
3891 return 0;
3892 else
3893 return HOST_WIDE_INT_MIN;
3894 }
3895 else
3896 {
3897 rtx mem, addr;
3898
3899 /* Otherwise only think about autoinc patterns. */
3900 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3901 {
3902 mem = dest;
3903 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3904 != stack_pointer_rtx);
3905 }
3906 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3907 mem = SET_SRC (set);
3908 else
3909 return 0;
3910
3911 addr = XEXP (mem, 0);
3912 switch (GET_CODE (addr))
3913 {
3914 case PRE_INC:
3915 case POST_INC:
3916 return GET_MODE_SIZE (GET_MODE (mem));
3917 case PRE_DEC:
3918 case POST_DEC:
3919 return -GET_MODE_SIZE (GET_MODE (mem));
3920 case PRE_MODIFY:
3921 case POST_MODIFY:
3922 addr = XEXP (addr, 1);
3923 gcc_assert (GET_CODE (addr) == PLUS);
3924 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3925 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3926 return INTVAL (XEXP (addr, 1));
3927 default:
3928 gcc_unreachable ();
3929 }
3930 }
3931 }
3932
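/* Add REG_ARGS_SIZE notes to the insns after PREV up to and including
   LAST, given that the args size after LAST is END_ARGS_SIZE.  Each note
   records the args size after its insn.  Return the args size at PREV,
   or INT_MIN if some adjustment in the range could not be determined.  */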
3933 int
3934 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3935 {
3936 int args_size = end_args_size;
3937 bool saw_unknown = false;
3938 rtx insn;
3939
3940 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3941 {
3942 HOST_WIDE_INT this_delta;
3943
3944 if (!NONDEBUG_INSN_P (insn))
3945 continue;
3946
3947 this_delta = find_args_size_adjust (insn);
3948 if (this_delta == 0)
3949 {
3950 if (!CALL_P (insn)
3951 || ACCUMULATE_OUTGOING_ARGS
3952 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3953 continue;
3954 }
3955
3956 gcc_assert (!saw_unknown);
3957 if (this_delta == HOST_WIDE_INT_MIN)
3958 saw_unknown = true;
3959
3960 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3961 #ifdef STACK_GROWS_DOWNWARD
3962 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3963 #endif
3964 args_size -= this_delta;
3965 }
3966
3967 return saw_unknown ? INT_MIN : args_size;
3968 }
3969
3970 #ifdef PUSH_ROUNDING
3971 /* Emit a single push insn. */
3972
3973 static void
3974 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3975 {
3976 rtx dest_addr;
3977 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3978 rtx dest;
3979 enum insn_code icode;
3980
3981 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3982 /* If there is a push pattern, use it. Otherwise try the old way of
3983 throwing a MEM representing the push operation to the move expander.
3984 icode = optab_handler (push_optab, mode);
3985 if (icode != CODE_FOR_nothing)
3986 {
3987 struct expand_operand ops[1];
3988
3989 create_input_operand (&ops[0], x, mode);
3990 if (maybe_expand_insn (icode, 1, ops))
3991 return;
3992 }
3993 if (GET_MODE_SIZE (mode) == rounded_size)
3994 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3995 /* If we are to pad downward, adjust the stack pointer first and
3996 then store X into the stack location using an offset. This is
3997 because emit_move_insn does not know how to pad; it does not have
3998 access to type. */
3999 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4000 {
4001 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4002 HOST_WIDE_INT offset;
4003
4004 emit_move_insn (stack_pointer_rtx,
4005 expand_binop (Pmode,
4006 #ifdef STACK_GROWS_DOWNWARD
4007 sub_optab,
4008 #else
4009 add_optab,
4010 #endif
4011 stack_pointer_rtx,
4012 gen_int_mode (rounded_size, Pmode),
4013 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4014
4015 offset = (HOST_WIDE_INT) padding_size;
4016 #ifdef STACK_GROWS_DOWNWARD
4017 if (STACK_PUSH_CODE == POST_DEC)
4018 /* We have already decremented the stack pointer, so get the
4019 previous value. */
4020 offset += (HOST_WIDE_INT) rounded_size;
4021 #else
4022 if (STACK_PUSH_CODE == POST_INC)
4023 /* We have already incremented the stack pointer, so get the
4024 previous value. */
4025 offset -= (HOST_WIDE_INT) rounded_size;
4026 #endif
4027 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4028 gen_int_mode (offset, Pmode));
4029 }
4030 else
4031 {
4032 #ifdef STACK_GROWS_DOWNWARD
4033 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4034 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4035 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4036 Pmode));
4037 #else
4038 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4039 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4040 gen_int_mode (rounded_size, Pmode));
4041 #endif
4042 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4043 }
4044
4045 dest = gen_rtx_MEM (mode, dest_addr);
4046
4047 if (type != 0)
4048 {
4049 set_mem_attributes (dest, type, 1);
4050
4051 if (cfun->tail_call_marked)
4052 /* Function incoming arguments may overlap with sibling call
4053 outgoing arguments and we cannot allow reordering of reads
4054 from function arguments with stores to outgoing arguments
4055 of sibling calls. */
4056 set_mem_alias_set (dest, 0);
4057 }
4058 emit_move_insn (dest, x);
4059 }
4060
4061 /* Emit and annotate a single push insn. */
4062
4063 static void
4064 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4065 {
4066 int delta, old_delta = stack_pointer_delta;
4067 rtx prev = get_last_insn ();
4068 rtx last;
4069
4070 emit_single_push_insn_1 (mode, x, type);
4071
4072 last = get_last_insn ();
4073
4074 /* Notice the common case where we emitted exactly one insn. */
4075 if (PREV_INSN (last) == prev)
4076 {
4077 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4078 return;
4079 }
4080
4081 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4082 gcc_assert (delta == INT_MIN || delta == old_delta);
4083 }
4084 #endif
4085
4086 /* Generate code to push X onto the stack, assuming it has mode MODE and
4087 type TYPE.
4088 MODE is redundant except when X is a CONST_INT (since they don't
4089 carry mode info).
4090 SIZE is an rtx for the size of data to be copied (in bytes),
4091 needed only if X is BLKmode.
4092
4093 ALIGN (in bits) is maximum alignment we can assume.
4094
4095 If PARTIAL and REG are both nonzero, then copy that many of the first
4096 bytes of X into registers starting with REG, and push the rest of X.
4097 The amount of space pushed is decreased by PARTIAL bytes.
4098 REG must be a hard register in this case.
4099 If REG is zero but PARTIAL is not, take all other actions for an
4100 argument partially in registers, but do not actually load any
4101 registers.
4102
4103 EXTRA is the amount in bytes of extra space to leave next to this arg.
4104 This is ignored if an argument block has already been allocated.
4105
4106 On a machine that lacks real push insns, ARGS_ADDR is the address of
4107 the bottom of the argument block for this call. We use indexing off there
4108 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4109 argument block has not been preallocated.
4110
4111 ARGS_SO_FAR is the size of args previously pushed for this call.
4112
4113 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4114 for arguments passed in registers. If nonzero, it will be the number
4115 of bytes required. */
4116
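/* For illustration: on a target with push insns and PUSH_ARGS, pushing a
   simple 4-byte scalar with no partial-register part, no padding and no
   preallocated argument block, e.g.

     emit_push_insn (x, SImode, type, NULL_RTX, 32, 0, NULL_RTX,
                     0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   reduces to a single emit_single_push_insn (SImode, x, type).  */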
4117 void
4118 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4119 unsigned int align, int partial, rtx reg, int extra,
4120 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4121 rtx alignment_pad)
4122 {
4123 rtx xinner;
4124 enum direction stack_direction
4125 #ifdef STACK_GROWS_DOWNWARD
4126 = downward;
4127 #else
4128 = upward;
4129 #endif
4130
4131 /* Decide where to pad the argument: `downward' for below,
4132 `upward' for above, or `none' for don't pad it.
4133 Default is below for small data on big-endian machines; else above. */
4134 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4135
4136 /* Invert direction if stack is post-decrement.
4137 FIXME: why? */
4138 if (STACK_PUSH_CODE == POST_DEC)
4139 if (where_pad != none)
4140 where_pad = (where_pad == downward ? upward : downward);
4141
4142 xinner = x;
4143
4144 if (mode == BLKmode
4145 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4146 {
4147 /* Copy a block into the stack, entirely or partially. */
4148
4149 rtx temp;
4150 int used;
4151 int offset;
4152 int skip;
4153
4154 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4155 used = partial - offset;
4156
4157 if (mode != BLKmode)
4158 {
4159 /* A value is to be stored in an insufficiently aligned
4160 stack slot; copy via a suitably aligned slot if
4161 necessary. */
4162 size = GEN_INT (GET_MODE_SIZE (mode));
4163 if (!MEM_P (xinner))
4164 {
4165 temp = assign_temp (type, 1, 1);
4166 emit_move_insn (temp, xinner);
4167 xinner = temp;
4168 }
4169 }
4170
4171 gcc_assert (size);
4172
4173 /* USED is now the # of bytes we need not copy to the stack
4174 because registers will take care of them. */
4175
4176 if (partial != 0)
4177 xinner = adjust_address (xinner, BLKmode, used);
4178
4179 /* If the partial register-part of the arg counts in its stack size,
4180 skip the part of stack space corresponding to the registers.
4181 Otherwise, start copying to the beginning of the stack space,
4182 by setting SKIP to 0. */
4183 skip = (reg_parm_stack_space == 0) ? 0 : used;
4184
4185 #ifdef PUSH_ROUNDING
4186 /* Do it with several push insns if that doesn't take lots of insns
4187 and if there is no difficulty with push insns that skip bytes
4188 on the stack for alignment purposes. */
4189 if (args_addr == 0
4190 && PUSH_ARGS
4191 && CONST_INT_P (size)
4192 && skip == 0
4193 && MEM_ALIGN (xinner) >= align
4194 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4195 /* Here we avoid the case of a structure whose weak alignment
4196 forces many pushes of a small amount of data,
4197 and such small pushes do rounding that causes trouble. */
4198 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4199 || align >= BIGGEST_ALIGNMENT
4200 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4201 == (align / BITS_PER_UNIT)))
4202 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4203 {
4204 /* Push padding now if padding above and stack grows down,
4205 or if padding below and stack grows up.
4206 But if space already allocated, this has already been done. */
4207 if (extra && args_addr == 0
4208 && where_pad != none && where_pad != stack_direction)
4209 anti_adjust_stack (GEN_INT (extra));
4210
4211 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4212 }
4213 else
4214 #endif /* PUSH_ROUNDING */
4215 {
4216 rtx target;
4217
4218 /* Otherwise make space on the stack and copy the data
4219 to the address of that space. */
4220
4221 /* Deduct words put into registers from the size we must copy. */
4222 if (partial != 0)
4223 {
4224 if (CONST_INT_P (size))
4225 size = GEN_INT (INTVAL (size) - used);
4226 else
4227 size = expand_binop (GET_MODE (size), sub_optab, size,
4228 gen_int_mode (used, GET_MODE (size)),
4229 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4230 }
4231
4232 /* Get the address of the stack space.
4233 In this case, we do not deal with EXTRA separately.
4234 A single stack adjust will do. */
4235 if (! args_addr)
4236 {
4237 temp = push_block (size, extra, where_pad == downward);
4238 extra = 0;
4239 }
4240 else if (CONST_INT_P (args_so_far))
4241 temp = memory_address (BLKmode,
4242 plus_constant (Pmode, args_addr,
4243 skip + INTVAL (args_so_far)));
4244 else
4245 temp = memory_address (BLKmode,
4246 plus_constant (Pmode,
4247 gen_rtx_PLUS (Pmode,
4248 args_addr,
4249 args_so_far),
4250 skip));
4251
4252 if (!ACCUMULATE_OUTGOING_ARGS)
4253 {
4254 /* If the source is referenced relative to the stack pointer,
4255 copy it to another register to stabilize it. We do not need
4256 to do this if we know that we won't be changing sp. */
4257
4258 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4259 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4260 temp = copy_to_reg (temp);
4261 }
4262
4263 target = gen_rtx_MEM (BLKmode, temp);
4264
4265 /* We do *not* set_mem_attributes here, because incoming arguments
4266 may overlap with sibling call outgoing arguments and we cannot
4267 allow reordering of reads from function arguments with stores
4268 to outgoing arguments of sibling calls. We do, however, want
4269 to record the alignment of the stack slot. */
4270 /* ALIGN may well be better aligned than TYPE, e.g. due to
4271 PARM_BOUNDARY. Assume the caller isn't lying. */
4272 set_mem_align (target, align);
4273
4274 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4275 }
4276 }
4277 else if (partial > 0)
4278 {
4279 /* Scalar partly in registers. */
4280
4281 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4282 int i;
4283 int not_stack;
4284 /* # bytes of start of argument
4285 that we must make space for but need not store. */
4286 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4287 int args_offset = INTVAL (args_so_far);
4288 int skip;
4289
4290 /* Push padding now if padding above and stack grows down,
4291 or if padding below and stack grows up.
4292 But if space already allocated, this has already been done. */
4293 if (extra && args_addr == 0
4294 && where_pad != none && where_pad != stack_direction)
4295 anti_adjust_stack (GEN_INT (extra));
4296
4297 /* If we make space by pushing it, we might as well push
4298 the real data. Otherwise, we can leave OFFSET nonzero
4299 and leave the space uninitialized. */
4300 if (args_addr == 0)
4301 offset = 0;
4302
4303 /* Now NOT_STACK gets the number of words that we don't need to
4304 allocate on the stack. Convert OFFSET to words too. */
4305 not_stack = (partial - offset) / UNITS_PER_WORD;
4306 offset /= UNITS_PER_WORD;
4307
4308 /* If the partial register-part of the arg counts in its stack size,
4309 skip the part of stack space corresponding to the registers.
4310 Otherwise, start copying to the beginning of the stack space,
4311 by setting SKIP to 0. */
4312 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4313
4314 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4315 x = validize_mem (force_const_mem (mode, x));
4316
4317 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4318 SUBREGs of such registers are not allowed. */
4319 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4320 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4321 x = copy_to_reg (x);
4322
4323 /* Loop over all the words allocated on the stack for this arg. */
4324 /* We can do it by words, because any scalar bigger than a word
4325 has a size that is a multiple of a word. */
4326 for (i = size - 1; i >= not_stack; i--)
4327 if (i >= not_stack + offset)
4328 emit_push_insn (operand_subword_force (x, i, mode),
4329 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4330 0, args_addr,
4331 GEN_INT (args_offset + ((i - not_stack + skip)
4332 * UNITS_PER_WORD)),
4333 reg_parm_stack_space, alignment_pad);
4334 }
4335 else
4336 {
4337 rtx addr;
4338 rtx dest;
4339
4340 /* Push padding now if padding above and stack grows down,
4341 or if padding below and stack grows up.
4342 But if space already allocated, this has already been done. */
4343 if (extra && args_addr == 0
4344 && where_pad != none && where_pad != stack_direction)
4345 anti_adjust_stack (GEN_INT (extra));
4346
4347 #ifdef PUSH_ROUNDING
4348 if (args_addr == 0 && PUSH_ARGS)
4349 emit_single_push_insn (mode, x, type);
4350 else
4351 #endif
4352 {
4353 if (CONST_INT_P (args_so_far))
4354 addr
4355 = memory_address (mode,
4356 plus_constant (Pmode, args_addr,
4357 INTVAL (args_so_far)));
4358 else
4359 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4360 args_so_far));
4361 dest = gen_rtx_MEM (mode, addr);
4362
4363 /* We do *not* set_mem_attributes here, because incoming arguments
4364 may overlap with sibling call outgoing arguments and we cannot
4365 allow reordering of reads from function arguments with stores
4366 to outgoing arguments of sibling calls. We do, however, want
4367 to record the alignment of the stack slot. */
4368 /* ALIGN may well be better aligned than TYPE, e.g. due to
4369 PARM_BOUNDARY. Assume the caller isn't lying. */
4370 set_mem_align (dest, align);
4371
4372 emit_move_insn (dest, x);
4373 }
4374 }
4375
4376 /* If part should go in registers, copy that part
4377 into the appropriate registers. Do this now, at the end,
4378 since mem-to-mem copies above may do function calls. */
4379 if (partial > 0 && reg != 0)
4380 {
4381 /* Handle calls that pass values in multiple non-contiguous locations.
4382 The Irix 6 ABI has examples of this. */
4383 if (GET_CODE (reg) == PARALLEL)
4384 emit_group_load (reg, x, type, -1);
4385 else
4386 {
4387 gcc_assert (partial % UNITS_PER_WORD == 0);
4388 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4389 }
4390 }
4391
4392 if (extra && args_addr == 0 && where_pad == stack_direction)
4393 anti_adjust_stack (GEN_INT (extra));
4394
4395 if (alignment_pad && args_addr == 0)
4396 anti_adjust_stack (alignment_pad);
4397 }
4398 \f
4399 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4400 operations. */
4401
4402 static rtx
4403 get_subtarget (rtx x)
4404 {
4405 return (optimize
4406 || x == 0
4407 /* Only registers can be subtargets. */
4408 || !REG_P (x)
4409 /* Don't use hard regs to avoid extending their life. */
4410 || REGNO (x) < FIRST_PSEUDO_REGISTER
4411 ? 0 : x);
4412 }
4413
4414 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4415 FIELD is a bitfield. Returns true if the optimization was successful,
4416 and there's nothing else to do. */
4417
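/* For illustration, on a typical little-endian target:

     struct S { unsigned int i : 4; unsigned int j : 4; } s;
     s.j += 1;

   J is the topmost field of its byte, so the addition can be done as a
   single add of (1 << 4) to the containing QImode word, with no
   extract/shift/insert sequence.  */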
4418 static bool
4419 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4420 unsigned HOST_WIDE_INT bitpos,
4421 unsigned HOST_WIDE_INT bitregion_start,
4422 unsigned HOST_WIDE_INT bitregion_end,
4423 enum machine_mode mode1, rtx str_rtx,
4424 tree to, tree src)
4425 {
4426 enum machine_mode str_mode = GET_MODE (str_rtx);
4427 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4428 tree op0, op1;
4429 rtx value, result;
4430 optab binop;
4431 gimple srcstmt;
4432 enum tree_code code;
4433
4434 if (mode1 != VOIDmode
4435 || bitsize >= BITS_PER_WORD
4436 || str_bitsize > BITS_PER_WORD
4437 || TREE_SIDE_EFFECTS (to)
4438 || TREE_THIS_VOLATILE (to))
4439 return false;
4440
4441 STRIP_NOPS (src);
4442 if (TREE_CODE (src) != SSA_NAME)
4443 return false;
4444 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4445 return false;
4446
4447 srcstmt = get_gimple_for_ssa_name (src);
4448 if (!srcstmt
4449 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4450 return false;
4451
4452 code = gimple_assign_rhs_code (srcstmt);
4453
4454 op0 = gimple_assign_rhs1 (srcstmt);
4455
4456 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4457 to find its initialization. Hopefully the initialization will
4458 be from a bitfield load. */
4459 if (TREE_CODE (op0) == SSA_NAME)
4460 {
4461 gimple op0stmt = get_gimple_for_ssa_name (op0);
4462
4463 /* We want to eventually have OP0 be the same as TO, which
4464 should be a bitfield. */
4465 if (!op0stmt
4466 || !is_gimple_assign (op0stmt)
4467 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4468 return false;
4469 op0 = gimple_assign_rhs1 (op0stmt);
4470 }
4471
4472 op1 = gimple_assign_rhs2 (srcstmt);
4473
4474 if (!operand_equal_p (to, op0, 0))
4475 return false;
4476
4477 if (MEM_P (str_rtx))
4478 {
4479 unsigned HOST_WIDE_INT offset1;
4480
4481 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4482 str_mode = word_mode;
4483 str_mode = get_best_mode (bitsize, bitpos,
4484 bitregion_start, bitregion_end,
4485 MEM_ALIGN (str_rtx), str_mode, 0);
4486 if (str_mode == VOIDmode)
4487 return false;
4488 str_bitsize = GET_MODE_BITSIZE (str_mode);
4489
4490 offset1 = bitpos;
4491 bitpos %= str_bitsize;
4492 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4493 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4494 }
4495 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4496 return false;
4497
4498 /* If the bit field covers the whole REG/MEM, store_field
4499 will likely generate better code. */
4500 if (bitsize >= str_bitsize)
4501 return false;
4502
4503 /* We can't handle fields split across multiple entities. */
4504 if (bitpos + bitsize > str_bitsize)
4505 return false;
4506
4507 if (BYTES_BIG_ENDIAN)
4508 bitpos = str_bitsize - bitpos - bitsize;
4509
4510 switch (code)
4511 {
4512 case PLUS_EXPR:
4513 case MINUS_EXPR:
4514 /* For now, just optimize the case of the topmost bitfield
4515 where we don't need to do any masking and also
4516 1-bit bitfields where xor can be used.
4517 We might win by one instruction for the other bitfields
4518 too if insv/extv instructions aren't used, so that
4519 can be added later. */
4520 if (bitpos + bitsize != str_bitsize
4521 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4522 break;
4523
4524 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4525 value = convert_modes (str_mode,
4526 TYPE_MODE (TREE_TYPE (op1)), value,
4527 TYPE_UNSIGNED (TREE_TYPE (op1)));
4528
4529 /* We may be accessing data outside the field, which means
4530 we can alias adjacent data. */
4531 if (MEM_P (str_rtx))
4532 {
4533 str_rtx = shallow_copy_rtx (str_rtx);
4534 set_mem_alias_set (str_rtx, 0);
4535 set_mem_expr (str_rtx, 0);
4536 }
4537
4538 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4539 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4540 {
4541 value = expand_and (str_mode, value, const1_rtx, NULL);
4542 binop = xor_optab;
4543 }
4544 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4545 result = expand_binop (str_mode, binop, str_rtx,
4546 value, str_rtx, 1, OPTAB_WIDEN);
4547 if (result != str_rtx)
4548 emit_move_insn (str_rtx, result);
4549 return true;
4550
4551 case BIT_IOR_EXPR:
4552 case BIT_XOR_EXPR:
4553 if (TREE_CODE (op1) != INTEGER_CST)
4554 break;
4555 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4556 value = convert_modes (str_mode,
4557 TYPE_MODE (TREE_TYPE (op1)), value,
4558 TYPE_UNSIGNED (TREE_TYPE (op1)));
4559
4560 /* We may be accessing data outside the field, which means
4561 we can alias adjacent data. */
4562 if (MEM_P (str_rtx))
4563 {
4564 str_rtx = shallow_copy_rtx (str_rtx);
4565 set_mem_alias_set (str_rtx, 0);
4566 set_mem_expr (str_rtx, 0);
4567 }
4568
4569 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4570 if (bitpos + bitsize != str_bitsize)
4571 {
4572 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4573 str_mode);
4574 value = expand_and (str_mode, value, mask, NULL_RTX);
4575 }
4576 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4577 result = expand_binop (str_mode, binop, str_rtx,
4578 value, str_rtx, 1, OPTAB_WIDEN);
4579 if (result != str_rtx)
4580 emit_move_insn (str_rtx, result);
4581 return true;
4582
4583 default:
4584 break;
4585 }
4586
4587 return false;
4588 }
4589
4590 /* In the C++ memory model, consecutive bit fields in a structure are
4591 considered one memory location.
4592
4593 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4594 returns the bit range of consecutive bits in which this COMPONENT_REF
4595 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4596 and *OFFSET may be adjusted in the process.
4597
4598 If the access does not need to be restricted, 0 is returned in both
4599 *BITSTART and *BITEND. */
4600
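/* For illustration:

     struct S { char c; int a : 3; int b : 6; char d; } s;

   A and B share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to s.a is
   given a bit range covering only that representative; it must not read
   or write the distinct memory locations C and D, which another thread
   could be accessing concurrently.  */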
4601 static void
4602 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4603 unsigned HOST_WIDE_INT *bitend,
4604 tree exp,
4605 HOST_WIDE_INT *bitpos,
4606 tree *offset)
4607 {
4608 HOST_WIDE_INT bitoffset;
4609 tree field, repr;
4610
4611 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4612
4613 field = TREE_OPERAND (exp, 1);
4614 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4615 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4616 need to limit the range we can access. */
4617 if (!repr)
4618 {
4619 *bitstart = *bitend = 0;
4620 return;
4621 }
4622
4623 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4624 part of a larger bit field, then the representative does not serve any
4625 useful purpose. This can occur in Ada. */
4626 if (handled_component_p (TREE_OPERAND (exp, 0)))
4627 {
4628 enum machine_mode rmode;
4629 HOST_WIDE_INT rbitsize, rbitpos;
4630 tree roffset;
4631 int unsignedp;
4632 int volatilep = 0;
4633 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4634 &roffset, &rmode, &unsignedp, &volatilep, false);
4635 if ((rbitpos % BITS_PER_UNIT) != 0)
4636 {
4637 *bitstart = *bitend = 0;
4638 return;
4639 }
4640 }
4641
4642 /* Compute the adjustment to bitpos from the offset of the field
4643 relative to the representative. DECL_FIELD_OFFSET of field and
4644 repr are the same by construction if they are not constants,
4645 see finish_bitfield_layout. */
4646 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4647 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4648 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4649 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4650 else
4651 bitoffset = 0;
4652 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4653 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4654
4655 /* If the adjustment is larger than bitpos, we would have a negative bit
4656 position for the lower bound and this may wreak havoc later. Adjust
4657 offset and bitpos to make the lower bound non-negative in that case. */
4658 if (bitoffset > *bitpos)
4659 {
4660 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4661 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4662
4663 *bitpos += adjust;
4664 if (*offset == NULL_TREE)
4665 *offset = size_int (-adjust / BITS_PER_UNIT);
4666 else
4667 *offset
4668 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4669 *bitstart = 0;
4670 }
4671 else
4672 *bitstart = *bitpos - bitoffset;
4673
4674 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4675 }
4676
4677 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4678 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4679 DECL_RTL was not set yet, return NORTL. */
4680
4681 static inline bool
4682 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4683 {
4684 if (TREE_CODE (addr) != ADDR_EXPR)
4685 return false;
4686
4687 tree base = TREE_OPERAND (addr, 0);
4688
4689 if (!DECL_P (base)
4690 || TREE_ADDRESSABLE (base)
4691 || DECL_MODE (base) == BLKmode)
4692 return false;
4693
4694 if (!DECL_RTL_SET_P (base))
4695 return nortl;
4696
4697 return (!MEM_P (DECL_RTL (base)));
4698 }
4699
4700 /* Returns true if the MEM_REF REF refers to an object that does not
4701 reside in memory and has non-BLKmode. */
4702
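/* For illustration: a folded access such as MEM[(int *)&x], where X is a
   non-addressable local scalar that was given a register (so DECL_RTL is
   not a MEM), satisfies this predicate; a reference to a BLKmode or
   addressable decl does not.  */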
4703 static inline bool
4704 mem_ref_refers_to_non_mem_p (tree ref)
4705 {
4706 tree base = TREE_OPERAND (ref, 0);
4707 return addr_expr_of_non_mem_decl_p_1 (base, false);
4708 }
4709
4710 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4711 is true, try generating a nontemporal store. */
4712
4713 void
4714 expand_assignment (tree to, tree from, bool nontemporal)
4715 {
4716 rtx to_rtx = 0;
4717 rtx result;
4718 enum machine_mode mode;
4719 unsigned int align;
4720 enum insn_code icode;
4721
4722 /* Don't crash if the lhs of the assignment was erroneous. */
4723 if (TREE_CODE (to) == ERROR_MARK)
4724 {
4725 expand_normal (from);
4726 return;
4727 }
4728
4729 /* Optimize away no-op moves without side-effects. */
4730 if (operand_equal_p (to, from, 0))
4731 return;
4732
4733 /* Handle misaligned stores. */
4734 mode = TYPE_MODE (TREE_TYPE (to));
4735 if ((TREE_CODE (to) == MEM_REF
4736 || TREE_CODE (to) == TARGET_MEM_REF)
4737 && mode != BLKmode
4738 && !mem_ref_refers_to_non_mem_p (to)
4739 && ((align = get_object_alignment (to))
4740 < GET_MODE_ALIGNMENT (mode))
4741 && (((icode = optab_handler (movmisalign_optab, mode))
4742 != CODE_FOR_nothing)
4743 || SLOW_UNALIGNED_ACCESS (mode, align)))
4744 {
4745 rtx reg, mem;
4746
4747 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4748 reg = force_not_mem (reg);
4749 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4750
4751 if (icode != CODE_FOR_nothing)
4752 {
4753 struct expand_operand ops[2];
4754
4755 create_fixed_operand (&ops[0], mem);
4756 create_input_operand (&ops[1], reg, mode);
4757 /* The movmisalign<mode> pattern cannot fail, else the assignment
4758 would silently be omitted. */
4759 expand_insn (icode, 2, ops);
4760 }
4761 else
4762 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4763 return;
4764 }
4765
4766 /* Assignment of a structure component needs special treatment
4767 if the structure component's rtx is not simply a MEM.
4768 Assignment of an array element at a constant index, and assignment of
4769 an array element in an unaligned packed structure field, has the same
4770 problem. Same for (partially) storing into a non-memory object. */
4771 if (handled_component_p (to)
4772 || (TREE_CODE (to) == MEM_REF
4773 && mem_ref_refers_to_non_mem_p (to))
4774 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4775 {
4776 enum machine_mode mode1;
4777 HOST_WIDE_INT bitsize, bitpos;
4778 unsigned HOST_WIDE_INT bitregion_start = 0;
4779 unsigned HOST_WIDE_INT bitregion_end = 0;
4780 tree offset;
4781 int unsignedp;
4782 int volatilep = 0;
4783 tree tem;
4784
4785 push_temp_slots ();
4786 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4787 &unsignedp, &volatilep, true);
4788
4789 /* Make sure bitpos is not negative, it can wreak havoc later. */
4790 if (bitpos < 0)
4791 {
4792 gcc_assert (offset == NULL_TREE);
4793 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4794 ? 3 : exact_log2 (BITS_PER_UNIT)));
4795 bitpos &= BITS_PER_UNIT - 1;
4796 }
4797
4798 if (TREE_CODE (to) == COMPONENT_REF
4799 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4800 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4801 /* The C++ memory model naturally applies to byte-aligned fields.
4802 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4803 BITSIZE are not byte-aligned, there is no need to limit the range
4804 we can access. This can occur with packed structures in Ada. */
4805 else if (bitsize > 0
4806 && bitsize % BITS_PER_UNIT == 0
4807 && bitpos % BITS_PER_UNIT == 0)
4808 {
4809 bitregion_start = bitpos;
4810 bitregion_end = bitpos + bitsize - 1;
4811 }
4812
4813 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4814
4815 /* If the field has a mode, we want to access it in the
4816 field's mode, not the computed mode.
4817 If a MEM has VOIDmode (external with incomplete type),
4818 use BLKmode for it instead. */
4819 if (MEM_P (to_rtx))
4820 {
4821 if (mode1 != VOIDmode)
4822 to_rtx = adjust_address (to_rtx, mode1, 0);
4823 else if (GET_MODE (to_rtx) == VOIDmode)
4824 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4825 }
4826
4827 if (offset != 0)
4828 {
4829 enum machine_mode address_mode;
4830 rtx offset_rtx;
4831
4832 if (!MEM_P (to_rtx))
4833 {
4834 /* We can get constant negative offsets into arrays with broken
4835 user code. Translate this to a trap instead of ICEing. */
4836 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4837 expand_builtin_trap ();
4838 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4839 }
4840
4841 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4842 address_mode = get_address_mode (to_rtx);
4843 if (GET_MODE (offset_rtx) != address_mode)
4844 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4845
4846 /* If we have an expression in OFFSET_RTX and a non-zero
4847 byte offset in BITPOS, adding the byte offset before the
4848 OFFSET_RTX results in better intermediate code, which makes
4849 later rtl optimization passes perform better.
4850
4851 We prefer intermediate code like this:
4852
4853 r124:DI=r123:DI+0x18
4854 [r124:DI]=r121:DI
4855
4856 ... instead of ...
4857
4858 r124:DI=r123:DI+0x10
4859 [r124:DI+0x8]=r121:DI
4860
4861 This is only done for aligned data values, as these can
4862 be expected to result in single move instructions. */
4863 if (mode1 != VOIDmode
4864 && bitpos != 0
4865 && bitsize > 0
4866 && (bitpos % bitsize) == 0
4867 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4868 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4869 {
4870 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4871 bitregion_start = 0;
4872 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4873 bitregion_end -= bitpos;
4874 bitpos = 0;
4875 }
4876
4877 to_rtx = offset_address (to_rtx, offset_rtx,
4878 highest_pow2_factor_for_target (to,
4879 offset));
4880 }
4881
4882 /* No action is needed if the target is not a memory and the field
4883 lies completely outside that target. This can occur if the source
4884 code contains an out-of-bounds access to a small array. */
4885 if (!MEM_P (to_rtx)
4886 && GET_MODE (to_rtx) != BLKmode
4887 && (unsigned HOST_WIDE_INT) bitpos
4888 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4889 {
4890 expand_normal (from);
4891 result = NULL;
4892 }
4893 /* Handle expand_expr of a complex value returning a CONCAT. */
4894 else if (GET_CODE (to_rtx) == CONCAT)
4895 {
4896 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4897 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4898 && bitpos == 0
4899 && bitsize == mode_bitsize)
4900 result = store_expr (from, to_rtx, false, nontemporal);
4901 else if (bitsize == mode_bitsize / 2
4902 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4903 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4904 nontemporal);
4905 else if (bitpos + bitsize <= mode_bitsize / 2)
4906 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4907 bitregion_start, bitregion_end,
4908 mode1, from,
4909 get_alias_set (to), nontemporal);
4910 else if (bitpos >= mode_bitsize / 2)
4911 result = store_field (XEXP (to_rtx, 1), bitsize,
4912 bitpos - mode_bitsize / 2,
4913 bitregion_start, bitregion_end,
4914 mode1, from,
4915 get_alias_set (to), nontemporal);
4916 else if (bitpos == 0 && bitsize == mode_bitsize)
4917 {
4918 rtx from_rtx;
4919 result = expand_normal (from);
4920 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4921 TYPE_MODE (TREE_TYPE (from)), 0);
4922 emit_move_insn (XEXP (to_rtx, 0),
4923 read_complex_part (from_rtx, false));
4924 emit_move_insn (XEXP (to_rtx, 1),
4925 read_complex_part (from_rtx, true));
4926 }
4927 else
4928 {
4929 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4930 GET_MODE_SIZE (GET_MODE (to_rtx)));
4931 write_complex_part (temp, XEXP (to_rtx, 0), false);
4932 write_complex_part (temp, XEXP (to_rtx, 1), true);
4933 result = store_field (temp, bitsize, bitpos,
4934 bitregion_start, bitregion_end,
4935 mode1, from,
4936 get_alias_set (to), nontemporal);
4937 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4938 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4939 }
4940 }
4941 else
4942 {
4943 if (MEM_P (to_rtx))
4944 {
4945 /* If the field is at offset zero, we could have been given the
4946 DECL_RTX of the parent struct. Don't munge it. */
4947 to_rtx = shallow_copy_rtx (to_rtx);
4948 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4949 if (volatilep)
4950 MEM_VOLATILE_P (to_rtx) = 1;
4951 }
4952
4953 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4954 bitregion_start, bitregion_end,
4955 mode1,
4956 to_rtx, to, from))
4957 result = NULL;
4958 else
4959 result = store_field (to_rtx, bitsize, bitpos,
4960 bitregion_start, bitregion_end,
4961 mode1, from,
4962 get_alias_set (to), nontemporal);
4963 }
4964
4965 if (result)
4966 preserve_temp_slots (result);
4967 pop_temp_slots ();
4968 return;
4969 }
4970
4971 /* If the rhs is a function call and its value is not an aggregate,
4972 call the function before we start to compute the lhs.
4973 This is needed for correct code for cases such as
4974 val = setjmp (buf) on machines where reference to val
4975 requires loading up part of an address in a separate insn.
4976
4977 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4978 since it might be a promoted variable where the zero- or sign- extension
4979 needs to be done. Handling this in the normal way is safe because no
4980 computation is done before the call. The same is true for SSA names. */
4981 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4982 && COMPLETE_TYPE_P (TREE_TYPE (from))
4983 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4984 && ! (((TREE_CODE (to) == VAR_DECL
4985 || TREE_CODE (to) == PARM_DECL
4986 || TREE_CODE (to) == RESULT_DECL)
4987 && REG_P (DECL_RTL (to)))
4988 || TREE_CODE (to) == SSA_NAME))
4989 {
4990 rtx value;
4991
4992 push_temp_slots ();
4993 value = expand_normal (from);
4994 if (to_rtx == 0)
4995 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4996
4997 /* Handle calls that return values in multiple non-contiguous locations.
4998 The Irix 6 ABI has examples of this. */
4999 if (GET_CODE (to_rtx) == PARALLEL)
5000 {
5001 if (GET_CODE (value) == PARALLEL)
5002 emit_group_move (to_rtx, value);
5003 else
5004 emit_group_load (to_rtx, value, TREE_TYPE (from),
5005 int_size_in_bytes (TREE_TYPE (from)));
5006 }
5007 else if (GET_CODE (value) == PARALLEL)
5008 emit_group_store (to_rtx, value, TREE_TYPE (from),
5009 int_size_in_bytes (TREE_TYPE (from)));
5010 else if (GET_MODE (to_rtx) == BLKmode)
5011 {
5012 /* Handle calls that return BLKmode values in registers. */
5013 if (REG_P (value))
5014 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5015 else
5016 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5017 }
5018 else
5019 {
5020 if (POINTER_TYPE_P (TREE_TYPE (to)))
5021 value = convert_memory_address_addr_space
5022 (GET_MODE (to_rtx), value,
5023 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5024
5025 emit_move_insn (to_rtx, value);
5026 }
5027 preserve_temp_slots (to_rtx);
5028 pop_temp_slots ();
5029 return;
5030 }
5031
5032 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5033 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5034
5035 /* Don't move directly into a return register. */
5036 if (TREE_CODE (to) == RESULT_DECL
5037 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5038 {
5039 rtx temp;
5040
5041 push_temp_slots ();
5042
5043 /* If the source is itself a return value, it still is in a pseudo at
5044 this point so we can move it back to the return register directly. */
5045 if (REG_P (to_rtx)
5046 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5047 && TREE_CODE (from) != CALL_EXPR)
5048 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5049 else
5050 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5051
5052 /* Handle calls that return values in multiple non-contiguous locations.
5053 The Irix 6 ABI has examples of this. */
5054 if (GET_CODE (to_rtx) == PARALLEL)
5055 {
5056 if (GET_CODE (temp) == PARALLEL)
5057 emit_group_move (to_rtx, temp);
5058 else
5059 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5060 int_size_in_bytes (TREE_TYPE (from)));
5061 }
5062 else if (temp)
5063 emit_move_insn (to_rtx, temp);
5064
5065 preserve_temp_slots (to_rtx);
5066 pop_temp_slots ();
5067 return;
5068 }
5069
5070 /* In case we are returning the contents of an object which overlaps
5071 the place the value is being stored, use a safe function when copying
5072 a value through a pointer into a structure value return block. */
5073 if (TREE_CODE (to) == RESULT_DECL
5074 && TREE_CODE (from) == INDIRECT_REF
5075 && ADDR_SPACE_GENERIC_P
5076 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5077 && refs_may_alias_p (to, from)
5078 && cfun->returns_struct
5079 && !cfun->returns_pcc_struct)
5080 {
5081 rtx from_rtx, size;
5082
5083 push_temp_slots ();
5084 size = expr_size (from);
5085 from_rtx = expand_normal (from);
5086
5087 emit_library_call (memmove_libfunc, LCT_NORMAL,
5088 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5089 XEXP (from_rtx, 0), Pmode,
5090 convert_to_mode (TYPE_MODE (sizetype),
5091 size, TYPE_UNSIGNED (sizetype)),
5092 TYPE_MODE (sizetype));
5093
5094 preserve_temp_slots (to_rtx);
5095 pop_temp_slots ();
5096 return;
5097 }
5098
5099 /* Compute FROM and store the value in the rtx we got. */
5100
5101 push_temp_slots ();
5102 result = store_expr (from, to_rtx, 0, nontemporal);
5103 preserve_temp_slots (result);
5104 pop_temp_slots ();
5105 return;
5106 }
5107
5108 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5109 succeeded, false otherwise. */
5110
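/* For illustration: if the target defines a storentM pattern for MODE
   (e.g. V4SFmode stores via movntps on x86), the move is emitted through
   that pattern; otherwise, or if the operands fail to match, false is
   returned and the caller must fall back to an ordinary store.  */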
5111 bool
5112 emit_storent_insn (rtx to, rtx from)
5113 {
5114 struct expand_operand ops[2];
5115 enum machine_mode mode = GET_MODE (to);
5116 enum insn_code code = optab_handler (storent_optab, mode);
5117
5118 if (code == CODE_FOR_nothing)
5119 return false;
5120
5121 create_fixed_operand (&ops[0], to);
5122 create_input_operand (&ops[1], from, mode);
5123 return maybe_expand_insn (code, 2, ops);
5124 }
5125
5126 /* Generate code for computing expression EXP,
5127 and storing the value into TARGET.
5128
5129 If the mode is BLKmode then we may return TARGET itself.
5130 It turns out that in BLKmode it doesn't cause a problem,
5131 because C has no operators that could combine two different
5132 assignments into the same BLKmode object with different values
5133 with no sequence point. Will other languages need this to
5134 be more thorough?
5135
5136 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5137 stack, and block moves may need to be treated specially.
5138
5139 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5140
5141 rtx
5142 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5143 {
5144 rtx temp;
5145 rtx alt_rtl = NULL_RTX;
5146 location_t loc = curr_insn_location ();
5147
5148 if (VOID_TYPE_P (TREE_TYPE (exp)))
5149 {
5150 /* C++ can generate ?: expressions with a throw expression in one
5151 branch and an rvalue in the other. Here, we resolve attempts to
5152 store the throw expression's nonexistent result. */
5153 gcc_assert (!call_param_p);
5154 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5155 return NULL_RTX;
5156 }
5157 if (TREE_CODE (exp) == COMPOUND_EXPR)
5158 {
5159 /* Perform first part of compound expression, then assign from second
5160 part. */
5161 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5162 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5163 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5164 nontemporal);
5165 }
5166 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5167 {
5168 /* For conditional expression, get safe form of the target. Then
5169 test the condition, doing the appropriate assignment on either
5170 side. This avoids the creation of unnecessary temporaries.
5171 For non-BLKmode, it is more efficient not to do this. */
5172
5173 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5174
5175 do_pending_stack_adjust ();
5176 NO_DEFER_POP;
5177 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5178 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5179 nontemporal);
5180 emit_jump_insn (gen_jump (lab2));
5181 emit_barrier ();
5182 emit_label (lab1);
5183 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5184 nontemporal);
5185 emit_label (lab2);
5186 OK_DEFER_POP;
5187
5188 return NULL_RTX;
5189 }
5190 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5191 /* If this is a scalar in a register that is stored in a wider mode
5192 than the declared mode, compute the result into its declared mode
5193 and then convert to the wider mode. Our value is the computed
5194 expression. */
5195 {
5196 rtx inner_target = 0;
5197
5198 /* We can do the conversion inside EXP, which will often result
5199 in some optimizations. Do the conversion in two steps: first
5200 change the signedness, if needed, then the extend. But don't
5201 do this if the type of EXP is a subtype of something else
5202 since then the conversion might involve more than just
5203 converting modes. */
5204 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5205 && TREE_TYPE (TREE_TYPE (exp)) == 0
5206 && GET_MODE_PRECISION (GET_MODE (target))
5207 == TYPE_PRECISION (TREE_TYPE (exp)))
5208 {
5209 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5210 TYPE_UNSIGNED (TREE_TYPE (exp))))
5211 {
5212 /* Some types, e.g. Fortran's logical*4, won't have a signed
5213 version, so use the mode instead. */
5214 tree ntype
5215 = (signed_or_unsigned_type_for
5216 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5217 if (ntype == NULL)
5218 ntype = lang_hooks.types.type_for_mode
5219 (TYPE_MODE (TREE_TYPE (exp)),
5220 SUBREG_PROMOTED_SIGN (target));
5221
5222 exp = fold_convert_loc (loc, ntype, exp);
5223 }
5224
5225 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5226 (GET_MODE (SUBREG_REG (target)),
5227 SUBREG_PROMOTED_SIGN (target)),
5228 exp);
5229
5230 inner_target = SUBREG_REG (target);
5231 }
5232
5233 temp = expand_expr (exp, inner_target, VOIDmode,
5234 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5235
5236 /* If TEMP is a VOIDmode constant, use convert_modes to make
5237 sure that we properly convert it. */
5238 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5239 {
5240 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5241 temp, SUBREG_PROMOTED_SIGN (target));
5242 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5243 GET_MODE (target), temp,
5244 SUBREG_PROMOTED_SIGN (target));
5245 }
5246
5247 convert_move (SUBREG_REG (target), temp,
5248 SUBREG_PROMOTED_SIGN (target));
5249
5250 return NULL_RTX;
5251 }
5252 else if ((TREE_CODE (exp) == STRING_CST
5253 || (TREE_CODE (exp) == MEM_REF
5254 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5255 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5256 == STRING_CST
5257 && integer_zerop (TREE_OPERAND (exp, 1))))
5258 && !nontemporal && !call_param_p
5259 && MEM_P (target))
5260 {
5261 /* Optimize initialization of an array with a STRING_CST. */
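	  /* Illustrative example (BUF is a hypothetical variable): for

	       char buf[64] = "abc";

	     this path copies the string bytes (padded with zeros up to a
	     STORE_MAX_PIECES boundary, as strncpy would) via store_by_pieces,
	     and the clear_storage call below zeroes the rest of BUF.  */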
5262 HOST_WIDE_INT exp_len, str_copy_len;
5263 rtx dest_mem;
5264 tree str = TREE_CODE (exp) == STRING_CST
5265 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5266
5267 exp_len = int_expr_size (exp);
5268 if (exp_len <= 0)
5269 goto normal_expr;
5270
5271 if (TREE_STRING_LENGTH (str) <= 0)
5272 goto normal_expr;
5273
5274 str_copy_len = strlen (TREE_STRING_POINTER (str));
5275 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5276 goto normal_expr;
5277
5278 str_copy_len = TREE_STRING_LENGTH (str);
5279 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5280 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5281 {
5282 str_copy_len += STORE_MAX_PIECES - 1;
5283 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5284 }
5285 str_copy_len = MIN (str_copy_len, exp_len);
5286 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5287 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5288 MEM_ALIGN (target), false))
5289 goto normal_expr;
5290
5291 dest_mem = target;
5292
5293 dest_mem = store_by_pieces (dest_mem,
5294 str_copy_len, builtin_strncpy_read_str,
5295 CONST_CAST (char *,
5296 TREE_STRING_POINTER (str)),
5297 MEM_ALIGN (target), false,
5298 exp_len > str_copy_len ? 1 : 0);
5299 if (exp_len > str_copy_len)
5300 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5301 GEN_INT (exp_len - str_copy_len),
5302 BLOCK_OP_NORMAL);
5303 return NULL_RTX;
5304 }
5305 else
5306 {
5307 rtx tmp_target;
5308
5309 normal_expr:
5310       /* If we want to use a nontemporal store, force the value into a
5311 	 register first.  */
5312 tmp_target = nontemporal ? NULL_RTX : target;
5313 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5314 (call_param_p
5315 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5316 &alt_rtl, false);
5317 }
5318
5319 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5320 the same as that of TARGET, adjust the constant. This is needed, for
5321 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5322 only a word-sized value. */
5323 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5324 && TREE_CODE (exp) != ERROR_MARK
5325 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5326 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5327 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5328
5329 /* If value was not generated in the target, store it there.
5330 Convert the value to TARGET's type first if necessary and emit the
5331 pending incrementations that have been queued when expanding EXP.
5332 Note that we cannot emit the whole queue blindly because this will
5333 effectively disable the POST_INC optimization later.
5334
5335 If TEMP and TARGET compare equal according to rtx_equal_p, but
5336 one or both of them are volatile memory refs, we have to distinguish
5337 two cases:
5338 - expand_expr has used TARGET. In this case, we must not generate
5339 another copy. This can be detected by TARGET being equal according
5340 to == .
5341 - expand_expr has not used TARGET - that means that the source just
5342 happens to have the same RTX form. Since temp will have been created
5343 by expand_expr, it will compare unequal according to == .
5344 We must generate a copy in this case, to reach the correct number
5345 of volatile memory references. */
5346
5347 if ((! rtx_equal_p (temp, target)
5348 || (temp != target && (side_effects_p (temp)
5349 || side_effects_p (target))))
5350 && TREE_CODE (exp) != ERROR_MARK
5351 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5352 	 but TARGET is not a valid memory reference, TEMP will differ
5353 from TARGET although it is really the same location. */
5354 && !(alt_rtl
5355 && rtx_equal_p (alt_rtl, target)
5356 && !side_effects_p (alt_rtl)
5357 && !side_effects_p (target))
5358 /* If there's nothing to copy, don't bother. Don't call
5359 	 expr_size unless necessary, because the expr_size hook of some
5360 	 front-ends (C++) must not be given objects that are not
5361 	 supposed to be bit-copied or bit-initialized.  */
5362 && expr_size (exp) != const0_rtx)
5363 {
5364 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5365 {
5366 if (GET_MODE (target) == BLKmode)
5367 {
5368 /* Handle calls that return BLKmode values in registers. */
5369 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5370 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5371 else
5372 store_bit_field (target,
5373 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5374 0, 0, 0, GET_MODE (temp), temp);
5375 }
5376 else
5377 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5378 }
5379
5380 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5381 {
5382 /* Handle copying a string constant into an array. The string
5383 constant may be shorter than the array. So copy just the string's
5384 actual length, and clear the rest. First get the size of the data
5385 type of the string, which is actually the size of the target. */
5386 rtx size = expr_size (exp);
5387
5388 if (CONST_INT_P (size)
5389 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5390 emit_block_move (target, temp, size,
5391 (call_param_p
5392 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5393 else
5394 {
5395 enum machine_mode pointer_mode
5396 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5397 enum machine_mode address_mode = get_address_mode (target);
5398
5399 /* Compute the size of the data to copy from the string. */
5400 tree copy_size
5401 = size_binop_loc (loc, MIN_EXPR,
5402 make_tree (sizetype, size),
5403 size_int (TREE_STRING_LENGTH (exp)));
5404 rtx copy_size_rtx
5405 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5406 (call_param_p
5407 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5408 rtx label = 0;
5409
5410 /* Copy that much. */
5411 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5412 TYPE_UNSIGNED (sizetype));
5413 emit_block_move (target, temp, copy_size_rtx,
5414 (call_param_p
5415 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5416
5417 /* Figure out how much is left in TARGET that we have to clear.
5418 Do all calculations in pointer_mode. */
5419 if (CONST_INT_P (copy_size_rtx))
5420 {
5421 size = plus_constant (address_mode, size,
5422 -INTVAL (copy_size_rtx));
5423 target = adjust_address (target, BLKmode,
5424 INTVAL (copy_size_rtx));
5425 }
5426 else
5427 {
5428 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5429 copy_size_rtx, NULL_RTX, 0,
5430 OPTAB_LIB_WIDEN);
5431
5432 if (GET_MODE (copy_size_rtx) != address_mode)
5433 copy_size_rtx = convert_to_mode (address_mode,
5434 copy_size_rtx,
5435 TYPE_UNSIGNED (sizetype));
5436
5437 target = offset_address (target, copy_size_rtx,
5438 highest_pow2_factor (copy_size));
5439 label = gen_label_rtx ();
5440 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5441 GET_MODE (size), 0, label);
5442 }
5443
5444 if (size != const0_rtx)
5445 clear_storage (target, size, BLOCK_OP_NORMAL);
5446
5447 if (label)
5448 emit_label (label);
5449 }
5450 }
5451 /* Handle calls that return values in multiple non-contiguous locations.
5452 The Irix 6 ABI has examples of this. */
5453 else if (GET_CODE (target) == PARALLEL)
5454 {
5455 if (GET_CODE (temp) == PARALLEL)
5456 emit_group_move (target, temp);
5457 else
5458 emit_group_load (target, temp, TREE_TYPE (exp),
5459 int_size_in_bytes (TREE_TYPE (exp)));
5460 }
5461 else if (GET_CODE (temp) == PARALLEL)
5462 emit_group_store (target, temp, TREE_TYPE (exp),
5463 int_size_in_bytes (TREE_TYPE (exp)));
5464 else if (GET_MODE (temp) == BLKmode)
5465 emit_block_move (target, temp, expr_size (exp),
5466 (call_param_p
5467 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5468 /* If we emit a nontemporal store, there is nothing else to do. */
5469 else if (nontemporal && emit_storent_insn (target, temp))
5470 ;
5471 else
5472 {
5473 temp = force_operand (temp, target);
5474 if (temp != target)
5475 emit_move_insn (target, temp);
5476 }
5477 }
5478
5479 return NULL_RTX;
5480 }
5481 \f
5482 /* Return true if field F of structure TYPE is a flexible array. */
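/* Illustrative example:

     struct packet { int len; char payload[]; };

   PAYLOAD is the last field, its array domain has a zero lower bound and
   no upper bound, and the enclosing struct still has a constant size, so
   the predicate returns true for its FIELD_DECL.  */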
5483
5484 static bool
5485 flexible_array_member_p (const_tree f, const_tree type)
5486 {
5487 const_tree tf;
5488
5489 tf = TREE_TYPE (f);
5490 return (DECL_CHAIN (f) == NULL
5491 && TREE_CODE (tf) == ARRAY_TYPE
5492 && TYPE_DOMAIN (tf)
5493 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5494 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5495 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5496 && int_size_in_bytes (type) >= 0);
5497 }
5498
5499 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5500 must have in order for it to completely initialize a value of type TYPE.
5501 Return -1 if the number isn't known.
5502
5503 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
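/* For instance (an illustrative sketch), given

     struct s { int a; double b[4]; };

   the function returns 2 when FOR_CTOR_P (the two top-level fields) and
   5 otherwise (the scalar A plus the four elements of B).  */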
5504
5505 static HOST_WIDE_INT
5506 count_type_elements (const_tree type, bool for_ctor_p)
5507 {
5508 switch (TREE_CODE (type))
5509 {
5510 case ARRAY_TYPE:
5511 {
5512 tree nelts;
5513
5514 nelts = array_type_nelts (type);
5515 if (nelts && tree_fits_uhwi_p (nelts))
5516 {
5517 unsigned HOST_WIDE_INT n;
5518
5519 n = tree_to_uhwi (nelts) + 1;
5520 if (n == 0 || for_ctor_p)
5521 return n;
5522 else
5523 return n * count_type_elements (TREE_TYPE (type), false);
5524 }
5525 return for_ctor_p ? -1 : 1;
5526 }
5527
5528 case RECORD_TYPE:
5529 {
5530 unsigned HOST_WIDE_INT n;
5531 tree f;
5532
5533 n = 0;
5534 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5535 if (TREE_CODE (f) == FIELD_DECL)
5536 {
5537 if (!for_ctor_p)
5538 n += count_type_elements (TREE_TYPE (f), false);
5539 else if (!flexible_array_member_p (f, type))
5540 /* Don't count flexible arrays, which are not supposed
5541 to be initialized. */
5542 n += 1;
5543 }
5544
5545 return n;
5546 }
5547
5548 case UNION_TYPE:
5549 case QUAL_UNION_TYPE:
5550 {
5551 tree f;
5552 HOST_WIDE_INT n, m;
5553
5554 gcc_assert (!for_ctor_p);
5555 /* Estimate the number of scalars in each field and pick the
5556 maximum. Other estimates would do instead; the idea is simply
5557 to make sure that the estimate is not sensitive to the ordering
5558 of the fields. */
5559 n = 1;
5560 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5561 if (TREE_CODE (f) == FIELD_DECL)
5562 {
5563 m = count_type_elements (TREE_TYPE (f), false);
5564 /* If the field doesn't span the whole union, add an extra
5565 scalar for the rest. */
5566 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5567 TYPE_SIZE (type)) != 1)
5568 m++;
5569 if (n < m)
5570 n = m;
5571 }
5572 return n;
5573 }
5574
5575 case COMPLEX_TYPE:
5576 return 2;
5577
5578 case VECTOR_TYPE:
5579 return TYPE_VECTOR_SUBPARTS (type);
5580
5581 case INTEGER_TYPE:
5582 case REAL_TYPE:
5583 case FIXED_POINT_TYPE:
5584 case ENUMERAL_TYPE:
5585 case BOOLEAN_TYPE:
5586 case POINTER_TYPE:
5587 case OFFSET_TYPE:
5588 case REFERENCE_TYPE:
5589 case NULLPTR_TYPE:
5590 return 1;
5591
5592 case ERROR_MARK:
5593 return 0;
5594
5595 case VOID_TYPE:
5596 case METHOD_TYPE:
5597 case FUNCTION_TYPE:
5598 case LANG_TYPE:
5599 default:
5600 gcc_unreachable ();
5601 }
5602 }
5603
5604 /* Helper for categorize_ctor_elements. Identical interface. */
5605
5606 static bool
5607 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5608 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5609 {
5610 unsigned HOST_WIDE_INT idx;
5611 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5612 tree value, purpose, elt_type;
5613
5614 /* Whether CTOR is a valid constant initializer, in accordance with what
5615 initializer_constant_valid_p does. If inferred from the constructor
5616 elements, true until proven otherwise. */
5617 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5618 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5619
5620 nz_elts = 0;
5621 init_elts = 0;
5622 num_fields = 0;
5623 elt_type = NULL_TREE;
5624
5625 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5626 {
5627 HOST_WIDE_INT mult = 1;
5628
5629 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5630 {
5631 tree lo_index = TREE_OPERAND (purpose, 0);
5632 tree hi_index = TREE_OPERAND (purpose, 1);
5633
5634 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5635 mult = (tree_to_uhwi (hi_index)
5636 - tree_to_uhwi (lo_index) + 1);
5637 }
5638 num_fields += mult;
5639 elt_type = TREE_TYPE (value);
5640
5641 switch (TREE_CODE (value))
5642 {
5643 case CONSTRUCTOR:
5644 {
5645 HOST_WIDE_INT nz = 0, ic = 0;
5646
5647 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5648 p_complete);
5649
5650 nz_elts += mult * nz;
5651 init_elts += mult * ic;
5652
5653 if (const_from_elts_p && const_p)
5654 const_p = const_elt_p;
5655 }
5656 break;
5657
5658 case INTEGER_CST:
5659 case REAL_CST:
5660 case FIXED_CST:
5661 if (!initializer_zerop (value))
5662 nz_elts += mult;
5663 init_elts += mult;
5664 break;
5665
5666 case STRING_CST:
5667 nz_elts += mult * TREE_STRING_LENGTH (value);
5668 init_elts += mult * TREE_STRING_LENGTH (value);
5669 break;
5670
5671 case COMPLEX_CST:
5672 if (!initializer_zerop (TREE_REALPART (value)))
5673 nz_elts += mult;
5674 if (!initializer_zerop (TREE_IMAGPART (value)))
5675 nz_elts += mult;
5676 init_elts += mult;
5677 break;
5678
5679 case VECTOR_CST:
5680 {
5681 unsigned i;
5682 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5683 {
5684 tree v = VECTOR_CST_ELT (value, i);
5685 if (!initializer_zerop (v))
5686 nz_elts += mult;
5687 init_elts += mult;
5688 }
5689 }
5690 break;
5691
5692 default:
5693 {
5694 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5695 nz_elts += mult * tc;
5696 init_elts += mult * tc;
5697
5698 if (const_from_elts_p && const_p)
5699 const_p = initializer_constant_valid_p (value, elt_type)
5700 != NULL_TREE;
5701 }
5702 break;
5703 }
5704 }
5705
5706 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5707 num_fields, elt_type))
5708 *p_complete = false;
5709
5710 *p_nz_elts += nz_elts;
5711 *p_init_elts += init_elts;
5712
5713 return const_p;
5714 }
5715
5716 /* Examine CTOR to discover:
5717 * how many scalar fields are set to nonzero values,
5718 and place it in *P_NZ_ELTS;
5719 * how many scalar fields in total are in CTOR,
5720 and place it in *P_ELT_COUNT.
5721 * whether the constructor is complete -- in the sense that every
5722 meaningful byte is explicitly given a value --
5723 and place it in *P_COMPLETE.
5724
5725 Return whether or not CTOR is a valid static constant initializer, the same
5726 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5727
5728 bool
5729 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5730 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5731 {
5732 *p_nz_elts = 0;
5733 *p_init_elts = 0;
5734 *p_complete = true;
5735
5736 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5737 }
5738
5739 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5740 of which had type LAST_TYPE. Each element was itself a complete
5741 initializer, in the sense that every meaningful byte was explicitly
5742 given a value. Return true if the same is true for the constructor
5743 as a whole. */
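/* Illustratively, for "union u { char c; int i; };" on a typical target
   where the union is as wide as int, a constructor initializing I is
   complete, while one initializing only C is not, because TYPE_SIZE of
   char differs from TYPE_SIZE of the union.  */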
5744
5745 bool
5746 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5747 const_tree last_type)
5748 {
5749 if (TREE_CODE (type) == UNION_TYPE
5750 || TREE_CODE (type) == QUAL_UNION_TYPE)
5751 {
5752 if (num_elts == 0)
5753 return false;
5754
5755 gcc_assert (num_elts == 1 && last_type);
5756
5757 /* ??? We could look at each element of the union, and find the
5758 	 largest element, which would avoid comparing the size of the
5759 initialized element against any tail padding in the union.
5760 Doesn't seem worth the effort... */
5761 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5762 }
5763
5764 return count_type_elements (type, true) == num_elts;
5765 }
5766
5767 /* Return 1 if EXP contains mostly (3/4) zeros. */
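/* E.g. a complete constructor listing eight ints of which a single one is
   nonzero qualifies: nz_elts (1) is less than init_elts / 4 (2).  */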
5768
5769 static int
5770 mostly_zeros_p (const_tree exp)
5771 {
5772 if (TREE_CODE (exp) == CONSTRUCTOR)
5773 {
5774 HOST_WIDE_INT nz_elts, init_elts;
5775 bool complete_p;
5776
5777 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5778 return !complete_p || nz_elts < init_elts / 4;
5779 }
5780
5781 return initializer_zerop (exp);
5782 }
5783
5784 /* Return 1 if EXP contains all zeros. */
5785
5786 static int
5787 all_zeros_p (const_tree exp)
5788 {
5789 if (TREE_CODE (exp) == CONSTRUCTOR)
5790 {
5791 HOST_WIDE_INT nz_elts, init_elts;
5792 bool complete_p;
5793
5794 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5795 return nz_elts == 0;
5796 }
5797
5798 return initializer_zerop (exp);
5799 }
5800 \f
5801 /* Helper function for store_constructor.
5802 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5803 CLEARED is as for store_constructor.
5804 ALIAS_SET is the alias set to use for any stores.
5805
5806 This provides a recursive shortcut back to store_constructor when it isn't
5807 necessary to go through store_field. This is so that we can pass through
5808 the cleared field to let store_constructor know that we may not have to
5809 clear a substructure if the outer structure has already been cleared. */
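/* Illustrative case: if the enclosing store_constructor has already
   cleared TARGET and a field is initialized by a nested CONSTRUCTOR such
   as { 0, 5 }, CLEARED is passed through here, so the recursion stores
   only the nonzero subfield and skips the zero one.  */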
5810
5811 static void
5812 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5813 HOST_WIDE_INT bitpos, enum machine_mode mode,
5814 tree exp, int cleared, alias_set_type alias_set)
5815 {
5816 if (TREE_CODE (exp) == CONSTRUCTOR
5817 /* We can only call store_constructor recursively if the size and
5818 bit position are on a byte boundary. */
5819 && bitpos % BITS_PER_UNIT == 0
5820 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5821 /* If we have a nonzero bitpos for a register target, then we just
5822 let store_field do the bitfield handling. This is unlikely to
5823 	 generate unnecessary clear instructions anyway.  */
5824 && (bitpos == 0 || MEM_P (target)))
5825 {
5826 if (MEM_P (target))
5827 target
5828 = adjust_address (target,
5829 GET_MODE (target) == BLKmode
5830 || 0 != (bitpos
5831 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5832 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5833
5834
5835 /* Update the alias set, if required. */
5836 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5837 && MEM_ALIAS_SET (target) != 0)
5838 {
5839 target = copy_rtx (target);
5840 set_mem_alias_set (target, alias_set);
5841 }
5842
5843 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5844 }
5845 else
5846 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5847 }
5848
5849
5850 /* Returns the number of FIELD_DECLs in TYPE. */
5851
5852 static int
5853 fields_length (const_tree type)
5854 {
5855 tree t = TYPE_FIELDS (type);
5856 int count = 0;
5857
5858 for (; t; t = DECL_CHAIN (t))
5859 if (TREE_CODE (t) == FIELD_DECL)
5860 ++count;
5861
5862 return count;
5863 }
5864
5865
5866 /* Store the value of constructor EXP into the rtx TARGET.
5867 TARGET is either a REG or a MEM; we know it cannot conflict, since
5868 safe_from_p has been called.
5869    CLEARED is true if TARGET is known to have been zeroed.
5870 SIZE is the number of bytes of TARGET we are allowed to modify: this
5871 may not be the same as the size of EXP if we are assigning to a field
5872 which has been packed to exclude padding bits. */
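/* Illustrative example: for a constructor such as

     int v[8] = { 0, 0, 0, 9, 0, 0, 0, 0 };

   (assuming all eight elements are still listed in the CONSTRUCTOR) the
   ARRAY_TYPE case below finds that at least 75% of the elements are zero,
   clears the whole array via clear_storage, and then stores only the
   nonzero element 9.  */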
5873
5874 static void
5875 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5876 {
5877 tree type = TREE_TYPE (exp);
5878 #ifdef WORD_REGISTER_OPERATIONS
5879 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5880 #endif
5881
5882 switch (TREE_CODE (type))
5883 {
5884 case RECORD_TYPE:
5885 case UNION_TYPE:
5886 case QUAL_UNION_TYPE:
5887 {
5888 unsigned HOST_WIDE_INT idx;
5889 tree field, value;
5890
5891 /* If size is zero or the target is already cleared, do nothing. */
5892 if (size == 0 || cleared)
5893 cleared = 1;
5894 /* We either clear the aggregate or indicate the value is dead. */
5895 else if ((TREE_CODE (type) == UNION_TYPE
5896 || TREE_CODE (type) == QUAL_UNION_TYPE)
5897 && ! CONSTRUCTOR_ELTS (exp))
5898 /* If the constructor is empty, clear the union. */
5899 {
5900 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5901 cleared = 1;
5902 }
5903
5904 /* If we are building a static constructor into a register,
5905 set the initial value as zero so we can fold the value into
5906 a constant. But if more than one register is involved,
5907 this probably loses. */
5908 else if (REG_P (target) && TREE_STATIC (exp)
5909 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5910 {
5911 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5912 cleared = 1;
5913 }
5914
5915 /* If the constructor has fewer fields than the structure or
5916 if we are initializing the structure to mostly zeros, clear
5917 the whole structure first. Don't do this if TARGET is a
5918 register whose mode size isn't equal to SIZE since
5919 clear_storage can't handle this case. */
5920 else if (size > 0
5921 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5922 != fields_length (type))
5923 || mostly_zeros_p (exp))
5924 && (!REG_P (target)
5925 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5926 == size)))
5927 {
5928 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5929 cleared = 1;
5930 }
5931
5932 if (REG_P (target) && !cleared)
5933 emit_clobber (target);
5934
5935 /* Store each element of the constructor into the
5936 corresponding field of TARGET. */
5937 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5938 {
5939 enum machine_mode mode;
5940 HOST_WIDE_INT bitsize;
5941 HOST_WIDE_INT bitpos = 0;
5942 tree offset;
5943 rtx to_rtx = target;
5944
5945 /* Just ignore missing fields. We cleared the whole
5946 structure, above, if any fields are missing. */
5947 if (field == 0)
5948 continue;
5949
5950 if (cleared && initializer_zerop (value))
5951 continue;
5952
5953 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5954 bitsize = tree_to_uhwi (DECL_SIZE (field));
5955 else
5956 bitsize = -1;
5957
5958 mode = DECL_MODE (field);
5959 if (DECL_BIT_FIELD (field))
5960 mode = VOIDmode;
5961
5962 offset = DECL_FIELD_OFFSET (field);
5963 if (tree_fits_shwi_p (offset)
5964 && tree_fits_shwi_p (bit_position (field)))
5965 {
5966 bitpos = int_bit_position (field);
5967 offset = 0;
5968 }
5969 else
5970 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5971
5972 if (offset)
5973 {
5974 enum machine_mode address_mode;
5975 rtx offset_rtx;
5976
5977 offset
5978 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5979 make_tree (TREE_TYPE (exp),
5980 target));
5981
5982 offset_rtx = expand_normal (offset);
5983 gcc_assert (MEM_P (to_rtx));
5984
5985 address_mode = get_address_mode (to_rtx);
5986 if (GET_MODE (offset_rtx) != address_mode)
5987 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5988
5989 to_rtx = offset_address (to_rtx, offset_rtx,
5990 highest_pow2_factor (offset));
5991 }
5992
5993 #ifdef WORD_REGISTER_OPERATIONS
5994 /* If this initializes a field that is smaller than a
5995 word, at the start of a word, try to widen it to a full
5996 word. This special case allows us to output C++ member
5997 function initializations in a form that the optimizers
5998 can understand. */
5999 if (REG_P (target)
6000 && bitsize < BITS_PER_WORD
6001 && bitpos % BITS_PER_WORD == 0
6002 && GET_MODE_CLASS (mode) == MODE_INT
6003 && TREE_CODE (value) == INTEGER_CST
6004 && exp_size >= 0
6005 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6006 {
6007 tree type = TREE_TYPE (value);
6008
6009 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6010 {
6011 type = lang_hooks.types.type_for_mode
6012 (word_mode, TYPE_UNSIGNED (type));
6013 value = fold_convert (type, value);
6014 }
6015
6016 if (BYTES_BIG_ENDIAN)
6017 value
6018 = fold_build2 (LSHIFT_EXPR, type, value,
6019 build_int_cst (type,
6020 BITS_PER_WORD - bitsize));
6021 bitsize = BITS_PER_WORD;
6022 mode = word_mode;
6023 }
6024 #endif
6025
6026 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6027 && DECL_NONADDRESSABLE_P (field))
6028 {
6029 to_rtx = copy_rtx (to_rtx);
6030 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6031 }
6032
6033 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6034 value, cleared,
6035 get_alias_set (TREE_TYPE (field)));
6036 }
6037 break;
6038 }
6039 case ARRAY_TYPE:
6040 {
6041 tree value, index;
6042 unsigned HOST_WIDE_INT i;
6043 int need_to_clear;
6044 tree domain;
6045 tree elttype = TREE_TYPE (type);
6046 int const_bounds_p;
6047 HOST_WIDE_INT minelt = 0;
6048 HOST_WIDE_INT maxelt = 0;
6049
6050 domain = TYPE_DOMAIN (type);
6051 const_bounds_p = (TYPE_MIN_VALUE (domain)
6052 && TYPE_MAX_VALUE (domain)
6053 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6054 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6055
6056 /* If we have constant bounds for the range of the type, get them. */
6057 if (const_bounds_p)
6058 {
6059 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6060 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6061 }
6062
6063 /* If the constructor has fewer elements than the array, clear
6064 	   the whole array first.  Similarly if this is a static
6065 constructor of a non-BLKmode object. */
6066 if (cleared)
6067 need_to_clear = 0;
6068 else if (REG_P (target) && TREE_STATIC (exp))
6069 need_to_clear = 1;
6070 else
6071 {
6072 unsigned HOST_WIDE_INT idx;
6073 tree index, value;
6074 HOST_WIDE_INT count = 0, zero_count = 0;
6075 need_to_clear = ! const_bounds_p;
6076
6077 /* This loop is a more accurate version of the loop in
6078 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6079 is also needed to check for missing elements. */
6080 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6081 {
6082 HOST_WIDE_INT this_node_count;
6083
6084 if (need_to_clear)
6085 break;
6086
6087 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6088 {
6089 tree lo_index = TREE_OPERAND (index, 0);
6090 tree hi_index = TREE_OPERAND (index, 1);
6091
6092 if (! tree_fits_uhwi_p (lo_index)
6093 || ! tree_fits_uhwi_p (hi_index))
6094 {
6095 need_to_clear = 1;
6096 break;
6097 }
6098
6099 this_node_count = (tree_to_uhwi (hi_index)
6100 - tree_to_uhwi (lo_index) + 1);
6101 }
6102 else
6103 this_node_count = 1;
6104
6105 count += this_node_count;
6106 if (mostly_zeros_p (value))
6107 zero_count += this_node_count;
6108 }
6109
6110 /* Clear the entire array first if there are any missing
6111 elements, or if the incidence of zero elements is >=
6112 75%. */
6113 if (! need_to_clear
6114 && (count < maxelt - minelt + 1
6115 || 4 * zero_count >= 3 * count))
6116 need_to_clear = 1;
6117 }
6118
6119 if (need_to_clear && size > 0)
6120 {
6121 if (REG_P (target))
6122 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6123 else
6124 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6125 cleared = 1;
6126 }
6127
6128 if (!cleared && REG_P (target))
6129 /* Inform later passes that the old value is dead. */
6130 emit_clobber (target);
6131
6132 /* Store each element of the constructor into the
6133 corresponding element of TARGET, determined by counting the
6134 elements. */
6135 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6136 {
6137 enum machine_mode mode;
6138 HOST_WIDE_INT bitsize;
6139 HOST_WIDE_INT bitpos;
6140 rtx xtarget = target;
6141
6142 if (cleared && initializer_zerop (value))
6143 continue;
6144
6145 mode = TYPE_MODE (elttype);
6146 if (mode == BLKmode)
6147 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6148 ? tree_to_uhwi (TYPE_SIZE (elttype))
6149 : -1);
6150 else
6151 bitsize = GET_MODE_BITSIZE (mode);
6152
6153 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6154 {
6155 tree lo_index = TREE_OPERAND (index, 0);
6156 tree hi_index = TREE_OPERAND (index, 1);
6157 rtx index_r, pos_rtx;
6158 HOST_WIDE_INT lo, hi, count;
6159 tree position;
6160
6161 /* If the range is constant and "small", unroll the loop. */
6162 if (const_bounds_p
6163 && tree_fits_shwi_p (lo_index)
6164 && tree_fits_shwi_p (hi_index)
6165 && (lo = tree_to_shwi (lo_index),
6166 hi = tree_to_shwi (hi_index),
6167 count = hi - lo + 1,
6168 (!MEM_P (target)
6169 || count <= 2
6170 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6171 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6172 <= 40 * 8)))))
6173 {
6174 lo -= minelt; hi -= minelt;
6175 for (; lo <= hi; lo++)
6176 {
6177 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6178
6179 if (MEM_P (target)
6180 && !MEM_KEEP_ALIAS_SET_P (target)
6181 && TREE_CODE (type) == ARRAY_TYPE
6182 && TYPE_NONALIASED_COMPONENT (type))
6183 {
6184 target = copy_rtx (target);
6185 MEM_KEEP_ALIAS_SET_P (target) = 1;
6186 }
6187
6188 store_constructor_field
6189 (target, bitsize, bitpos, mode, value, cleared,
6190 get_alias_set (elttype));
6191 }
6192 }
6193 else
6194 {
6195 rtx loop_start = gen_label_rtx ();
6196 rtx loop_end = gen_label_rtx ();
6197 tree exit_cond;
6198
6199 expand_normal (hi_index);
6200
6201 index = build_decl (EXPR_LOCATION (exp),
6202 VAR_DECL, NULL_TREE, domain);
6203 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6204 SET_DECL_RTL (index, index_r);
6205 store_expr (lo_index, index_r, 0, false);
6206
6207 /* Build the head of the loop. */
6208 do_pending_stack_adjust ();
6209 emit_label (loop_start);
6210
6211 /* Assign value to element index. */
6212 position =
6213 fold_convert (ssizetype,
6214 fold_build2 (MINUS_EXPR,
6215 TREE_TYPE (index),
6216 index,
6217 TYPE_MIN_VALUE (domain)));
6218
6219 position =
6220 size_binop (MULT_EXPR, position,
6221 fold_convert (ssizetype,
6222 TYPE_SIZE_UNIT (elttype)));
6223
6224 pos_rtx = expand_normal (position);
6225 xtarget = offset_address (target, pos_rtx,
6226 highest_pow2_factor (position));
6227 xtarget = adjust_address (xtarget, mode, 0);
6228 if (TREE_CODE (value) == CONSTRUCTOR)
6229 store_constructor (value, xtarget, cleared,
6230 bitsize / BITS_PER_UNIT);
6231 else
6232 store_expr (value, xtarget, 0, false);
6233
6234 /* Generate a conditional jump to exit the loop. */
6235 exit_cond = build2 (LT_EXPR, integer_type_node,
6236 index, hi_index);
6237 jumpif (exit_cond, loop_end, -1);
6238
6239 /* Update the loop counter, and jump to the head of
6240 the loop. */
6241 expand_assignment (index,
6242 build2 (PLUS_EXPR, TREE_TYPE (index),
6243 index, integer_one_node),
6244 false);
6245
6246 emit_jump (loop_start);
6247
6248 /* Build the end of the loop. */
6249 emit_label (loop_end);
6250 }
6251 }
6252 else if ((index != 0 && ! tree_fits_shwi_p (index))
6253 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6254 {
6255 tree position;
6256
6257 if (index == 0)
6258 index = ssize_int (1);
6259
6260 if (minelt)
6261 index = fold_convert (ssizetype,
6262 fold_build2 (MINUS_EXPR,
6263 TREE_TYPE (index),
6264 index,
6265 TYPE_MIN_VALUE (domain)));
6266
6267 position =
6268 size_binop (MULT_EXPR, index,
6269 fold_convert (ssizetype,
6270 TYPE_SIZE_UNIT (elttype)));
6271 xtarget = offset_address (target,
6272 expand_normal (position),
6273 highest_pow2_factor (position));
6274 xtarget = adjust_address (xtarget, mode, 0);
6275 store_expr (value, xtarget, 0, false);
6276 }
6277 else
6278 {
6279 if (index != 0)
6280 bitpos = ((tree_to_shwi (index) - minelt)
6281 * tree_to_uhwi (TYPE_SIZE (elttype)));
6282 else
6283 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6284
6285 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6286 && TREE_CODE (type) == ARRAY_TYPE
6287 && TYPE_NONALIASED_COMPONENT (type))
6288 {
6289 target = copy_rtx (target);
6290 MEM_KEEP_ALIAS_SET_P (target) = 1;
6291 }
6292 store_constructor_field (target, bitsize, bitpos, mode, value,
6293 cleared, get_alias_set (elttype));
6294 }
6295 }
6296 break;
6297 }
6298
6299 case VECTOR_TYPE:
6300 {
6301 unsigned HOST_WIDE_INT idx;
6302 constructor_elt *ce;
6303 int i;
6304 int need_to_clear;
6305 int icode = CODE_FOR_nothing;
6306 tree elttype = TREE_TYPE (type);
6307 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6308 enum machine_mode eltmode = TYPE_MODE (elttype);
6309 HOST_WIDE_INT bitsize;
6310 HOST_WIDE_INT bitpos;
6311 rtvec vector = NULL;
6312 unsigned n_elts;
6313 alias_set_type alias;
6314
6315 gcc_assert (eltmode != BLKmode);
6316
6317 n_elts = TYPE_VECTOR_SUBPARTS (type);
6318 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6319 {
6320 enum machine_mode mode = GET_MODE (target);
6321
6322 icode = (int) optab_handler (vec_init_optab, mode);
6323 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6324 if (icode != CODE_FOR_nothing)
6325 {
6326 tree value;
6327
6328 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6329 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6330 {
6331 icode = CODE_FOR_nothing;
6332 break;
6333 }
6334 }
6335 if (icode != CODE_FOR_nothing)
6336 {
6337 unsigned int i;
6338
6339 vector = rtvec_alloc (n_elts);
6340 for (i = 0; i < n_elts; i++)
6341 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6342 }
6343 }
6344
6345 /* If the constructor has fewer elements than the vector,
6346 	 clear the whole vector first.  Similarly if this is a static
6347 	 constructor of a non-BLKmode object.  */
6348 if (cleared)
6349 need_to_clear = 0;
6350 else if (REG_P (target) && TREE_STATIC (exp))
6351 need_to_clear = 1;
6352 else
6353 {
6354 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6355 tree value;
6356
6357 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6358 {
6359 int n_elts_here = tree_to_uhwi
6360 (int_const_binop (TRUNC_DIV_EXPR,
6361 TYPE_SIZE (TREE_TYPE (value)),
6362 TYPE_SIZE (elttype)));
6363
6364 count += n_elts_here;
6365 if (mostly_zeros_p (value))
6366 zero_count += n_elts_here;
6367 }
6368
6369 /* Clear the entire vector first if there are any missing elements,
6370 or if the incidence of zero elements is >= 75%. */
6371 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6372 }
6373
6374 if (need_to_clear && size > 0 && !vector)
6375 {
6376 if (REG_P (target))
6377 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6378 else
6379 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6380 cleared = 1;
6381 }
6382
6383 /* Inform later passes that the old value is dead. */
6384 if (!cleared && !vector && REG_P (target))
6385 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6386
6387 if (MEM_P (target))
6388 alias = MEM_ALIAS_SET (target);
6389 else
6390 alias = get_alias_set (elttype);
6391
6392 /* Store each element of the constructor into the corresponding
6393 element of TARGET, determined by counting the elements. */
6394 for (idx = 0, i = 0;
6395 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6396 idx++, i += bitsize / elt_size)
6397 {
6398 HOST_WIDE_INT eltpos;
6399 tree value = ce->value;
6400
6401 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6402 if (cleared && initializer_zerop (value))
6403 continue;
6404
6405 if (ce->index)
6406 eltpos = tree_to_uhwi (ce->index);
6407 else
6408 eltpos = i;
6409
6410 if (vector)
6411 {
6412 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6413 elements. */
6414 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6415 RTVEC_ELT (vector, eltpos)
6416 = expand_normal (value);
6417 }
6418 else
6419 {
6420 enum machine_mode value_mode =
6421 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6422 ? TYPE_MODE (TREE_TYPE (value))
6423 : eltmode;
6424 bitpos = eltpos * elt_size;
6425 store_constructor_field (target, bitsize, bitpos, value_mode,
6426 value, cleared, alias);
6427 }
6428 }
6429
6430 if (vector)
6431 emit_insn (GEN_FCN (icode)
6432 (target,
6433 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6434 break;
6435 }
6436
6437 default:
6438 gcc_unreachable ();
6439 }
6440 }
6441
6442 /* Store the value of EXP (an expression tree)
6443 into a subfield of TARGET which has mode MODE and occupies
6444 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6445 If MODE is VOIDmode, it means that we are storing into a bit-field.
6446
6447    BITREGION_START is the bitpos of the first bitfield in this region.
6448 BITREGION_END is the bitpos of the ending bitfield in this region.
6449 These two fields are 0, if the C++ memory model does not apply,
6450 or we are not interested in keeping track of bitfield regions.
6451
6452 Always return const0_rtx unless we have something particular to
6453 return.
6454
6455 ALIAS_SET is the alias set for the destination. This value will
6456 (in general) be different from that for TARGET, since TARGET is a
6457 reference to the containing structure.
6458
6459 If NONTEMPORAL is true, try generating a nontemporal store. */
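/* Illustrative example: for

     struct s { unsigned int f : 3; } x;
     x.f = 5;

   this is typically reached with BITSIZE 3, BITPOS 0 and MODE VOIDmode,
   so the bit-field branch below expands the value and hands it to
   store_bit_field instead of going through an ordinary memory store.  */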
6460
6461 static rtx
6462 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6463 unsigned HOST_WIDE_INT bitregion_start,
6464 unsigned HOST_WIDE_INT bitregion_end,
6465 enum machine_mode mode, tree exp,
6466 alias_set_type alias_set, bool nontemporal)
6467 {
6468 if (TREE_CODE (exp) == ERROR_MARK)
6469 return const0_rtx;
6470
6471 /* If we have nothing to store, do nothing unless the expression has
6472 side-effects. */
6473 if (bitsize == 0)
6474 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6475
6476 if (GET_CODE (target) == CONCAT)
6477 {
6478 /* We're storing into a struct containing a single __complex. */
6479
6480 gcc_assert (!bitpos);
6481 return store_expr (exp, target, 0, nontemporal);
6482 }
6483
6484 /* If the structure is in a register or if the component
6485 is a bit field, we cannot use addressing to access it.
6486 Use bit-field techniques or SUBREG to store in it. */
6487
6488 if (mode == VOIDmode
6489 || (mode != BLKmode && ! direct_store[(int) mode]
6490 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6491 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6492 || REG_P (target)
6493 || GET_CODE (target) == SUBREG
6494 /* If the field isn't aligned enough to store as an ordinary memref,
6495 store it as a bit field. */
6496 || (mode != BLKmode
6497 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6498 || bitpos % GET_MODE_ALIGNMENT (mode))
6499 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6500 || (bitpos % BITS_PER_UNIT != 0)))
6501 || (bitsize >= 0 && mode != BLKmode
6502 && GET_MODE_BITSIZE (mode) > bitsize)
6503 /* If the RHS and field are a constant size and the size of the
6504 RHS isn't the same size as the bitfield, we must use bitfield
6505 operations. */
6506 || (bitsize >= 0
6507 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6508 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6509 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6510 decl we must use bitfield operations. */
6511 || (bitsize >= 0
6512 && TREE_CODE (exp) == MEM_REF
6513 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6514 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6515 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6516 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6517 {
6518 rtx temp;
6519 gimple nop_def;
6520
6521 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6522 implies a mask operation. If the precision is the same size as
6523 the field we're storing into, that mask is redundant. This is
6524 particularly common with bit field assignments generated by the
6525 C front end. */
6526 nop_def = get_def_for_expr (exp, NOP_EXPR);
6527 if (nop_def)
6528 {
6529 tree type = TREE_TYPE (exp);
6530 if (INTEGRAL_TYPE_P (type)
6531 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6532 && bitsize == TYPE_PRECISION (type))
6533 {
6534 tree op = gimple_assign_rhs1 (nop_def);
6535 type = TREE_TYPE (op);
6536 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6537 exp = op;
6538 }
6539 }
6540
6541 temp = expand_normal (exp);
6542
6543 /* If BITSIZE is narrower than the size of the type of EXP
6544 we will be narrowing TEMP. Normally, what's wanted are the
6545 low-order bits. However, if EXP's type is a record and this is
6546 	 a big-endian machine, we want the upper BITSIZE bits.  */
6547 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6548 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6549 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6550 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6551 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6552 NULL_RTX, 1);
6553
6554 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6555 if (mode != VOIDmode && mode != BLKmode
6556 && mode != TYPE_MODE (TREE_TYPE (exp)))
6557 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6558
6559 /* If the modes of TEMP and TARGET are both BLKmode, both
6560 must be in memory and BITPOS must be aligned on a byte
6561 boundary. If so, we simply do a block copy. Likewise
6562 for a BLKmode-like TARGET. */
6563 if (GET_MODE (temp) == BLKmode
6564 && (GET_MODE (target) == BLKmode
6565 || (MEM_P (target)
6566 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6567 && (bitpos % BITS_PER_UNIT) == 0
6568 && (bitsize % BITS_PER_UNIT) == 0)))
6569 {
6570 gcc_assert (MEM_P (target) && MEM_P (temp)
6571 && (bitpos % BITS_PER_UNIT) == 0);
6572
6573 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6574 emit_block_move (target, temp,
6575 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6576 / BITS_PER_UNIT),
6577 BLOCK_OP_NORMAL);
6578
6579 return const0_rtx;
6580 }
6581
6582 /* Handle calls that return values in multiple non-contiguous locations.
6583 The Irix 6 ABI has examples of this. */
6584 if (GET_CODE (temp) == PARALLEL)
6585 {
6586 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6587 rtx temp_target;
6588 if (mode == BLKmode || mode == VOIDmode)
6589 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6590 temp_target = gen_reg_rtx (mode);
6591 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6592 temp = temp_target;
6593 }
6594 else if (mode == BLKmode)
6595 {
6596 /* Handle calls that return BLKmode values in registers. */
6597 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6598 {
6599 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6600 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6601 temp = temp_target;
6602 }
6603 else
6604 {
6605 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6606 rtx temp_target;
6607 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6608 temp_target = gen_reg_rtx (mode);
6609 temp_target
6610 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6611 temp_target, mode, mode);
6612 temp = temp_target;
6613 }
6614 }
6615
6616 /* Store the value in the bitfield. */
6617 store_bit_field (target, bitsize, bitpos,
6618 bitregion_start, bitregion_end,
6619 mode, temp);
6620
6621 return const0_rtx;
6622 }
6623 else
6624 {
6625 /* Now build a reference to just the desired component. */
6626 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6627
6628 if (to_rtx == target)
6629 to_rtx = copy_rtx (to_rtx);
6630
6631 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6632 set_mem_alias_set (to_rtx, alias_set);
6633
6634 return store_expr (exp, to_rtx, 0, nontemporal);
6635 }
6636 }
6637 \f
6638 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6639 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6640 codes and find the ultimate containing object, which we return.
6641
6642 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6643 bit position, and *PUNSIGNEDP to the signedness of the field.
6644 If the position of the field is variable, we store a tree
6645 giving the variable offset (in units) in *POFFSET.
6646 This offset is in addition to the bit position.
6647 If the position is not variable, we store 0 in *POFFSET.
6648
6649 If any of the extraction expressions is volatile,
6650 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6651
6652 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6653 Otherwise, it is a mode that can be used to access the field.
6654
6655 If the field describes a variable-sized object, *PMODE is set to
6656 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6657 this case, but the address of the object can be found.
6658
6659 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6660 look through nodes that serve as markers of a greater alignment than
6661 the one that can be deduced from the expression. These nodes make it
6662 possible for front-ends to prevent temporaries from being created by
6663 the middle-end on alignment considerations. For that purpose, the
6664 normal operating mode at high-level is to always pass FALSE so that
6665 the ultimate containing object is really returned; moreover, the
6666 associated predicate handled_component_p will always return TRUE
6667 on these nodes, thus indicating that they are essentially handled
6668 by get_inner_reference. TRUE should only be passed when the caller
6669 is scanning the expression in order to build another representation
6670 and specifically knows how to handle these nodes; as such, this is
6671 the normal operating mode in the RTL expanders. */
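/* Illustrative example: on a target with 32-bit int and 16-bit short,

     struct s { int pad; short f; } v;

   get_inner_reference on the COMPONENT_REF v.f returns the decl V with
   *PBITSIZE 16, *PBITPOS 32, *POFFSET NULL_TREE and *PMODE HImode; had
   the access used a variable array index, the variable part of the
   position would have been returned in *POFFSET instead.  */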
6672
6673 tree
6674 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6675 HOST_WIDE_INT *pbitpos, tree *poffset,
6676 enum machine_mode *pmode, int *punsignedp,
6677 int *pvolatilep, bool keep_aligning)
6678 {
6679 tree size_tree = 0;
6680 enum machine_mode mode = VOIDmode;
6681 bool blkmode_bitfield = false;
6682 tree offset = size_zero_node;
6683 offset_int bit_offset = 0;
6684
6685 /* First get the mode, signedness, and size. We do this from just the
6686 outermost expression. */
6687 *pbitsize = -1;
6688 if (TREE_CODE (exp) == COMPONENT_REF)
6689 {
6690 tree field = TREE_OPERAND (exp, 1);
6691 size_tree = DECL_SIZE (field);
6692 if (flag_strict_volatile_bitfields > 0
6693 && TREE_THIS_VOLATILE (exp)
6694 && DECL_BIT_FIELD_TYPE (field)
6695 && DECL_MODE (field) != BLKmode)
6696 /* Volatile bitfields should be accessed in the mode of the
6697 field's type, not the mode computed based on the bit
6698 size. */
6699 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6700 else if (!DECL_BIT_FIELD (field))
6701 mode = DECL_MODE (field);
6702 else if (DECL_MODE (field) == BLKmode)
6703 blkmode_bitfield = true;
6704
6705 *punsignedp = DECL_UNSIGNED (field);
6706 }
6707 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6708 {
6709 size_tree = TREE_OPERAND (exp, 1);
6710 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6711 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6712
6713 /* For vector types, with the correct size of access, use the mode of
6714 	 the inner type.  */
6715 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6716 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6717 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6718 mode = TYPE_MODE (TREE_TYPE (exp));
6719 }
6720 else
6721 {
6722 mode = TYPE_MODE (TREE_TYPE (exp));
6723 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6724
6725 if (mode == BLKmode)
6726 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6727 else
6728 *pbitsize = GET_MODE_BITSIZE (mode);
6729 }
6730
6731 if (size_tree != 0)
6732 {
6733 if (! tree_fits_uhwi_p (size_tree))
6734 mode = BLKmode, *pbitsize = -1;
6735 else
6736 *pbitsize = tree_to_uhwi (size_tree);
6737 }
6738
6739 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6740 and find the ultimate containing object. */
6741 while (1)
6742 {
6743 switch (TREE_CODE (exp))
6744 {
6745 case BIT_FIELD_REF:
6746 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6747 break;
6748
6749 case COMPONENT_REF:
6750 {
6751 tree field = TREE_OPERAND (exp, 1);
6752 tree this_offset = component_ref_field_offset (exp);
6753
6754 /* If this field hasn't been filled in yet, don't go past it.
6755 This should only happen when folding expressions made during
6756 type construction. */
6757 if (this_offset == 0)
6758 break;
6759
6760 offset = size_binop (PLUS_EXPR, offset, this_offset);
6761 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6762
6763 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6764 }
6765 break;
6766
6767 case ARRAY_REF:
6768 case ARRAY_RANGE_REF:
6769 {
6770 tree index = TREE_OPERAND (exp, 1);
6771 tree low_bound = array_ref_low_bound (exp);
6772 tree unit_size = array_ref_element_size (exp);
6773
6774 /* We assume all arrays have sizes that are a multiple of a byte.
6775 First subtract the lower bound, if any, in the type of the
6776 index, then convert to sizetype and multiply by the size of
6777 the array element. */
6778 if (! integer_zerop (low_bound))
6779 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6780 index, low_bound);
6781
6782 offset = size_binop (PLUS_EXPR, offset,
6783 size_binop (MULT_EXPR,
6784 fold_convert (sizetype, index),
6785 unit_size));
6786 }
6787 break;
6788
6789 case REALPART_EXPR:
6790 break;
6791
6792 case IMAGPART_EXPR:
6793 bit_offset += *pbitsize;
6794 break;
6795
6796 case VIEW_CONVERT_EXPR:
6797 if (keep_aligning && STRICT_ALIGNMENT
6798 && (TYPE_ALIGN (TREE_TYPE (exp))
6799 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6800 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6801 < BIGGEST_ALIGNMENT)
6802 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6803 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6804 goto done;
6805 break;
6806
6807 case MEM_REF:
6808 /* Hand back the decl for MEM[&decl, off]. */
6809 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6810 {
6811 tree off = TREE_OPERAND (exp, 1);
6812 if (!integer_zerop (off))
6813 {
6814 offset_int boff, coff = mem_ref_offset (exp);
6815 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6816 bit_offset += boff;
6817 }
6818 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6819 }
6820 goto done;
6821
6822 default:
6823 goto done;
6824 }
6825
6826 /* If any reference in the chain is volatile, the effect is volatile. */
6827 if (TREE_THIS_VOLATILE (exp))
6828 *pvolatilep = 1;
6829
6830 exp = TREE_OPERAND (exp, 0);
6831 }
6832 done:
6833
6834 /* If OFFSET is constant, see if we can return the whole thing as a
6835 constant bit position. Make sure to handle overflow during
6836 this conversion. */
6837 if (TREE_CODE (offset) == INTEGER_CST)
6838 {
6839 offset_int tem = wi::sext (wi::to_offset (offset),
6840 TYPE_PRECISION (sizetype));
6841 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6842 tem += bit_offset;
6843 if (wi::fits_shwi_p (tem))
6844 {
6845 *pbitpos = tem.to_shwi ();
6846 *poffset = offset = NULL_TREE;
6847 }
6848 }
6849
6850 /* Otherwise, split it up. */
6851 if (offset)
6852 {
6853 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6854 if (wi::neg_p (bit_offset))
6855 {
6856 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6857 offset_int tem = bit_offset.and_not (mask);
6858 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6859 	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6860 bit_offset -= tem;
6861 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6862 offset = size_binop (PLUS_EXPR, offset,
6863 wide_int_to_tree (sizetype, tem));
6864 }
6865
6866 *pbitpos = bit_offset.to_shwi ();
6867 *poffset = offset;
6868 }
6869
6870 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6871 if (mode == VOIDmode
6872 && blkmode_bitfield
6873 && (*pbitpos % BITS_PER_UNIT) == 0
6874 && (*pbitsize % BITS_PER_UNIT) == 0)
6875 *pmode = BLKmode;
6876 else
6877 *pmode = mode;
6878
6879 return exp;
6880 }
6881
6882 /* Return a tree of sizetype representing the size, in bytes, of the element
6883 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6884
6885 tree
6886 array_ref_element_size (tree exp)
6887 {
6888 tree aligned_size = TREE_OPERAND (exp, 3);
6889 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6890 location_t loc = EXPR_LOCATION (exp);
6891
6892 /* If a size was specified in the ARRAY_REF, it's the size measured
6893 in alignment units of the element type. So multiply by that value. */
6894 if (aligned_size)
6895 {
6896 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6897 sizetype from another type of the same width and signedness. */
6898 if (TREE_TYPE (aligned_size) != sizetype)
6899 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6900 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6901 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6902 }
6903
6904 /* Otherwise, take the size from that of the element type. Substitute
6905 any PLACEHOLDER_EXPR that we have. */
6906 else
6907 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6908 }
6909
6910 /* Return a tree representing the lower bound of the array mentioned in
6911 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6912
6913 tree
6914 array_ref_low_bound (tree exp)
6915 {
6916 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6917
6918 /* If a lower bound is specified in EXP, use it. */
6919 if (TREE_OPERAND (exp, 2))
6920 return TREE_OPERAND (exp, 2);
6921
6922 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6923 substituting for a PLACEHOLDER_EXPR as needed. */
6924 if (domain_type && TYPE_MIN_VALUE (domain_type))
6925 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6926
6927 /* Otherwise, return a zero of the appropriate type. */
6928 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6929 }
6930
6931 /* Returns true if REF is an array reference to an array at the end of
6932 a structure. If this is the case, the array may be allocated larger
6933 than its upper bound implies. */
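/* Illustrative example: with

     struct s { int n; int tail[1]; } *p;

   the reference p->tail[i] is an array at the end of a structure (the
   object may have been allocated with extra trailing elements), whereas
   for a declared object "struct s obj;" the reference obj.tail[i] is not,
   since it is based on the decl OBJ and bounded by its type.  */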
6934
6935 bool
6936 array_at_struct_end_p (tree ref)
6937 {
6938 if (TREE_CODE (ref) != ARRAY_REF
6939 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6940 return false;
6941
6942 while (handled_component_p (ref))
6943 {
6944 /* If the reference chain contains a component reference to a
6945 	 non-union type and another field follows, the reference
6946 is not at the end of a structure. */
6947 if (TREE_CODE (ref) == COMPONENT_REF
6948 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6949 {
6950 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6951 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6952 nextf = DECL_CHAIN (nextf);
6953 if (nextf)
6954 return false;
6955 }
6956
6957 ref = TREE_OPERAND (ref, 0);
6958 }
6959
6960 /* If the reference is based on a declared entity, the size of the array
6961 is constrained by its given domain. */
6962 if (DECL_P (ref))
6963 return false;
6964
6965 return true;
6966 }
6967
6968 /* Return a tree representing the upper bound of the array mentioned in
6969 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6970
6971 tree
6972 array_ref_up_bound (tree exp)
6973 {
6974 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6975
6976 /* If there is a domain type and it has an upper bound, use it, substituting
6977 for a PLACEHOLDER_EXPR as needed. */
6978 if (domain_type && TYPE_MAX_VALUE (domain_type))
6979 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6980
6981 /* Otherwise fail. */
6982 return NULL_TREE;
6983 }
6984
6985 /* Return a tree representing the offset, in bytes, of the field referenced
6986 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6987
6988 tree
6989 component_ref_field_offset (tree exp)
6990 {
6991 tree aligned_offset = TREE_OPERAND (exp, 2);
6992 tree field = TREE_OPERAND (exp, 1);
6993 location_t loc = EXPR_LOCATION (exp);
6994
6995 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6996 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6997 value. */
6998 if (aligned_offset)
6999 {
7000 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7001 sizetype from another type of the same width and signedness. */
7002 if (TREE_TYPE (aligned_offset) != sizetype)
7003 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7004 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7005 size_int (DECL_OFFSET_ALIGN (field)
7006 / BITS_PER_UNIT));
7007 }
7008
7009 /* Otherwise, take the offset from that of the field. Substitute
7010 any PLACEHOLDER_EXPR that we have. */
7011 else
7012 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7013 }
7014
7015 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7016
7017 static unsigned HOST_WIDE_INT
7018 target_align (const_tree target)
7019 {
7020   /* We might have a chain of nested references with intermediate misaligning
7021      bit-field components, so we need to recurse to find out.  */
7022
7023 unsigned HOST_WIDE_INT this_align, outer_align;
7024
7025 switch (TREE_CODE (target))
7026 {
7027 case BIT_FIELD_REF:
7028 return 1;
7029
7030 case COMPONENT_REF:
7031 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7032 outer_align = target_align (TREE_OPERAND (target, 0));
7033 return MIN (this_align, outer_align);
7034
7035 case ARRAY_REF:
7036 case ARRAY_RANGE_REF:
7037 this_align = TYPE_ALIGN (TREE_TYPE (target));
7038 outer_align = target_align (TREE_OPERAND (target, 0));
7039 return MIN (this_align, outer_align);
7040
7041 CASE_CONVERT:
7042 case NON_LVALUE_EXPR:
7043 case VIEW_CONVERT_EXPR:
7044 this_align = TYPE_ALIGN (TREE_TYPE (target));
7045 outer_align = target_align (TREE_OPERAND (target, 0));
7046 return MAX (this_align, outer_align);
7047
7048 default:
7049 return TYPE_ALIGN (TREE_TYPE (target));
7050 }
7051 }
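
/* As an illustration of the recursion above: for a COMPONENT_REF of a
   field with DECL_ALIGN of 16 bits whose base object's chain yields an
   alignment of 64 bits, the result is MIN (16, 64) = 16 bits, while a
   BIT_FIELD_REF immediately yields 1 and that minimum propagates
   through enclosing COMPONENT_REFs and ARRAY_REFs. */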
7052
7053 \f
7054 /* Given an rtx VALUE that may contain additions and multiplications, return
7055 an equivalent value that just refers to a register, memory, or constant.
7056 This is done by generating instructions to perform the arithmetic and
7057 returning a pseudo-register containing the value.
7058
7059 The returned value may be a REG, SUBREG, MEM or constant. */
7060
7061 rtx
7062 force_operand (rtx value, rtx target)
7063 {
7064 rtx op1, op2;
7065 /* Use subtarget as the target for operand 0 of a binary operation. */
7066 rtx subtarget = get_subtarget (target);
7067 enum rtx_code code = GET_CODE (value);
7068
7069 /* Check for subreg applied to an expression produced by loop optimizer. */
7070 if (code == SUBREG
7071 && !REG_P (SUBREG_REG (value))
7072 && !MEM_P (SUBREG_REG (value)))
7073 {
7074 value
7075 = simplify_gen_subreg (GET_MODE (value),
7076 force_reg (GET_MODE (SUBREG_REG (value)),
7077 force_operand (SUBREG_REG (value),
7078 NULL_RTX)),
7079 GET_MODE (SUBREG_REG (value)),
7080 SUBREG_BYTE (value));
7081 code = GET_CODE (value);
7082 }
7083
7084 /* Check for a PIC address load. */
7085 if ((code == PLUS || code == MINUS)
7086 && XEXP (value, 0) == pic_offset_table_rtx
7087 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7088 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7089 || GET_CODE (XEXP (value, 1)) == CONST))
7090 {
7091 if (!subtarget)
7092 subtarget = gen_reg_rtx (GET_MODE (value));
7093 emit_move_insn (subtarget, value);
7094 return subtarget;
7095 }
7096
7097 if (ARITHMETIC_P (value))
7098 {
7099 op2 = XEXP (value, 1);
7100 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7101 subtarget = 0;
7102 if (code == MINUS && CONST_INT_P (op2))
7103 {
7104 code = PLUS;
7105 op2 = negate_rtx (GET_MODE (value), op2);
7106 }
7107
7108 /* Check for an addition with OP2 a constant integer and our first
7109 operand a PLUS of a virtual register and something else. In that
7110 case, we want to emit the sum of the virtual register and the
7111 constant first and then add the other value. This allows virtual
7112 register instantiation to simply modify the constant rather than
7113 creating another one around this addition. */
7114 if (code == PLUS && CONST_INT_P (op2)
7115 && GET_CODE (XEXP (value, 0)) == PLUS
7116 && REG_P (XEXP (XEXP (value, 0), 0))
7117 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7118 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7119 {
7120 rtx temp = expand_simple_binop (GET_MODE (value), code,
7121 XEXP (XEXP (value, 0), 0), op2,
7122 subtarget, 0, OPTAB_LIB_WIDEN);
7123 return expand_simple_binop (GET_MODE (value), code, temp,
7124 force_operand (XEXP (XEXP (value,
7125 0), 1), 0),
7126 target, 0, OPTAB_LIB_WIDEN);
7127 }
7128
7129 op1 = force_operand (XEXP (value, 0), subtarget);
7130 op2 = force_operand (op2, NULL_RTX);
7131 switch (code)
7132 {
7133 case MULT:
7134 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7135 case DIV:
7136 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7137 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7138 target, 1, OPTAB_LIB_WIDEN);
7139 else
7140 return expand_divmod (0,
7141 FLOAT_MODE_P (GET_MODE (value))
7142 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7143 GET_MODE (value), op1, op2, target, 0);
7144 case MOD:
7145 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7146 target, 0);
7147 case UDIV:
7148 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7149 target, 1);
7150 case UMOD:
7151 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7152 target, 1);
7153 case ASHIFTRT:
7154 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7155 target, 0, OPTAB_LIB_WIDEN);
7156 default:
7157 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7158 target, 1, OPTAB_LIB_WIDEN);
7159 }
7160 }
7161 if (UNARY_P (value))
7162 {
7163 if (!target)
7164 target = gen_reg_rtx (GET_MODE (value));
7165 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7166 switch (code)
7167 {
7168 case ZERO_EXTEND:
7169 case SIGN_EXTEND:
7170 case TRUNCATE:
7171 case FLOAT_EXTEND:
7172 case FLOAT_TRUNCATE:
7173 convert_move (target, op1, code == ZERO_EXTEND);
7174 return target;
7175
7176 case FIX:
7177 case UNSIGNED_FIX:
7178 expand_fix (target, op1, code == UNSIGNED_FIX);
7179 return target;
7180
7181 case FLOAT:
7182 case UNSIGNED_FLOAT:
7183 expand_float (target, op1, code == UNSIGNED_FLOAT);
7184 return target;
7185
7186 default:
7187 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7188 }
7189 }
7190
7191 #ifdef INSN_SCHEDULING
7192 /* On machines that have insn scheduling, we want all memory references to be
7193 explicit, so we need to deal with such paradoxical SUBREGs. */
7194 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7195 value
7196 = simplify_gen_subreg (GET_MODE (value),
7197 force_reg (GET_MODE (SUBREG_REG (value)),
7198 force_operand (SUBREG_REG (value),
7199 NULL_RTX)),
7200 GET_MODE (SUBREG_REG (value)),
7201 SUBREG_BYTE (value));
7202 #endif
7203
7204 return value;
7205 }
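
/* A brief illustration (the register numbers are made up): given
   VALUE = (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101)),
   force_operand emits the multiply and the addition and returns a
   pseudo register holding the sum, whereas a VALUE that is already a
   REG, MEM or constant is simply returned unchanged. */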
7206 \f
7207 /* Subroutine of expand_expr: return nonzero iff there is no way that
7208 EXP can reference X, which is being modified. TOP_P is nonzero if this
7209 call is going to be used to determine whether we need a temporary
7210 for EXP, as opposed to a recursive call to this function.
7211
7212 It is always safe for this routine to return zero since it merely
7213 searches for optimization opportunities. */
7214
7215 int
7216 safe_from_p (const_rtx x, tree exp, int top_p)
7217 {
7218 rtx exp_rtl = 0;
7219 int i, nops;
7220
7221 if (x == 0
7222 /* If EXP has varying size, we MUST use a target since we currently
7223 have no way of allocating temporaries of variable size
7224 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7225 So we assume here that something at a higher level has prevented a
7226 clash. This is somewhat bogus, but the best we can do. Only
7227 do this when X is BLKmode and when we are at the top level. */
7228 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7229 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7230 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7231 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7232 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7233 != INTEGER_CST)
7234 && GET_MODE (x) == BLKmode)
7235 /* If X is in the outgoing argument area, it is always safe. */
7236 || (MEM_P (x)
7237 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7238 || (GET_CODE (XEXP (x, 0)) == PLUS
7239 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7240 return 1;
7241
7242 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7243 find the underlying pseudo. */
7244 if (GET_CODE (x) == SUBREG)
7245 {
7246 x = SUBREG_REG (x);
7247 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7248 return 0;
7249 }
7250
7251 /* Now look at our tree code and possibly recurse. */
7252 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7253 {
7254 case tcc_declaration:
7255 exp_rtl = DECL_RTL_IF_SET (exp);
7256 break;
7257
7258 case tcc_constant:
7259 return 1;
7260
7261 case tcc_exceptional:
7262 if (TREE_CODE (exp) == TREE_LIST)
7263 {
7264 while (1)
7265 {
7266 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7267 return 0;
7268 exp = TREE_CHAIN (exp);
7269 if (!exp)
7270 return 1;
7271 if (TREE_CODE (exp) != TREE_LIST)
7272 return safe_from_p (x, exp, 0);
7273 }
7274 }
7275 else if (TREE_CODE (exp) == CONSTRUCTOR)
7276 {
7277 constructor_elt *ce;
7278 unsigned HOST_WIDE_INT idx;
7279
7280 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7281 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7282 || !safe_from_p (x, ce->value, 0))
7283 return 0;
7284 return 1;
7285 }
7286 else if (TREE_CODE (exp) == ERROR_MARK)
7287 return 1; /* An already-visited SAVE_EXPR? */
7288 else
7289 return 0;
7290
7291 case tcc_statement:
7292 /* The only case we look at here is the DECL_INITIAL inside a
7293 DECL_EXPR. */
7294 return (TREE_CODE (exp) != DECL_EXPR
7295 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7296 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7297 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7298
7299 case tcc_binary:
7300 case tcc_comparison:
7301 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7302 return 0;
7303 /* Fall through. */
7304
7305 case tcc_unary:
7306 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7307
7308 case tcc_expression:
7309 case tcc_reference:
7310 case tcc_vl_exp:
7311 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7312 the expression. If it is set, we conflict iff we are that rtx or
7313 both are in memory. Otherwise, we check all operands of the
7314 expression recursively. */
7315
7316 switch (TREE_CODE (exp))
7317 {
7318 case ADDR_EXPR:
7319 /* If the operand is static or we are static, we can't conflict.
7320 Likewise if we don't conflict with the operand at all. */
7321 if (staticp (TREE_OPERAND (exp, 0))
7322 || TREE_STATIC (exp)
7323 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7324 return 1;
7325
7326 /* Otherwise, the only way this can conflict is if we are taking
7327 the address of a DECL whose address is part of X, which is
7328 very rare. */
7329 exp = TREE_OPERAND (exp, 0);
7330 if (DECL_P (exp))
7331 {
7332 if (!DECL_RTL_SET_P (exp)
7333 || !MEM_P (DECL_RTL (exp)))
7334 return 0;
7335 else
7336 exp_rtl = XEXP (DECL_RTL (exp), 0);
7337 }
7338 break;
7339
7340 case MEM_REF:
7341 if (MEM_P (x)
7342 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7343 get_alias_set (exp)))
7344 return 0;
7345 break;
7346
7347 case CALL_EXPR:
7348 /* Assume that the call will clobber all hard registers and
7349 all of memory. */
7350 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7351 || MEM_P (x))
7352 return 0;
7353 break;
7354
7355 case WITH_CLEANUP_EXPR:
7356 case CLEANUP_POINT_EXPR:
7357 /* Lowered by gimplify.c. */
7358 gcc_unreachable ();
7359
7360 case SAVE_EXPR:
7361 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7362
7363 default:
7364 break;
7365 }
7366
7367 /* If we have an rtx, we do not need to scan our operands. */
7368 if (exp_rtl)
7369 break;
7370
7371 nops = TREE_OPERAND_LENGTH (exp);
7372 for (i = 0; i < nops; i++)
7373 if (TREE_OPERAND (exp, i) != 0
7374 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7375 return 0;
7376
7377 break;
7378
7379 case tcc_type:
7380 /* Should never get a type here. */
7381 gcc_unreachable ();
7382 }
7383
7384 /* If we have an rtl, find any enclosed object. Then see if we conflict
7385 with it. */
7386 if (exp_rtl)
7387 {
7388 if (GET_CODE (exp_rtl) == SUBREG)
7389 {
7390 exp_rtl = SUBREG_REG (exp_rtl);
7391 if (REG_P (exp_rtl)
7392 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7393 return 0;
7394 }
7395
7396 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7397 are memory and they conflict. */
7398 return ! (rtx_equal_p (x, exp_rtl)
7399 || (MEM_P (x) && MEM_P (exp_rtl)
7400 && true_dependence (exp_rtl, VOIDmode, x)));
7401 }
7402
7403 /* If we reach here, it is safe. */
7404 return 1;
7405 }
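
/* For example, if EXP is a CALL_EXPR and X is a MEM, the call is
   assumed to clobber all of memory, so safe_from_p returns 0 and the
   caller must evaluate EXP into a temporary rather than into X. */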
7406
7407 \f
7408 /* Return the highest power of two that EXP is known to be a multiple of.
7409 This is used in updating alignment of MEMs in array references. */
7410
7411 unsigned HOST_WIDE_INT
7412 highest_pow2_factor (const_tree exp)
7413 {
7414 unsigned HOST_WIDE_INT ret;
7415 int trailing_zeros = tree_ctz (exp);
7416 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7417 return BIGGEST_ALIGNMENT;
7418 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7419 if (ret > BIGGEST_ALIGNMENT)
7420 return BIGGEST_ALIGNMENT;
7421 return ret;
7422 }
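
/* For example: if tree_ctz reports that EXP has 3 known trailing zero
   bits (EXP is a multiple of 8), the result is (unsigned) 1 << 3 = 8;
   an expression with HOST_BITS_PER_WIDE_INT or more known trailing
   zeros is capped at BIGGEST_ALIGNMENT, as is any larger factor. */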
7423
7424 /* Similar, except that the alignment requirements of TARGET are
7425 taken into account. Assume it is at least as aligned as its
7426 type, unless it is a COMPONENT_REF in which case the layout of
7427 the structure gives the alignment. */
7428
7429 static unsigned HOST_WIDE_INT
7430 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7431 {
7432 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7433 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7434
7435 return MAX (factor, talign);
7436 }
7437 \f
7438 #ifdef HAVE_conditional_move
7439 /* Convert the tree comparison code TCODE to the rtl one where the
7440 signedness is UNSIGNEDP. */
7441
7442 static enum rtx_code
7443 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7444 {
7445 enum rtx_code code;
7446 switch (tcode)
7447 {
7448 case EQ_EXPR:
7449 code = EQ;
7450 break;
7451 case NE_EXPR:
7452 code = NE;
7453 break;
7454 case LT_EXPR:
7455 code = unsignedp ? LTU : LT;
7456 break;
7457 case LE_EXPR:
7458 code = unsignedp ? LEU : LE;
7459 break;
7460 case GT_EXPR:
7461 code = unsignedp ? GTU : GT;
7462 break;
7463 case GE_EXPR:
7464 code = unsignedp ? GEU : GE;
7465 break;
7466 case UNORDERED_EXPR:
7467 code = UNORDERED;
7468 break;
7469 case ORDERED_EXPR:
7470 code = ORDERED;
7471 break;
7472 case UNLT_EXPR:
7473 code = UNLT;
7474 break;
7475 case UNLE_EXPR:
7476 code = UNLE;
7477 break;
7478 case UNGT_EXPR:
7479 code = UNGT;
7480 break;
7481 case UNGE_EXPR:
7482 code = UNGE;
7483 break;
7484 case UNEQ_EXPR:
7485 code = UNEQ;
7486 break;
7487 case LTGT_EXPR:
7488 code = LTGT;
7489 break;
7490
7491 default:
7492 gcc_unreachable ();
7493 }
7494 return code;
7495 }
7496 #endif
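
/* For instance, convert_tree_comp_to_rtx maps LT_EXPR to LTU when
   UNSIGNEDP is nonzero and to LT otherwise, while the IEEE unordered
   comparisons such as UNLE_EXPR map to their rtl counterparts (UNLE)
   regardless of signedness. */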
7497
7498 /* Subroutine of expand_expr. Expand the two operands of a binary
7499 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7500 The value may be stored in TARGET if TARGET is nonzero. The
7501 MODIFIER argument is as documented by expand_expr. */
7502
7503 static void
7504 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7505 enum expand_modifier modifier)
7506 {
7507 if (! safe_from_p (target, exp1, 1))
7508 target = 0;
7509 if (operand_equal_p (exp0, exp1, 0))
7510 {
7511 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7512 *op1 = copy_rtx (*op0);
7513 }
7514 else
7515 {
7516 /* If we need to preserve evaluation order, copy exp0 into its own
7517 temporary variable so that it can't be clobbered by exp1. */
7518 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7519 exp0 = save_expr (exp0);
7520 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7521 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7522 }
7523 }
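
/* A small usage sketch (the source expressions here are hypothetical):
   when expanding something like "x + x", operand_equal_p detects that
   both operands are the same tree, so the expression is expanded once
   and *OP1 is just copy_rtx (*OP0); when TARGET might be modified by
   the expansion of EXP1, safe_from_p fails and TARGET is dropped so
   that the first operand cannot be clobbered. */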
7524
7525 \f
7526 /* Return a MEM that contains constant EXP. DEFER is as for
7527 output_constant_def and MODIFIER is as for expand_expr. */
7528
7529 static rtx
7530 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7531 {
7532 rtx mem;
7533
7534 mem = output_constant_def (exp, defer);
7535 if (modifier != EXPAND_INITIALIZER)
7536 mem = use_anchored_address (mem);
7537 return mem;
7538 }
7539
7540 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7541 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7542
7543 static rtx
7544 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7545 enum expand_modifier modifier, addr_space_t as)
7546 {
7547 rtx result, subtarget;
7548 tree inner, offset;
7549 HOST_WIDE_INT bitsize, bitpos;
7550 int volatilep, unsignedp;
7551 enum machine_mode mode1;
7552
7553 /* If we are taking the address of a constant and are at the top level,
7554 we have to use output_constant_def since we can't call force_const_mem
7555 at top level. */
7556 /* ??? This should be considered a front-end bug. We should not be
7557 generating ADDR_EXPR of something that isn't an LVALUE. The only
7558 exception here is STRING_CST. */
7559 if (CONSTANT_CLASS_P (exp))
7560 {
7561 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7562 if (modifier < EXPAND_SUM)
7563 result = force_operand (result, target);
7564 return result;
7565 }
7566
7567 /* Everything must be something allowed by is_gimple_addressable. */
7568 switch (TREE_CODE (exp))
7569 {
7570 case INDIRECT_REF:
7571 /* This case will happen via recursion for &a->b. */
7572 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7573
7574 case MEM_REF:
7575 {
7576 tree tem = TREE_OPERAND (exp, 0);
7577 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7578 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7579 return expand_expr (tem, target, tmode, modifier);
7580 }
7581
7582 case CONST_DECL:
7583 /* Expand the initializer like constants above. */
7584 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7585 0, modifier), 0);
7586 if (modifier < EXPAND_SUM)
7587 result = force_operand (result, target);
7588 return result;
7589
7590 case REALPART_EXPR:
7591 /* The real part of the complex number is always first, therefore
7592 the address is the same as the address of the parent object. */
7593 offset = 0;
7594 bitpos = 0;
7595 inner = TREE_OPERAND (exp, 0);
7596 break;
7597
7598 case IMAGPART_EXPR:
7599 /* The imaginary part of the complex number is always second.
7600 The expression is therefore always offset by the size of the
7601 scalar type. */
7602 offset = 0;
7603 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7604 inner = TREE_OPERAND (exp, 0);
7605 break;
7606
7607 case COMPOUND_LITERAL_EXPR:
7608 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7609 rtl_for_decl_init is called on DECL_INITIAL with
7610 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7611 if (modifier == EXPAND_INITIALIZER
7612 && COMPOUND_LITERAL_EXPR_DECL (exp))
7613 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7614 target, tmode, modifier, as);
7615 /* FALLTHRU */
7616 default:
7617 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7618 expand_expr, as that can have various side effects; LABEL_DECLs for
7619 example, may not have their DECL_RTL set yet. Expand the rtl of
7620 CONSTRUCTORs too, which should yield a memory reference for the
7621 constructor's contents. Assume language specific tree nodes can
7622 be expanded in some interesting way. */
7623 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7624 if (DECL_P (exp)
7625 || TREE_CODE (exp) == CONSTRUCTOR
7626 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7627 {
7628 result = expand_expr (exp, target, tmode,
7629 modifier == EXPAND_INITIALIZER
7630 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7631
7632 /* If the DECL isn't in memory, then the DECL wasn't properly
7633 marked TREE_ADDRESSABLE, which will be either a front-end
7634 or a tree optimizer bug. */
7635
7636 if (TREE_ADDRESSABLE (exp)
7637 && ! MEM_P (result)
7638 && ! targetm.calls.allocate_stack_slots_for_args ())
7639 {
7640 error ("local frame unavailable (naked function?)");
7641 return result;
7642 }
7643 else
7644 gcc_assert (MEM_P (result));
7645 result = XEXP (result, 0);
7646
7647 /* ??? Is this needed anymore? */
7648 if (DECL_P (exp))
7649 TREE_USED (exp) = 1;
7650
7651 if (modifier != EXPAND_INITIALIZER
7652 && modifier != EXPAND_CONST_ADDRESS
7653 && modifier != EXPAND_SUM)
7654 result = force_operand (result, target);
7655 return result;
7656 }
7657
7658 /* Pass FALSE as the last argument to get_inner_reference although
7659 we are expanding to RTL. The rationale is that we know how to
7660 handle "aligning nodes" here: we can just bypass them because
7661 they won't change the final object whose address will be returned
7662 (they actually exist only for that purpose). */
7663 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7664 &mode1, &unsignedp, &volatilep, false);
7665 break;
7666 }
7667
7668 /* We must have made progress. */
7669 gcc_assert (inner != exp);
7670
7671 subtarget = offset || bitpos ? NULL_RTX : target;
7672 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7673 inner alignment, force the inner to be sufficiently aligned. */
7674 if (CONSTANT_CLASS_P (inner)
7675 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7676 {
7677 inner = copy_node (inner);
7678 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7679 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7680 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7681 }
7682 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7683
7684 if (offset)
7685 {
7686 rtx tmp;
7687
7688 if (modifier != EXPAND_NORMAL)
7689 result = force_operand (result, NULL);
7690 tmp = expand_expr (offset, NULL_RTX, tmode,
7691 modifier == EXPAND_INITIALIZER
7692 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7693
7694 /* expand_expr is allowed to return an object in a mode other
7695 than TMODE. If it did, we need to convert. */
7696 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7697 tmp = convert_modes (tmode, GET_MODE (tmp),
7698 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7699 result = convert_memory_address_addr_space (tmode, result, as);
7700 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7701
7702 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7703 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7704 else
7705 {
7706 subtarget = bitpos ? NULL_RTX : target;
7707 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7708 1, OPTAB_LIB_WIDEN);
7709 }
7710 }
7711
7712 if (bitpos)
7713 {
7714 /* Someone beforehand should have rejected taking the address
7715 of such an object. */
7716 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7717
7718 result = convert_memory_address_addr_space (tmode, result, as);
7719 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7720 if (modifier < EXPAND_SUM)
7721 result = force_operand (result, target);
7722 }
7723
7724 return result;
7725 }
7726
7727 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7728 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7729
7730 static rtx
7731 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7732 enum expand_modifier modifier)
7733 {
7734 addr_space_t as = ADDR_SPACE_GENERIC;
7735 enum machine_mode address_mode = Pmode;
7736 enum machine_mode pointer_mode = ptr_mode;
7737 enum machine_mode rmode;
7738 rtx result;
7739
7740 /* Target mode of VOIDmode says "whatever's natural". */
7741 if (tmode == VOIDmode)
7742 tmode = TYPE_MODE (TREE_TYPE (exp));
7743
7744 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7745 {
7746 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7747 address_mode = targetm.addr_space.address_mode (as);
7748 pointer_mode = targetm.addr_space.pointer_mode (as);
7749 }
7750
7751 /* We can get called with some Weird Things if the user does silliness
7752 like "(short) &a". In that case, convert_memory_address won't do
7753 the right thing, so ignore the given target mode. */
7754 if (tmode != address_mode && tmode != pointer_mode)
7755 tmode = address_mode;
7756
7757 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7758 tmode, modifier, as);
7759
7760 /* Despite expand_expr's claims concerning ignoring TMODE when not
7761 strictly convenient, stuff breaks if we don't honor it. Note
7762 that combined with the above, we only do this for pointer modes. */
7763 rmode = GET_MODE (result);
7764 if (rmode == VOIDmode)
7765 rmode = tmode;
7766 if (rmode != tmode)
7767 result = convert_memory_address_addr_space (tmode, result, as);
7768
7769 return result;
7770 }
7771
7772 /* Generate code for computing CONSTRUCTOR EXP.
7773 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7774 is TRUE, NULL is returned instead of creating a temporary variable
7775 in memory, and the caller needs to handle it differently. */
7776
7777 static rtx
7778 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7779 bool avoid_temp_mem)
7780 {
7781 tree type = TREE_TYPE (exp);
7782 enum machine_mode mode = TYPE_MODE (type);
7783
7784 /* Try to avoid creating a temporary at all. This is possible
7785 if all of the initializer is zero.
7786 FIXME: try to handle all [0..255] initializers we can handle
7787 with memset. */
7788 if (TREE_STATIC (exp)
7789 && !TREE_ADDRESSABLE (exp)
7790 && target != 0 && mode == BLKmode
7791 && all_zeros_p (exp))
7792 {
7793 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7794 return target;
7795 }
7796
7797 /* All elts simple constants => refer to a constant in memory. But
7798 if this is a non-BLKmode mode, let it store a field at a time
7799 since that should make a CONST_INT, CONST_WIDE_INT or
7800 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7801 use, it is best to store directly into the target unless the type
7802 is large enough that memcpy will be used. If we are making an
7803 initializer and all operands are constant, put it in memory as
7804 well.
7805
7806 FIXME: Avoid trying to fill vector constructors piece-meal.
7807 Output them with output_constant_def below unless we're sure
7808 they're zeros. This should go away when vector initializers
7809 are treated like VECTOR_CST instead of arrays. */
7810 if ((TREE_STATIC (exp)
7811 && ((mode == BLKmode
7812 && ! (target != 0 && safe_from_p (target, exp, 1)))
7813 || TREE_ADDRESSABLE (exp)
7814 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7815 && (! MOVE_BY_PIECES_P
7816 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7817 TYPE_ALIGN (type)))
7818 && ! mostly_zeros_p (exp))))
7819 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7820 && TREE_CONSTANT (exp)))
7821 {
7822 rtx constructor;
7823
7824 if (avoid_temp_mem)
7825 return NULL_RTX;
7826
7827 constructor = expand_expr_constant (exp, 1, modifier);
7828
7829 if (modifier != EXPAND_CONST_ADDRESS
7830 && modifier != EXPAND_INITIALIZER
7831 && modifier != EXPAND_SUM)
7832 constructor = validize_mem (constructor);
7833
7834 return constructor;
7835 }
7836
7837 /* Handle calls that pass values in multiple non-contiguous
7838 locations. The Irix 6 ABI has examples of this. */
7839 if (target == 0 || ! safe_from_p (target, exp, 1)
7840 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7841 {
7842 if (avoid_temp_mem)
7843 return NULL_RTX;
7844
7845 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7846 }
7847
7848 store_constructor (exp, target, 0, int_expr_size (exp));
7849 return target;
7850 }
7851
7852
7853 /* expand_expr: generate code for computing expression EXP.
7854 An rtx for the computed value is returned. The value is never null.
7855 In the case of a void EXP, const0_rtx is returned.
7856
7857 The value may be stored in TARGET if TARGET is nonzero.
7858 TARGET is just a suggestion; callers must assume that
7859 the rtx returned may not be the same as TARGET.
7860
7861 If TARGET is CONST0_RTX, it means that the value will be ignored.
7862
7863 If TMODE is not VOIDmode, it suggests generating the
7864 result in mode TMODE. But this is done only when convenient.
7865 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7866 TMODE is just a suggestion; callers must assume that
7867 the rtx returned may not have mode TMODE.
7868
7869 Note that TARGET may have neither TMODE nor MODE. In that case, it
7870 probably will not be used.
7871
7872 If MODIFIER is EXPAND_SUM then when EXP is an addition
7873 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7874 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7875 products as above, or REG or MEM, or constant.
7876 Ordinarily in such cases we would output mul or add instructions
7877 and then return a pseudo reg containing the sum.
7878
7879 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7880 it also marks a label as absolutely required (it can't be dead).
7881 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7882 This is used for outputting expressions used in initializers.
7883
7884 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7885 with a constant address even if that address is not normally legitimate.
7886 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7887
7888 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7889 a call parameter. Such targets require special care as we haven't yet
7890 marked TARGET so that it's safe from being trashed by libcalls. We
7891 don't want to use TARGET for anything but the final result;
7892 intermediate values must go elsewhere. Additionally, calls to
7893 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7894
7895 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7896 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7897 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7898 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7899 recursively.
7900
7901 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7902 In this case, we don't adjust a returned MEM rtx that wouldn't be
7903 sufficiently aligned for its mode; instead, it's up to the caller
7904 to deal with it afterwards. This is used to make sure that unaligned
7905 base objects for which out-of-bounds accesses are supported, for
7906 example record types with trailing arrays, aren't realigned behind
7907 the back of the caller.
7908 The normal operating mode is to pass FALSE for this parameter. */
7909
7910 rtx
7911 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7912 enum expand_modifier modifier, rtx *alt_rtl,
7913 bool inner_reference_p)
7914 {
7915 rtx ret;
7916
7917 /* Handle ERROR_MARK before anybody tries to access its type. */
7918 if (TREE_CODE (exp) == ERROR_MARK
7919 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7920 {
7921 ret = CONST0_RTX (tmode);
7922 return ret ? ret : const0_rtx;
7923 }
7924
7925 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7926 inner_reference_p);
7927 return ret;
7928 }
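
/* The common entry points are thin wrappers around this function; for
   instance expand_normal (exp), declared in expr.h, is effectively
   expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false),
   i.e. "give me the value of EXP in whatever mode is natural". */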
7929
7930 /* Try to expand the conditional expression which is represented by
7931 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7932 return the rtl reg which represents the result. Otherwise return
7933 NULL_RTX. */
7934
7935 static rtx
7936 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7937 tree treeop1 ATTRIBUTE_UNUSED,
7938 tree treeop2 ATTRIBUTE_UNUSED)
7939 {
7940 #ifdef HAVE_conditional_move
7941 rtx insn;
7942 rtx op00, op01, op1, op2;
7943 enum rtx_code comparison_code;
7944 enum machine_mode comparison_mode;
7945 gimple srcstmt;
7946 rtx temp;
7947 tree type = TREE_TYPE (treeop1);
7948 int unsignedp = TYPE_UNSIGNED (type);
7949 enum machine_mode mode = TYPE_MODE (type);
7950 enum machine_mode orig_mode = mode;
7951
7952 /* If we cannot do a conditional move on the mode, try doing it
7953 with the promoted mode. */
7954 if (!can_conditionally_move_p (mode))
7955 {
7956 mode = promote_mode (type, mode, &unsignedp);
7957 if (!can_conditionally_move_p (mode))
7958 return NULL_RTX;
7959 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7960 }
7961 else
7962 temp = assign_temp (type, 0, 1);
7963
7964 start_sequence ();
7965 expand_operands (treeop1, treeop2,
7966 temp, &op1, &op2, EXPAND_NORMAL);
7967
7968 if (TREE_CODE (treeop0) == SSA_NAME
7969 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7970 {
7971 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7972 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7973 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7974 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7975 comparison_mode = TYPE_MODE (type);
7976 unsignedp = TYPE_UNSIGNED (type);
7977 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7978 }
7979 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7980 {
7981 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7982 enum tree_code cmpcode = TREE_CODE (treeop0);
7983 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7984 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7985 unsignedp = TYPE_UNSIGNED (type);
7986 comparison_mode = TYPE_MODE (type);
7987 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7988 }
7989 else
7990 {
7991 op00 = expand_normal (treeop0);
7992 op01 = const0_rtx;
7993 comparison_code = NE;
7994 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7995 }
7996
7997 if (GET_MODE (op1) != mode)
7998 op1 = gen_lowpart (mode, op1);
7999
8000 if (GET_MODE (op2) != mode)
8001 op2 = gen_lowpart (mode, op2);
8002
8003 /* Try to emit the conditional move. */
8004 insn = emit_conditional_move (temp, comparison_code,
8005 op00, op01, comparison_mode,
8006 op1, op2, mode,
8007 unsignedp);
8008
8009 /* If we could do the conditional move, emit the sequence,
8010 and return. */
8011 if (insn)
8012 {
8013 rtx seq = get_insns ();
8014 end_sequence ();
8015 emit_insn (seq);
8016 return convert_modes (orig_mode, mode, temp, 0);
8017 }
8018
8019 /* Otherwise discard the sequence and fall back to code with
8020 branches. */
8021 end_sequence ();
8022 #endif
8023 return NULL_RTX;
8024 }
8025
8026 rtx
8027 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8028 enum expand_modifier modifier)
8029 {
8030 rtx op0, op1, op2, temp;
8031 tree type;
8032 int unsignedp;
8033 enum machine_mode mode;
8034 enum tree_code code = ops->code;
8035 optab this_optab;
8036 rtx subtarget, original_target;
8037 int ignore;
8038 bool reduce_bit_field;
8039 location_t loc = ops->location;
8040 tree treeop0, treeop1, treeop2;
8041 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8042 ? reduce_to_bit_field_precision ((expr), \
8043 target, \
8044 type) \
8045 : (expr))
8046
8047 type = ops->type;
8048 mode = TYPE_MODE (type);
8049 unsignedp = TYPE_UNSIGNED (type);
8050
8051 treeop0 = ops->op0;
8052 treeop1 = ops->op1;
8053 treeop2 = ops->op2;
8054
8055 /* We should be called only on simple (binary or unary) expressions,
8056 exactly those that are valid in gimple expressions that aren't
8057 GIMPLE_SINGLE_RHS (or invalid). */
8058 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8059 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8060 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8061
8062 ignore = (target == const0_rtx
8063 || ((CONVERT_EXPR_CODE_P (code)
8064 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8065 && TREE_CODE (type) == VOID_TYPE));
8066
8067 /* We should be called only if we need the result. */
8068 gcc_assert (!ignore);
8069
8070 /* An operation in what may be a bit-field type needs the
8071 result to be reduced to the precision of the bit-field type,
8072 which is narrower than that of the type's mode. */
8073 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8074 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8075
8076 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8077 target = 0;
8078
8079 /* Use subtarget as the target for operand 0 of a binary operation. */
8080 subtarget = get_subtarget (target);
8081 original_target = target;
8082
8083 switch (code)
8084 {
8085 case NON_LVALUE_EXPR:
8086 case PAREN_EXPR:
8087 CASE_CONVERT:
8088 if (treeop0 == error_mark_node)
8089 return const0_rtx;
8090
8091 if (TREE_CODE (type) == UNION_TYPE)
8092 {
8093 tree valtype = TREE_TYPE (treeop0);
8094
8095 /* If both input and output are BLKmode, this conversion isn't doing
8096 anything except possibly changing memory attributes. */
8097 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8098 {
8099 rtx result = expand_expr (treeop0, target, tmode,
8100 modifier);
8101
8102 result = copy_rtx (result);
8103 set_mem_attributes (result, type, 0);
8104 return result;
8105 }
8106
8107 if (target == 0)
8108 {
8109 if (TYPE_MODE (type) != BLKmode)
8110 target = gen_reg_rtx (TYPE_MODE (type));
8111 else
8112 target = assign_temp (type, 1, 1);
8113 }
8114
8115 if (MEM_P (target))
8116 /* Store data into beginning of memory target. */
8117 store_expr (treeop0,
8118 adjust_address (target, TYPE_MODE (valtype), 0),
8119 modifier == EXPAND_STACK_PARM,
8120 false);
8121
8122 else
8123 {
8124 gcc_assert (REG_P (target));
8125
8126 /* Store this field into a union of the proper type. */
8127 store_field (target,
8128 MIN ((int_size_in_bytes (TREE_TYPE
8129 (treeop0))
8130 * BITS_PER_UNIT),
8131 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8132 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8133 }
8134
8135 /* Return the entire union. */
8136 return target;
8137 }
8138
8139 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8140 {
8141 op0 = expand_expr (treeop0, target, VOIDmode,
8142 modifier);
8143
8144 /* If the signedness of the conversion differs and OP0 is
8145 a promoted SUBREG, clear that indication since we now
8146 have to do the proper extension. */
8147 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8148 && GET_CODE (op0) == SUBREG)
8149 SUBREG_PROMOTED_VAR_P (op0) = 0;
8150
8151 return REDUCE_BIT_FIELD (op0);
8152 }
8153
8154 op0 = expand_expr (treeop0, NULL_RTX, mode,
8155 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8156 if (GET_MODE (op0) == mode)
8157 ;
8158
8159 /* If OP0 is a constant, just convert it into the proper mode. */
8160 else if (CONSTANT_P (op0))
8161 {
8162 tree inner_type = TREE_TYPE (treeop0);
8163 enum machine_mode inner_mode = GET_MODE (op0);
8164
8165 if (inner_mode == VOIDmode)
8166 inner_mode = TYPE_MODE (inner_type);
8167
8168 if (modifier == EXPAND_INITIALIZER)
8169 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8170 subreg_lowpart_offset (mode,
8171 inner_mode));
8172 else
8173 op0 = convert_modes (mode, inner_mode, op0,
8174 TYPE_UNSIGNED (inner_type));
8175 }
8176
8177 else if (modifier == EXPAND_INITIALIZER)
8178 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8179
8180 else if (target == 0)
8181 op0 = convert_to_mode (mode, op0,
8182 TYPE_UNSIGNED (TREE_TYPE
8183 (treeop0)));
8184 else
8185 {
8186 convert_move (target, op0,
8187 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8188 op0 = target;
8189 }
8190
8191 return REDUCE_BIT_FIELD (op0);
8192
8193 case ADDR_SPACE_CONVERT_EXPR:
8194 {
8195 tree treeop0_type = TREE_TYPE (treeop0);
8196 addr_space_t as_to;
8197 addr_space_t as_from;
8198
8199 gcc_assert (POINTER_TYPE_P (type));
8200 gcc_assert (POINTER_TYPE_P (treeop0_type));
8201
8202 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8203 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8204
8205 /* Conversions between pointers to the same address space should
8206 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8207 gcc_assert (as_to != as_from);
8208
8209 /* Ask target code to handle conversion between pointers
8210 to overlapping address spaces. */
8211 if (targetm.addr_space.subset_p (as_to, as_from)
8212 || targetm.addr_space.subset_p (as_from, as_to))
8213 {
8214 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8215 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8216 gcc_assert (op0);
8217 return op0;
8218 }
8219
8220 /* For disjoint address spaces, converting anything but
8221 a null pointer invokes undefined behaviour. We simply
8222 always return a null pointer here. */
8223 return CONST0_RTX (mode);
8224 }
8225
8226 case POINTER_PLUS_EXPR:
8227 /* Even though the sizetype mode and the pointer's mode can be different,
8228 expand is able to handle this correctly and get the correct result out
8229 of the PLUS_EXPR code. */
8230 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8231 if sizetype precision is smaller than pointer precision. */
8232 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8233 treeop1 = fold_convert_loc (loc, type,
8234 fold_convert_loc (loc, ssizetype,
8235 treeop1));
8236 /* If sizetype precision is larger than pointer precision, truncate the
8237 offset to have matching modes. */
8238 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8239 treeop1 = fold_convert_loc (loc, type, treeop1);
8240
8241 case PLUS_EXPR:
8242 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8243 something else, make sure we add the register to the constant and
8244 then to the other thing. This case can occur during strength
8245 reduction and doing it this way will produce better code if the
8246 frame pointer or argument pointer is eliminated.
8247
8248 fold-const.c will ensure that the constant is always in the inner
8249 PLUS_EXPR, so the only case we need to do anything about is if
8250 sp, ap, or fp is our second argument, in which case we must swap
8251 the innermost first argument and our second argument. */
8252
8253 if (TREE_CODE (treeop0) == PLUS_EXPR
8254 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8255 && TREE_CODE (treeop1) == VAR_DECL
8256 && (DECL_RTL (treeop1) == frame_pointer_rtx
8257 || DECL_RTL (treeop1) == stack_pointer_rtx
8258 || DECL_RTL (treeop1) == arg_pointer_rtx))
8259 {
8260 gcc_unreachable ();
8261 }
8262
8263 /* If the result is to be ptr_mode and we are adding an integer to
8264 something, we might be forming a constant. So try to use
8265 plus_constant. If it produces a sum and we can't accept it,
8266 use force_operand. This allows P = &ARR[const] to generate
8267 efficient code on machines where a SYMBOL_REF is not a valid
8268 address.
8269
8270 If this is an EXPAND_SUM call, always return the sum. */
8271 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8272 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8273 {
8274 if (modifier == EXPAND_STACK_PARM)
8275 target = 0;
8276 if (TREE_CODE (treeop0) == INTEGER_CST
8277 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8278 && TREE_CONSTANT (treeop1))
8279 {
8280 rtx constant_part;
8281 HOST_WIDE_INT wc;
8282 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8283
8284 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8285 EXPAND_SUM);
8286 /* Use wi::shwi to ensure that the constant is
8287 truncated according to the mode of OP1, then sign extended
8288 to a HOST_WIDE_INT. Using the constant directly can result
8289 in non-canonical RTL in a 64x32 cross compile. */
8290 wc = TREE_INT_CST_LOW (treeop0);
8291 constant_part =
8292 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8293 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8294 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8295 op1 = force_operand (op1, target);
8296 return REDUCE_BIT_FIELD (op1);
8297 }
8298
8299 else if (TREE_CODE (treeop1) == INTEGER_CST
8300 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8301 && TREE_CONSTANT (treeop0))
8302 {
8303 rtx constant_part;
8304 HOST_WIDE_INT wc;
8305 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8306
8307 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8308 (modifier == EXPAND_INITIALIZER
8309 ? EXPAND_INITIALIZER : EXPAND_SUM));
8310 if (! CONSTANT_P (op0))
8311 {
8312 op1 = expand_expr (treeop1, NULL_RTX,
8313 VOIDmode, modifier);
8314 /* Return a PLUS if modifier says it's OK. */
8315 if (modifier == EXPAND_SUM
8316 || modifier == EXPAND_INITIALIZER)
8317 return simplify_gen_binary (PLUS, mode, op0, op1);
8318 goto binop2;
8319 }
8320 /* Use wi::shwi to ensure that the constant is
8321 truncated according to the mode of OP1, then sign extended
8322 to a HOST_WIDE_INT. Using the constant directly can result
8323 in non-canonical RTL in a 64x32 cross compile. */
8324 wc = TREE_INT_CST_LOW (treeop1);
8325 constant_part
8326 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8327 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8328 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8329 op0 = force_operand (op0, target);
8330 return REDUCE_BIT_FIELD (op0);
8331 }
8332 }
8333
8334 /* Use TER to expand pointer addition of a negated value
8335 as pointer subtraction. */
8336 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8337 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8338 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8339 && TREE_CODE (treeop1) == SSA_NAME
8340 && TYPE_MODE (TREE_TYPE (treeop0))
8341 == TYPE_MODE (TREE_TYPE (treeop1)))
8342 {
8343 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8344 if (def)
8345 {
8346 treeop1 = gimple_assign_rhs1 (def);
8347 code = MINUS_EXPR;
8348 goto do_minus;
8349 }
8350 }
8351
8352 /* No sense saving up arithmetic to be done
8353 if it's all in the wrong mode to form part of an address.
8354 And force_operand won't know whether to sign-extend or
8355 zero-extend. */
8356 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8357 || mode != ptr_mode)
8358 {
8359 expand_operands (treeop0, treeop1,
8360 subtarget, &op0, &op1, EXPAND_NORMAL);
8361 if (op0 == const0_rtx)
8362 return op1;
8363 if (op1 == const0_rtx)
8364 return op0;
8365 goto binop2;
8366 }
8367
8368 expand_operands (treeop0, treeop1,
8369 subtarget, &op0, &op1, modifier);
8370 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8371
8372 case MINUS_EXPR:
8373 do_minus:
8374 /* For initializers, we are allowed to return a MINUS of two
8375 symbolic constants. Here we handle all cases when both operands
8376 are constant. */
8377 /* Handle difference of two symbolic constants,
8378 for the sake of an initializer. */
8379 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8380 && really_constant_p (treeop0)
8381 && really_constant_p (treeop1))
8382 {
8383 expand_operands (treeop0, treeop1,
8384 NULL_RTX, &op0, &op1, modifier);
8385
8386 /* If the last operand is a CONST_INT, use plus_constant of
8387 the negated constant. Else make the MINUS. */
8388 if (CONST_INT_P (op1))
8389 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8390 -INTVAL (op1)));
8391 else
8392 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8393 }
8394
8395 /* No sense saving up arithmetic to be done
8396 if it's all in the wrong mode to form part of an address.
8397 And force_operand won't know whether to sign-extend or
8398 zero-extend. */
8399 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8400 || mode != ptr_mode)
8401 goto binop;
8402
8403 expand_operands (treeop0, treeop1,
8404 subtarget, &op0, &op1, modifier);
8405
8406 /* Convert A - const to A + (-const). */
8407 if (CONST_INT_P (op1))
8408 {
8409 op1 = negate_rtx (mode, op1);
8410 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8411 }
8412
8413 goto binop2;
8414
8415 case WIDEN_MULT_PLUS_EXPR:
8416 case WIDEN_MULT_MINUS_EXPR:
8417 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8418 op2 = expand_normal (treeop2);
8419 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8420 target, unsignedp);
8421 return target;
8422
8423 case WIDEN_MULT_EXPR:
8424 /* If first operand is constant, swap them.
8425 Thus the following special case checks need only
8426 check the second operand. */
8427 if (TREE_CODE (treeop0) == INTEGER_CST)
8428 {
8429 tree t1 = treeop0;
8430 treeop0 = treeop1;
8431 treeop1 = t1;
8432 }
8433
8434 /* First, check if we have a multiplication of one signed and one
8435 unsigned operand. */
8436 if (TREE_CODE (treeop1) != INTEGER_CST
8437 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8438 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8439 {
8440 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8441 this_optab = usmul_widen_optab;
8442 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8443 != CODE_FOR_nothing)
8444 {
8445 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8446 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8447 EXPAND_NORMAL);
8448 else
8449 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8450 EXPAND_NORMAL);
8451 /* op0 and op1 might still be constant, despite the above
8452 != INTEGER_CST check. Handle it. */
8453 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8454 {
8455 op0 = convert_modes (innermode, mode, op0, true);
8456 op1 = convert_modes (innermode, mode, op1, false);
8457 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8458 target, unsignedp));
8459 }
8460 goto binop3;
8461 }
8462 }
8463 /* Check for a multiplication with matching signedness. */
8464 else if ((TREE_CODE (treeop1) == INTEGER_CST
8465 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8466 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8467 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8468 {
8469 tree op0type = TREE_TYPE (treeop0);
8470 enum machine_mode innermode = TYPE_MODE (op0type);
8471 bool zextend_p = TYPE_UNSIGNED (op0type);
8472 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8473 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8474
8475 if (TREE_CODE (treeop0) != INTEGER_CST)
8476 {
8477 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8478 != CODE_FOR_nothing)
8479 {
8480 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8481 EXPAND_NORMAL);
8482 /* op0 and op1 might still be constant, despite the above
8483 != INTEGER_CST check. Handle it. */
8484 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8485 {
8486 widen_mult_const:
8487 op0 = convert_modes (innermode, mode, op0, zextend_p);
8488 op1
8489 = convert_modes (innermode, mode, op1,
8490 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8491 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8492 target,
8493 unsignedp));
8494 }
8495 temp = expand_widening_mult (mode, op0, op1, target,
8496 unsignedp, this_optab);
8497 return REDUCE_BIT_FIELD (temp);
8498 }
8499 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8500 != CODE_FOR_nothing
8501 && innermode == word_mode)
8502 {
8503 rtx htem, hipart;
8504 op0 = expand_normal (treeop0);
8505 if (TREE_CODE (treeop1) == INTEGER_CST)
8506 op1 = convert_modes (innermode, mode,
8507 expand_normal (treeop1),
8508 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8509 else
8510 op1 = expand_normal (treeop1);
8511 /* op0 and op1 might still be constant, despite the above
8512 != INTEGER_CST check. Handle it. */
8513 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8514 goto widen_mult_const;
8515 temp = expand_binop (mode, other_optab, op0, op1, target,
8516 unsignedp, OPTAB_LIB_WIDEN);
8517 hipart = gen_highpart (innermode, temp);
8518 htem = expand_mult_highpart_adjust (innermode, hipart,
8519 op0, op1, hipart,
8520 zextend_p);
8521 if (htem != hipart)
8522 emit_move_insn (hipart, htem);
8523 return REDUCE_BIT_FIELD (temp);
8524 }
8525 }
8526 }
8527 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8528 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8529 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8530 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8531
8532 case FMA_EXPR:
8533 {
8534 optab opt = fma_optab;
8535 gimple def0, def2;
8536
8537 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8538 call. */
8539 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8540 {
8541 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8542 tree call_expr;
8543
8544 gcc_assert (fn != NULL_TREE);
8545 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8546 return expand_builtin (call_expr, target, subtarget, mode, false);
8547 }
8548
8549 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8550 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8551
8552 op0 = op2 = NULL;
8553
8554 if (def0 && def2
8555 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8556 {
8557 opt = fnms_optab;
8558 op0 = expand_normal (gimple_assign_rhs1 (def0));
8559 op2 = expand_normal (gimple_assign_rhs1 (def2));
8560 }
8561 else if (def0
8562 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8563 {
8564 opt = fnma_optab;
8565 op0 = expand_normal (gimple_assign_rhs1 (def0));
8566 }
8567 else if (def2
8568 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8569 {
8570 opt = fms_optab;
8571 op2 = expand_normal (gimple_assign_rhs1 (def2));
8572 }
8573
8574 if (op0 == NULL)
8575 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8576 if (op2 == NULL)
8577 op2 = expand_normal (treeop2);
8578 op1 = expand_normal (treeop1);
8579
8580 return expand_ternary_op (TYPE_MODE (type), opt,
8581 op0, op1, op2, target, 0);
8582 }
8583
8584 case MULT_EXPR:
8585 /* If this is a fixed-point operation, then we cannot use the code
8586 below because "expand_mult" doesn't support sat/no-sat fixed-point
8587 multiplications. */
8588 if (ALL_FIXED_POINT_MODE_P (mode))
8589 goto binop;
8590
8591 /* If first operand is constant, swap them.
8592 Thus the following special case checks need only
8593 check the second operand. */
8594 if (TREE_CODE (treeop0) == INTEGER_CST)
8595 {
8596 tree t1 = treeop0;
8597 treeop0 = treeop1;
8598 treeop1 = t1;
8599 }
8600
8601 /* Attempt to return something suitable for generating an
8602 indexed address, for machines that support that. */
8603
8604 if (modifier == EXPAND_SUM && mode == ptr_mode
8605 && tree_fits_shwi_p (treeop1))
8606 {
8607 tree exp1 = treeop1;
8608
8609 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8610 EXPAND_SUM);
8611
8612 if (!REG_P (op0))
8613 op0 = force_operand (op0, NULL_RTX);
8614 if (!REG_P (op0))
8615 op0 = copy_to_mode_reg (mode, op0);
8616
8617 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8618 gen_int_mode (tree_to_shwi (exp1),
8619 TYPE_MODE (TREE_TYPE (exp1)))));
8620 }
8621
8622 if (modifier == EXPAND_STACK_PARM)
8623 target = 0;
8624
8625 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8626 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8627
8628 case TRUNC_DIV_EXPR:
8629 case FLOOR_DIV_EXPR:
8630 case CEIL_DIV_EXPR:
8631 case ROUND_DIV_EXPR:
8632 case EXACT_DIV_EXPR:
8633 /* If this is a fixed-point operation, then we cannot use the code
8634 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8635 divisions. */
8636 if (ALL_FIXED_POINT_MODE_P (mode))
8637 goto binop;
8638
8639 if (modifier == EXPAND_STACK_PARM)
8640 target = 0;
8641 /* Possible optimization: compute the dividend with EXPAND_SUM;
8642 then, if the divisor is constant, we can optimize the case
8643 where some terms of the dividend have coefficients divisible by it. */
8644 expand_operands (treeop0, treeop1,
8645 subtarget, &op0, &op1, EXPAND_NORMAL);
8646 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8647
8648 case RDIV_EXPR:
8649 goto binop;
8650
8651 case MULT_HIGHPART_EXPR:
8652 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8653 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8654 gcc_assert (temp);
8655 return temp;
8656
8657 case TRUNC_MOD_EXPR:
8658 case FLOOR_MOD_EXPR:
8659 case CEIL_MOD_EXPR:
8660 case ROUND_MOD_EXPR:
8661 if (modifier == EXPAND_STACK_PARM)
8662 target = 0;
8663 expand_operands (treeop0, treeop1,
8664 subtarget, &op0, &op1, EXPAND_NORMAL);
8665 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8666
8667 case FIXED_CONVERT_EXPR:
8668 op0 = expand_normal (treeop0);
8669 if (target == 0 || modifier == EXPAND_STACK_PARM)
8670 target = gen_reg_rtx (mode);
8671
8672 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8673 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8674 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8675 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8676 else
8677 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8678 return target;
8679
8680 case FIX_TRUNC_EXPR:
8681 op0 = expand_normal (treeop0);
8682 if (target == 0 || modifier == EXPAND_STACK_PARM)
8683 target = gen_reg_rtx (mode);
8684 expand_fix (target, op0, unsignedp);
8685 return target;
8686
8687 case FLOAT_EXPR:
8688 op0 = expand_normal (treeop0);
8689 if (target == 0 || modifier == EXPAND_STACK_PARM)
8690 target = gen_reg_rtx (mode);
8691 /* expand_float can't figure out what to do if FROM has VOIDmode.
8692 So give it the correct mode. With -O, cse will optimize this. */
8693 if (GET_MODE (op0) == VOIDmode)
8694 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8695 op0);
8696 expand_float (target, op0,
8697 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8698 return target;
8699
8700 case NEGATE_EXPR:
8701 op0 = expand_expr (treeop0, subtarget,
8702 VOIDmode, EXPAND_NORMAL);
8703 if (modifier == EXPAND_STACK_PARM)
8704 target = 0;
8705 temp = expand_unop (mode,
8706 optab_for_tree_code (NEGATE_EXPR, type,
8707 optab_default),
8708 op0, target, 0);
8709 gcc_assert (temp);
8710 return REDUCE_BIT_FIELD (temp);
8711
8712 case ABS_EXPR:
8713 op0 = expand_expr (treeop0, subtarget,
8714 VOIDmode, EXPAND_NORMAL);
8715 if (modifier == EXPAND_STACK_PARM)
8716 target = 0;
8717
8718 /* ABS_EXPR is not valid for complex arguments. */
8719 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8720 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8721
8722 /* Unsigned abs is simply the operand. Testing here means we don't
8723 risk generating incorrect code below. */
8724 if (TYPE_UNSIGNED (type))
8725 return op0;
8726
8727 return expand_abs (mode, op0, target, unsignedp,
8728 safe_from_p (target, treeop0, 1));
8729
8730 case MAX_EXPR:
8731 case MIN_EXPR:
8732 target = original_target;
8733 if (target == 0
8734 || modifier == EXPAND_STACK_PARM
8735 || (MEM_P (target) && MEM_VOLATILE_P (target))
8736 || GET_MODE (target) != mode
8737 || (REG_P (target)
8738 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8739 target = gen_reg_rtx (mode);
8740 expand_operands (treeop0, treeop1,
8741 target, &op0, &op1, EXPAND_NORMAL);
8742
8743 /* First try to do it with a special MIN or MAX instruction.
8744 If that does not win, use a conditional jump to select the proper
8745 value. */
8746 this_optab = optab_for_tree_code (code, type, optab_default);
8747 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8748 OPTAB_WIDEN);
8749 if (temp != 0)
8750 return temp;
8751
8752 /* At this point, a MEM target is no longer useful; we will get better
8753 code without it. */
8754
8755 if (! REG_P (target))
8756 target = gen_reg_rtx (mode);
8757
8758 /* If op1 was placed in target, swap op0 and op1. */
8759 if (target != op0 && target == op1)
8760 {
8761 temp = op0;
8762 op0 = op1;
8763 op1 = temp;
8764 }
8765
8766 /* We generate better code and avoid problems with op1 mentioning
8767 target by forcing op1 into a pseudo if it isn't a constant. */
8768 if (! CONSTANT_P (op1))
8769 op1 = force_reg (mode, op1);
8770
8771 {
8772 enum rtx_code comparison_code;
8773 rtx cmpop1 = op1;
8774
8775 if (code == MAX_EXPR)
8776 comparison_code = unsignedp ? GEU : GE;
8777 else
8778 comparison_code = unsignedp ? LEU : LE;
8779
8780 /* Canonicalize to comparisons against 0. */
8781 if (op1 == const1_rtx)
8782 {
8783 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8784 or (a != 0 ? a : 1) for unsigned.
8785 For MIN we are safe converting (a <= 1 ? a : 1)
8786 into (a <= 0 ? a : 1) */
8787 cmpop1 = const0_rtx;
8788 if (code == MAX_EXPR)
8789 comparison_code = unsignedp ? NE : GT;
8790 }
8791 if (op1 == constm1_rtx && !unsignedp)
8792 {
8793 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8794 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8795 cmpop1 = const0_rtx;
8796 if (code == MIN_EXPR)
8797 comparison_code = LT;
8798 }
8799 #ifdef HAVE_conditional_move
8800 /* Use a conditional move if possible. */
8801 if (can_conditionally_move_p (mode))
8802 {
8803 rtx insn;
8804
8805 start_sequence ();
8806
8807 /* Try to emit the conditional move. */
8808 insn = emit_conditional_move (target, comparison_code,
8809 op0, cmpop1, mode,
8810 op0, op1, mode,
8811 unsignedp);
8812
8813 /* If we could do the conditional move, emit the sequence,
8814 and return. */
8815 if (insn)
8816 {
8817 rtx seq = get_insns ();
8818 end_sequence ();
8819 emit_insn (seq);
8820 return target;
8821 }
8822
8823 /* Otherwise discard the sequence and fall back to code with
8824 branches. */
8825 end_sequence ();
8826 }
8827 #endif
8828 if (target != op0)
8829 emit_move_insn (target, op0);
8830
8831 temp = gen_label_rtx ();
8832 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8833 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8834 -1);
8835 }
8836 emit_move_insn (target, op1);
8837 emit_label (temp);
8838 return target;
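 /* A sketch of the branchy fallback emitted above when neither a MIN/MAX
    instruction nor a conditional move is available:

        target = op0;
        if (target <comparison_code> cmpop1) goto done;
        target = op1;
      done:

    where the comparison is GE/GEU for MAX_EXPR and LE/LEU for MIN_EXPR,
    possibly canonicalized against zero as described above. */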
8839
8840 case BIT_NOT_EXPR:
8841 op0 = expand_expr (treeop0, subtarget,
8842 VOIDmode, EXPAND_NORMAL);
8843 if (modifier == EXPAND_STACK_PARM)
8844 target = 0;
8845 /* In case we have to reduce the result to bitfield precision
8846 for an unsigned bitfield, expand this as XOR with a proper constant
8847 instead. */
8848 if (reduce_bit_field && TYPE_UNSIGNED (type))
8849 {
8850 wide_int mask = wi::mask (TYPE_PRECISION (type),
8851 false, GET_MODE_PRECISION (mode));
8852
8853 temp = expand_binop (mode, xor_optab, op0,
8854 immed_wide_int_const (mask, mode),
8855 target, 1, OPTAB_LIB_WIDEN);
8856 }
8857 else
8858 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8859 gcc_assert (temp);
8860 return temp;
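 /* For example, for a 3-bit unsigned bit-field type computed in SImode
    the mask above is 0b111, so ~x is emitted as x ^ 7 and the bits above
    the field's precision stay zero. */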
8861
8862 /* ??? Can optimize bitwise operations with one arg constant.
8863 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8864 and (a bitwise1 b) bitwise2 b (etc)
8865 but that is probably not worthwhile. */
8866
8867 case BIT_AND_EXPR:
8868 case BIT_IOR_EXPR:
8869 case BIT_XOR_EXPR:
8870 goto binop;
8871
8872 case LROTATE_EXPR:
8873 case RROTATE_EXPR:
8874 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8875 || (GET_MODE_PRECISION (TYPE_MODE (type))
8876 == TYPE_PRECISION (type)));
8877 /* fall through */
8878
8879 case LSHIFT_EXPR:
8880 case RSHIFT_EXPR:
8881 /* If this is a fixed-point operation, then we cannot use the code
8882 below because "expand_shift" doesn't support sat/no-sat fixed-point
8883 shifts. */
8884 if (ALL_FIXED_POINT_MODE_P (mode))
8885 goto binop;
8886
8887 if (! safe_from_p (subtarget, treeop1, 1))
8888 subtarget = 0;
8889 if (modifier == EXPAND_STACK_PARM)
8890 target = 0;
8891 op0 = expand_expr (treeop0, subtarget,
8892 VOIDmode, EXPAND_NORMAL);
8893 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8894 unsignedp);
8895 if (code == LSHIFT_EXPR)
8896 temp = REDUCE_BIT_FIELD (temp);
8897 return temp;
8898
8899 /* Could determine the answer when only additive constants differ. Also,
8900 the addition of one can be handled by changing the condition. */
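 /* For example, (a + 3) == (b + 3) has the same answer as a == b, and
    a < b + 1 could be tested as a <= b by adjusting the condition;
    possible folds of that kind are alluded to above but not performed
    here. */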
8901 case LT_EXPR:
8902 case LE_EXPR:
8903 case GT_EXPR:
8904 case GE_EXPR:
8905 case EQ_EXPR:
8906 case NE_EXPR:
8907 case UNORDERED_EXPR:
8908 case ORDERED_EXPR:
8909 case UNLT_EXPR:
8910 case UNLE_EXPR:
8911 case UNGT_EXPR:
8912 case UNGE_EXPR:
8913 case UNEQ_EXPR:
8914 case LTGT_EXPR:
8915 temp = do_store_flag (ops,
8916 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8917 tmode != VOIDmode ? tmode : mode);
8918 if (temp)
8919 return temp;
8920
8921 /* Use a compare and a jump for BLKmode comparisons, or for function
8922 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8923
8924 if ((target == 0
8925 || modifier == EXPAND_STACK_PARM
8926 || ! safe_from_p (target, treeop0, 1)
8927 || ! safe_from_p (target, treeop1, 1)
8928 /* Make sure we don't have a hard reg (such as function's return
8929 value) live across basic blocks, if not optimizing. */
8930 || (!optimize && REG_P (target)
8931 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8932 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8933
8934 emit_move_insn (target, const0_rtx);
8935
8936 op1 = gen_label_rtx ();
8937 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8938
8939 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8940 emit_move_insn (target, constm1_rtx);
8941 else
8942 emit_move_insn (target, const1_rtx);
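 /* A signed type of precision 1 can represent only 0 and -1, which is
    why "true" is stored as -1 rather than 1 in that case. */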
8943
8944 emit_label (op1);
8945 return target;
8946
8947 case COMPLEX_EXPR:
8948 /* Get the rtx code of the operands. */
8949 op0 = expand_normal (treeop0);
8950 op1 = expand_normal (treeop1);
8951
8952 if (!target)
8953 target = gen_reg_rtx (TYPE_MODE (type));
8954 else
8955 /* If target overlaps with op1, then either we need to force
8956 op1 into a pseudo (if target also overlaps with op0),
8957 or write the complex parts in reverse order. */
8958 switch (GET_CODE (target))
8959 {
8960 case CONCAT:
8961 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8962 {
8963 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8964 {
8965 complex_expr_force_op1:
8966 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8967 emit_move_insn (temp, op1);
8968 op1 = temp;
8969 break;
8970 }
8971 complex_expr_swap_order:
8972 /* Move the imaginary (op1) and real (op0) parts to their
8973 location. */
8974 write_complex_part (target, op1, true);
8975 write_complex_part (target, op0, false);
8976
8977 return target;
8978 }
8979 break;
8980 case MEM:
8981 temp = adjust_address_nv (target,
8982 GET_MODE_INNER (GET_MODE (target)), 0);
8983 if (reg_overlap_mentioned_p (temp, op1))
8984 {
8985 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8986 temp = adjust_address_nv (target, imode,
8987 GET_MODE_SIZE (imode));
8988 if (reg_overlap_mentioned_p (temp, op0))
8989 goto complex_expr_force_op1;
8990 goto complex_expr_swap_order;
8991 }
8992 break;
8993 default:
8994 if (reg_overlap_mentioned_p (target, op1))
8995 {
8996 if (reg_overlap_mentioned_p (target, op0))
8997 goto complex_expr_force_op1;
8998 goto complex_expr_swap_order;
8999 }
9000 break;
9001 }
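 /* For instance, if TARGET is (concat r i) and op1 reads from r, the
    swap-order path above stores the imaginary part first so that writing
    op0 into r cannot clobber op1's input. */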
9002
9003 /* Move the real (op0) and imaginary (op1) parts to their location. */
9004 write_complex_part (target, op0, false);
9005 write_complex_part (target, op1, true);
9006
9007 return target;
9008
9009 case WIDEN_SUM_EXPR:
9010 {
9011 tree oprnd0 = treeop0;
9012 tree oprnd1 = treeop1;
9013
9014 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9015 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9016 target, unsignedp);
9017 return target;
9018 }
9019
9020 case REDUC_MAX_EXPR:
9021 case REDUC_MIN_EXPR:
9022 case REDUC_PLUS_EXPR:
9023 {
9024 op0 = expand_normal (treeop0);
9025 this_optab = optab_for_tree_code (code, type, optab_default);
9026 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9027 gcc_assert (temp);
9028 return temp;
9029 }
9030
9031 case VEC_LSHIFT_EXPR:
9032 case VEC_RSHIFT_EXPR:
9033 {
9034 target = expand_vec_shift_expr (ops, target);
9035 return target;
9036 }
9037
9038 case VEC_UNPACK_HI_EXPR:
9039 case VEC_UNPACK_LO_EXPR:
9040 {
9041 op0 = expand_normal (treeop0);
9042 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9043 target, unsignedp);
9044 gcc_assert (temp);
9045 return temp;
9046 }
9047
9048 case VEC_UNPACK_FLOAT_HI_EXPR:
9049 case VEC_UNPACK_FLOAT_LO_EXPR:
9050 {
9051 op0 = expand_normal (treeop0);
9052 /* The signedness is determined from the input operand. */
9053 temp = expand_widen_pattern_expr
9054 (ops, op0, NULL_RTX, NULL_RTX,
9055 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9056
9057 gcc_assert (temp);
9058 return temp;
9059 }
9060
9061 case VEC_WIDEN_MULT_HI_EXPR:
9062 case VEC_WIDEN_MULT_LO_EXPR:
9063 case VEC_WIDEN_MULT_EVEN_EXPR:
9064 case VEC_WIDEN_MULT_ODD_EXPR:
9065 case VEC_WIDEN_LSHIFT_HI_EXPR:
9066 case VEC_WIDEN_LSHIFT_LO_EXPR:
9067 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9068 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9069 target, unsignedp);
9070 gcc_assert (target);
9071 return target;
9072
9073 case VEC_PACK_TRUNC_EXPR:
9074 case VEC_PACK_SAT_EXPR:
9075 case VEC_PACK_FIX_TRUNC_EXPR:
9076 mode = TYPE_MODE (TREE_TYPE (treeop0));
9077 goto binop;
9078
9079 case VEC_PERM_EXPR:
9080 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9081 op2 = expand_normal (treeop2);
9082
9083 /* Careful here: if the target doesn't support integral vector modes,
9084 a constant selection vector could wind up smooshed into a normal
9085 integral constant. */
9086 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9087 {
9088 tree sel_type = TREE_TYPE (treeop2);
9089 enum machine_mode vmode
9090 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9091 TYPE_VECTOR_SUBPARTS (sel_type));
9092 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9093 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9094 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9095 }
9096 else
9097 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9098
9099 temp = expand_vec_perm (mode, op0, op1, op2, target);
9100 gcc_assert (temp);
9101 return temp;
9102
9103 case DOT_PROD_EXPR:
9104 {
9105 tree oprnd0 = treeop0;
9106 tree oprnd1 = treeop1;
9107 tree oprnd2 = treeop2;
9108 rtx op2;
9109
9110 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9111 op2 = expand_normal (oprnd2);
9112 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9113 target, unsignedp);
9114 return target;
9115 }
9116
9117 case SAD_EXPR:
9118 {
9119 tree oprnd0 = treeop0;
9120 tree oprnd1 = treeop1;
9121 tree oprnd2 = treeop2;
9122 rtx op2;
9123
9124 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9125 op2 = expand_normal (oprnd2);
9126 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9127 target, unsignedp);
9128 return target;
9129 }
9130
9131 case REALIGN_LOAD_EXPR:
9132 {
9133 tree oprnd0 = treeop0;
9134 tree oprnd1 = treeop1;
9135 tree oprnd2 = treeop2;
9136 rtx op2;
9137
9138 this_optab = optab_for_tree_code (code, type, optab_default);
9139 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9140 op2 = expand_normal (oprnd2);
9141 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9142 target, unsignedp);
9143 gcc_assert (temp);
9144 return temp;
9145 }
9146
9147 case COND_EXPR:
9148 /* A COND_EXPR with its type being VOID_TYPE represents a
9149 conditional jump and is handled in
9150 expand_gimple_cond_expr. */
9151 gcc_assert (!VOID_TYPE_P (type));
9152
9153 /* Note that COND_EXPRs whose type is a structure or union
9154 are required to be constructed to contain assignments of
9155 a temporary variable, so that we can evaluate them here
9156 for side effect only. If type is void, we must do likewise. */
9157
9158 gcc_assert (!TREE_ADDRESSABLE (type)
9159 && !ignore
9160 && TREE_TYPE (treeop1) != void_type_node
9161 && TREE_TYPE (treeop2) != void_type_node);
9162
9163 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9164 if (temp)
9165 return temp;
9166
9167 /* If we are not to produce a result, we have no target. Otherwise,
9168 if a target was specified use it; it will not be used as an
9169 intermediate target unless it is safe. If no target, use a
9170 temporary. */
9171
9172 if (modifier != EXPAND_STACK_PARM
9173 && original_target
9174 && safe_from_p (original_target, treeop0, 1)
9175 && GET_MODE (original_target) == mode
9176 && !MEM_P (original_target))
9177 temp = original_target;
9178 else
9179 temp = assign_temp (type, 0, 1);
9180
9181 do_pending_stack_adjust ();
9182 NO_DEFER_POP;
9183 op0 = gen_label_rtx ();
9184 op1 = gen_label_rtx ();
9185 jumpifnot (treeop0, op0, -1);
9186 store_expr (treeop1, temp,
9187 modifier == EXPAND_STACK_PARM,
9188 false);
9189
9190 emit_jump_insn (gen_jump (op1));
9191 emit_barrier ();
9192 emit_label (op0);
9193 store_expr (treeop2, temp,
9194 modifier == EXPAND_STACK_PARM,
9195 false);
9196
9197 emit_label (op1);
9198 OK_DEFER_POP;
9199 return temp;
9200
9201 case VEC_COND_EXPR:
9202 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9203 return target;
9204
9205 default:
9206 gcc_unreachable ();
9207 }
9208
9209 /* Here to do an ordinary binary operator. */
9210 binop:
9211 expand_operands (treeop0, treeop1,
9212 subtarget, &op0, &op1, EXPAND_NORMAL);
9213 binop2:
9214 this_optab = optab_for_tree_code (code, type, optab_default);
9215 binop3:
9216 if (modifier == EXPAND_STACK_PARM)
9217 target = 0;
9218 temp = expand_binop (mode, this_optab, op0, op1, target,
9219 unsignedp, OPTAB_LIB_WIDEN);
9220 gcc_assert (temp);
9221 /* Bitwise operations do not need bitfield reduction as we expect their
9222 operands being properly truncated. */
9223 if (code == BIT_XOR_EXPR
9224 || code == BIT_AND_EXPR
9225 || code == BIT_IOR_EXPR)
9226 return temp;
9227 return REDUCE_BIT_FIELD (temp);
9228 }
9229 #undef REDUCE_BIT_FIELD
9230
9231 /* Return TRUE if the value in SSA is zero and sign extended for the wider
9232 mode MODE, using the stored value range information. Return FALSE otherwise.
9233
9234 This is used to check whether SUBREG is zero and sign extended and to set
9235 the promoted mode SRP_SIGNED_AND_UNSIGNED on SUBREG. */
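 /* For example: if an SSA name of 16-bit type has a recorded value range
    of [0, 100], its most significant bit is known to be clear, so the
    SUBREG of the wider promoted register holding it is valid both as a
    zero extension and as a sign extension. */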
9236
9237 bool
9238 promoted_for_signed_and_unsigned_p (tree ssa, enum machine_mode mode)
9239 {
9240 wide_int min, max;
9241
9242 if (ssa == NULL_TREE
9243 || TREE_CODE (ssa) != SSA_NAME
9244 || !INTEGRAL_TYPE_P (TREE_TYPE (ssa))
9245 || (TYPE_PRECISION (TREE_TYPE (ssa)) != GET_MODE_PRECISION (mode)))
9246 return false;
9247
9248 /* Return FALSE if value_range is not recorded for SSA. */
9249 if (get_range_info (ssa, &min, &max) != VR_RANGE)
9250 return false;
9251
9252 /* Return true (to set SRP_SIGNED_AND_UNSIGNED on SUBREG) if the MSB of the
9253 smaller mode is not set (i.e. the MSB of SSA is not set). */
9254 if (!wi::neg_p (min, SIGNED) && !wi::neg_p (max, SIGNED))
9255 return true;
9256 else
9257 return false;
9258
9259 }
9260
9261 /* Return TRUE if expression STMT is suitable for replacement.
9262 Never consider memory loads as replaceable, because those don't ever lead
9263 into constant expressions. */
9264
9265 static bool
9266 stmt_is_replaceable_p (gimple stmt)
9267 {
9268 if (ssa_is_replaceable_p (stmt))
9269 {
9270 /* Don't move around loads. */
9271 if (!gimple_assign_single_p (stmt)
9272 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9273 return true;
9274 }
9275 return false;
9276 }
9277
9278 rtx
9279 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9280 enum expand_modifier modifier, rtx *alt_rtl,
9281 bool inner_reference_p)
9282 {
9283 rtx op0, op1, temp, decl_rtl;
9284 tree type;
9285 int unsignedp;
9286 enum machine_mode mode;
9287 enum tree_code code = TREE_CODE (exp);
9288 rtx subtarget, original_target;
9289 int ignore;
9290 tree context;
9291 bool reduce_bit_field;
9292 location_t loc = EXPR_LOCATION (exp);
9293 struct separate_ops ops;
9294 tree treeop0, treeop1, treeop2;
9295 tree ssa_name = NULL_TREE;
9296 gimple g;
9297
9298 type = TREE_TYPE (exp);
9299 mode = TYPE_MODE (type);
9300 unsignedp = TYPE_UNSIGNED (type);
9301
9302 treeop0 = treeop1 = treeop2 = NULL_TREE;
9303 if (!VL_EXP_CLASS_P (exp))
9304 switch (TREE_CODE_LENGTH (code))
9305 {
9306 default:
9307 case 3: treeop2 = TREE_OPERAND (exp, 2);
9308 case 2: treeop1 = TREE_OPERAND (exp, 1);
9309 case 1: treeop0 = TREE_OPERAND (exp, 0);
9310 case 0: break;
9311 }
9312 ops.code = code;
9313 ops.type = type;
9314 ops.op0 = treeop0;
9315 ops.op1 = treeop1;
9316 ops.op2 = treeop2;
9317 ops.location = loc;
9318
9319 ignore = (target == const0_rtx
9320 || ((CONVERT_EXPR_CODE_P (code)
9321 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9322 && TREE_CODE (type) == VOID_TYPE));
9323
9324 /* An operation in what may be a bit-field type needs the
9325 result to be reduced to the precision of the bit-field type,
9326 which is narrower than that of the type's mode. */
9327 reduce_bit_field = (!ignore
9328 && INTEGRAL_TYPE_P (type)
9329 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
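 /* For instance, arithmetic on a 5-bit bit-field type is carried out in
    a full integer mode such as SImode, so the result has to be truncated
    back to 5 bits before it is stored or compared. */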
9330
9331 /* If we are going to ignore this result, we need only do something
9332 if there is a side-effect somewhere in the expression. If there
9333 is, short-circuit the most common cases here. Note that we must
9334 not call expand_expr with anything but const0_rtx in case this
9335 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9336
9337 if (ignore)
9338 {
9339 if (! TREE_SIDE_EFFECTS (exp))
9340 return const0_rtx;
9341
9342 /* Ensure we reference a volatile object even if value is ignored, but
9343 don't do this if all we are doing is taking its address. */
9344 if (TREE_THIS_VOLATILE (exp)
9345 && TREE_CODE (exp) != FUNCTION_DECL
9346 && mode != VOIDmode && mode != BLKmode
9347 && modifier != EXPAND_CONST_ADDRESS)
9348 {
9349 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9350 if (MEM_P (temp))
9351 copy_to_reg (temp);
9352 return const0_rtx;
9353 }
9354
9355 if (TREE_CODE_CLASS (code) == tcc_unary
9356 || code == BIT_FIELD_REF
9357 || code == COMPONENT_REF
9358 || code == INDIRECT_REF)
9359 return expand_expr (treeop0, const0_rtx, VOIDmode,
9360 modifier);
9361
9362 else if (TREE_CODE_CLASS (code) == tcc_binary
9363 || TREE_CODE_CLASS (code) == tcc_comparison
9364 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9365 {
9366 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9367 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9368 return const0_rtx;
9369 }
9370
9371 target = 0;
9372 }
9373
9374 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9375 target = 0;
9376
9377 /* Use subtarget as the target for operand 0 of a binary operation. */
9378 subtarget = get_subtarget (target);
9379 original_target = target;
9380
9381 switch (code)
9382 {
9383 case LABEL_DECL:
9384 {
9385 tree function = decl_function_context (exp);
9386
9387 temp = label_rtx (exp);
9388 temp = gen_rtx_LABEL_REF (Pmode, temp);
9389
9390 if (function != current_function_decl
9391 && function != 0)
9392 LABEL_REF_NONLOCAL_P (temp) = 1;
9393
9394 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9395 return temp;
9396 }
9397
9398 case SSA_NAME:
9399 /* ??? ivopts calls the expander without any preparation from
9400 out-of-ssa. So fake instructions as if this was an access to the
9401 base variable. This unnecessarily allocates a pseudo; see how we can
9402 reuse it if partition base vars have it set already. */
9403 if (!currently_expanding_to_rtl)
9404 {
9405 tree var = SSA_NAME_VAR (exp);
9406 if (var && DECL_RTL_SET_P (var))
9407 return DECL_RTL (var);
9408 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9409 LAST_VIRTUAL_REGISTER + 1);
9410 }
9411
9412 g = get_gimple_for_ssa_name (exp);
9413 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9414 if (g == NULL
9415 && modifier == EXPAND_INITIALIZER
9416 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9417 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9418 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9419 g = SSA_NAME_DEF_STMT (exp);
9420 if (g)
9421 {
9422 rtx r;
9423 ops.code = gimple_assign_rhs_code (g);
9424 switch (get_gimple_rhs_class (ops.code))
9425 {
9426 case GIMPLE_TERNARY_RHS:
9427 ops.op2 = gimple_assign_rhs3 (g);
9428 /* Fallthru */
9429 case GIMPLE_BINARY_RHS:
9430 ops.op1 = gimple_assign_rhs2 (g);
9431 /* Fallthru */
9432 case GIMPLE_UNARY_RHS:
9433 ops.op0 = gimple_assign_rhs1 (g);
9434 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9435 ops.location = gimple_location (g);
9436 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9437 break;
9438 case GIMPLE_SINGLE_RHS:
9439 {
9440 location_t saved_loc = curr_insn_location ();
9441 set_curr_insn_location (gimple_location (g));
9442 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9443 tmode, modifier, NULL, inner_reference_p);
9444 set_curr_insn_location (saved_loc);
9445 break;
9446 }
9447 default:
9448 gcc_unreachable ();
9449 }
9450 if (REG_P (r) && !REG_EXPR (r))
9451 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9452 return r;
9453 }
9454
9455 ssa_name = exp;
9456 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9457 exp = SSA_NAME_VAR (ssa_name);
9458 goto expand_decl_rtl;
9459
9460 case PARM_DECL:
9461 case VAR_DECL:
9462 /* If a static var's type was incomplete when the decl was written,
9463 but the type is complete now, lay out the decl now. */
9464 if (DECL_SIZE (exp) == 0
9465 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9466 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9467 layout_decl (exp, 0);
9468
9469 /* ... fall through ... */
9470
9471 case FUNCTION_DECL:
9472 case RESULT_DECL:
9473 decl_rtl = DECL_RTL (exp);
9474 expand_decl_rtl:
9475 gcc_assert (decl_rtl);
9476 decl_rtl = copy_rtx (decl_rtl);
9477 /* Record writes to register variables. */
9478 if (modifier == EXPAND_WRITE
9479 && REG_P (decl_rtl)
9480 && HARD_REGISTER_P (decl_rtl))
9481 add_to_hard_reg_set (&crtl->asm_clobbers,
9482 GET_MODE (decl_rtl), REGNO (decl_rtl));
9483
9484 /* Ensure the variable is marked as used even if it doesn't go through
9485 a parser. If it hasn't been used yet, write out an external
9486 definition. */
9487 TREE_USED (exp) = 1;
9488
9489 /* Show we haven't gotten RTL for this yet. */
9490 temp = 0;
9491
9492 /* Variables inherited from containing functions should have
9493 been lowered by this point. */
9494 context = decl_function_context (exp);
9495 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9496 || context == current_function_decl
9497 || TREE_STATIC (exp)
9498 || DECL_EXTERNAL (exp)
9499 /* ??? C++ creates functions that are not TREE_STATIC. */
9500 || TREE_CODE (exp) == FUNCTION_DECL);
9501
9502 /* This is the case of an array whose size is to be determined
9503 from its initializer, while the initializer is still being parsed.
9504 ??? We aren't parsing while expanding anymore. */
9505
9506 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9507 temp = validize_mem (decl_rtl);
9508
9509 /* If DECL_RTL is memory, we are in the normal case and, if the
9510 address is not valid, get the address into a register. */
9511
9512 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9513 {
9514 if (alt_rtl)
9515 *alt_rtl = decl_rtl;
9516 decl_rtl = use_anchored_address (decl_rtl);
9517 if (modifier != EXPAND_CONST_ADDRESS
9518 && modifier != EXPAND_SUM
9519 && !memory_address_addr_space_p (DECL_MODE (exp),
9520 XEXP (decl_rtl, 0),
9521 MEM_ADDR_SPACE (decl_rtl)))
9522 temp = replace_equiv_address (decl_rtl,
9523 copy_rtx (XEXP (decl_rtl, 0)));
9524 }
9525
9526 /* If we got something, return it. But first, set the alignment
9527 if the address is a register. */
9528 if (temp != 0)
9529 {
9530 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9531 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9532
9533 return temp;
9534 }
9535
9536 /* If the mode of DECL_RTL does not match that of the decl,
9537 there are two cases: we are dealing with a BLKmode value
9538 that is returned in a register, or we are dealing with
9539 a promoted value. In the latter case, return a SUBREG
9540 of the wanted mode, but mark it so that we know that it
9541 was already extended. */
9542 if (REG_P (decl_rtl)
9543 && DECL_MODE (exp) != BLKmode
9544 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9545 {
9546 enum machine_mode pmode;
9547
9548 /* Get the signedness to be used for this variable. Ensure we get
9549 the same mode we got when the variable was declared. */
9550 if (code == SSA_NAME
9551 && (g = SSA_NAME_DEF_STMT (ssa_name))
9552 && gimple_code (g) == GIMPLE_CALL
9553 && !gimple_call_internal_p (g))
9554 pmode = promote_function_mode (type, mode, &unsignedp,
9555 gimple_call_fntype (g),
9556 2);
9557 else
9558 pmode = promote_decl_mode (exp, &unsignedp);
9559 gcc_assert (GET_MODE (decl_rtl) == pmode);
9560
9561 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9562 SUBREG_PROMOTED_VAR_P (temp) = 1;
9563 if (promoted_for_signed_and_unsigned_p (ssa_name, mode))
9564 SUBREG_PROMOTED_SET (temp, SRP_SIGNED_AND_UNSIGNED);
9565 else
9566 SUBREG_PROMOTED_SET (temp, unsignedp);
9567 return temp;
9568 }
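 /* For example, a short parameter promoted to an SImode register is
    returned here as a HImode SUBREG of that register with
    SUBREG_PROMOTED_VAR_P set, so later consumers know the value is
    already extended. */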
9569
9570 return decl_rtl;
9571
9572 case INTEGER_CST:
9573 /* Given that TYPE_PRECISION (type) is not always equal to
9574 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9575 the former to the latter according to the signedness of the
9576 type. */
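 /* E.g. for a type of precision 1 whose mode is QImode, the nonzero
    constant is widened here to all eight bits: to -1 if the type is
    signed, to 1 if it is unsigned. */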
9577 temp = immed_wide_int_const (wide_int::from
9578 (exp,
9579 GET_MODE_PRECISION (TYPE_MODE (type)),
9580 TYPE_SIGN (type)),
9581 TYPE_MODE (type));
9582 return temp;
9583
9584 case VECTOR_CST:
9585 {
9586 tree tmp = NULL_TREE;
9587 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9588 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9589 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9590 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9591 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9592 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9593 return const_vector_from_tree (exp);
9594 if (GET_MODE_CLASS (mode) == MODE_INT)
9595 {
9596 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9597 if (type_for_mode)
9598 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9599 }
9600 if (!tmp)
9601 {
9602 vec<constructor_elt, va_gc> *v;
9603 unsigned i;
9604 vec_alloc (v, VECTOR_CST_NELTS (exp));
9605 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9606 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9607 tmp = build_constructor (type, v);
9608 }
9609 return expand_expr (tmp, ignore ? const0_rtx : target,
9610 tmode, modifier);
9611 }
9612
9613 case CONST_DECL:
9614 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9615
9616 case REAL_CST:
9617 /* If optimized, generate immediate CONST_DOUBLE
9618 which will be turned into memory by reload if necessary.
9619
9620 We used to force a register so that loop.c could see it. But
9621 this does not allow gen_* patterns to perform optimizations with
9622 the constants. It also produces two insns in cases like "x = 1.0;".
9623 On most machines, floating-point constants are not permitted in
9624 many insns, so we'd end up copying it to a register in any case.
9625
9626 Now, we do the copying in expand_binop, if appropriate. */
9627 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9628 TYPE_MODE (TREE_TYPE (exp)));
9629
9630 case FIXED_CST:
9631 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9632 TYPE_MODE (TREE_TYPE (exp)));
9633
9634 case COMPLEX_CST:
9635 /* Handle evaluating a complex constant in a CONCAT target. */
9636 if (original_target && GET_CODE (original_target) == CONCAT)
9637 {
9638 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9639 rtx rtarg, itarg;
9640
9641 rtarg = XEXP (original_target, 0);
9642 itarg = XEXP (original_target, 1);
9643
9644 /* Move the real and imaginary parts separately. */
9645 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9646 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9647
9648 if (op0 != rtarg)
9649 emit_move_insn (rtarg, op0);
9650 if (op1 != itarg)
9651 emit_move_insn (itarg, op1);
9652
9653 return original_target;
9654 }
9655
9656 /* ... fall through ... */
9657
9658 case STRING_CST:
9659 temp = expand_expr_constant (exp, 1, modifier);
9660
9661 /* temp contains a constant address.
9662 On RISC machines where a constant address isn't valid,
9663 make some insns to get that address into a register. */
9664 if (modifier != EXPAND_CONST_ADDRESS
9665 && modifier != EXPAND_INITIALIZER
9666 && modifier != EXPAND_SUM
9667 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9668 MEM_ADDR_SPACE (temp)))
9669 return replace_equiv_address (temp,
9670 copy_rtx (XEXP (temp, 0)));
9671 return temp;
9672
9673 case SAVE_EXPR:
9674 {
9675 tree val = treeop0;
9676 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9677 inner_reference_p);
9678
9679 if (!SAVE_EXPR_RESOLVED_P (exp))
9680 {
9681 /* We can indeed still hit this case, typically via builtin
9682 expanders calling save_expr immediately before expanding
9683 something. Assume this means that we only have to deal
9684 with non-BLKmode values. */
9685 gcc_assert (GET_MODE (ret) != BLKmode);
9686
9687 val = build_decl (curr_insn_location (),
9688 VAR_DECL, NULL, TREE_TYPE (exp));
9689 DECL_ARTIFICIAL (val) = 1;
9690 DECL_IGNORED_P (val) = 1;
9691 treeop0 = val;
9692 TREE_OPERAND (exp, 0) = treeop0;
9693 SAVE_EXPR_RESOLVED_P (exp) = 1;
9694
9695 if (!CONSTANT_P (ret))
9696 ret = copy_to_reg (ret);
9697 SET_DECL_RTL (val, ret);
9698 }
9699
9700 return ret;
9701 }
9702
9703
9704 case CONSTRUCTOR:
9705 /* If we don't need the result, just ensure we evaluate any
9706 subexpressions. */
9707 if (ignore)
9708 {
9709 unsigned HOST_WIDE_INT idx;
9710 tree value;
9711
9712 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9713 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9714
9715 return const0_rtx;
9716 }
9717
9718 return expand_constructor (exp, target, modifier, false);
9719
9720 case TARGET_MEM_REF:
9721 {
9722 addr_space_t as
9723 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9724 enum insn_code icode;
9725 unsigned int align;
9726
9727 op0 = addr_for_mem_ref (exp, as, true);
9728 op0 = memory_address_addr_space (mode, op0, as);
9729 temp = gen_rtx_MEM (mode, op0);
9730 set_mem_attributes (temp, exp, 0);
9731 set_mem_addr_space (temp, as);
9732 align = get_object_alignment (exp);
9733 if (modifier != EXPAND_WRITE
9734 && modifier != EXPAND_MEMORY
9735 && mode != BLKmode
9736 && align < GET_MODE_ALIGNMENT (mode)
9737 /* If the target does not have special handling for unaligned
9738 loads of mode then it can use regular moves for them. */
9739 && ((icode = optab_handler (movmisalign_optab, mode))
9740 != CODE_FOR_nothing))
9741 {
9742 struct expand_operand ops[2];
9743
9744 /* We've already validated the memory, and we're creating a
9745 new pseudo destination. The predicates really can't fail,
9746 nor can the generator. */
9747 create_output_operand (&ops[0], NULL_RTX, mode);
9748 create_fixed_operand (&ops[1], temp);
9749 expand_insn (icode, 2, ops);
9750 temp = ops[0].value;
9751 }
9752 return temp;
9753 }
9754
9755 case MEM_REF:
9756 {
9757 addr_space_t as
9758 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9759 enum machine_mode address_mode;
9760 tree base = TREE_OPERAND (exp, 0);
9761 gimple def_stmt;
9762 enum insn_code icode;
9763 unsigned align;
9764 /* Handle expansion of non-aliased memory with non-BLKmode. That
9765 might end up in a register. */
9766 if (mem_ref_refers_to_non_mem_p (exp))
9767 {
9768 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9769 base = TREE_OPERAND (base, 0);
9770 if (offset == 0
9771 && tree_fits_uhwi_p (TYPE_SIZE (type))
9772 && (GET_MODE_BITSIZE (DECL_MODE (base))
9773 == tree_to_uhwi (TYPE_SIZE (type))))
9774 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9775 target, tmode, modifier);
9776 if (TYPE_MODE (type) == BLKmode)
9777 {
9778 temp = assign_stack_temp (DECL_MODE (base),
9779 GET_MODE_SIZE (DECL_MODE (base)));
9780 store_expr (base, temp, 0, false);
9781 temp = adjust_address (temp, BLKmode, offset);
9782 set_mem_size (temp, int_size_in_bytes (type));
9783 return temp;
9784 }
9785 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9786 bitsize_int (offset * BITS_PER_UNIT));
9787 return expand_expr (exp, target, tmode, modifier);
9788 }
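 /* As an illustration of the non-memory case above: for *(int *)&f
    where f is a float living in a register, a matching size yields a
    VIEW_CONVERT_EXPR of f, while a smaller or offset access is rewritten
    as a BIT_FIELD_REF at the corresponding position. */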
9789 address_mode = targetm.addr_space.address_mode (as);
9790 base = TREE_OPERAND (exp, 0);
9791 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9792 {
9793 tree mask = gimple_assign_rhs2 (def_stmt);
9794 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9795 gimple_assign_rhs1 (def_stmt), mask);
9796 TREE_OPERAND (exp, 0) = base;
9797 }
9798 align = get_object_alignment (exp);
9799 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9800 op0 = memory_address_addr_space (mode, op0, as);
9801 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9802 {
9803 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9804 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9805 op0 = memory_address_addr_space (mode, op0, as);
9806 }
9807 temp = gen_rtx_MEM (mode, op0);
9808 set_mem_attributes (temp, exp, 0);
9809 set_mem_addr_space (temp, as);
9810 if (TREE_THIS_VOLATILE (exp))
9811 MEM_VOLATILE_P (temp) = 1;
9812 if (modifier != EXPAND_WRITE
9813 && modifier != EXPAND_MEMORY
9814 && !inner_reference_p
9815 && mode != BLKmode
9816 && align < GET_MODE_ALIGNMENT (mode))
9817 {
9818 if ((icode = optab_handler (movmisalign_optab, mode))
9819 != CODE_FOR_nothing)
9820 {
9821 struct expand_operand ops[2];
9822
9823 /* We've already validated the memory, and we're creating a
9824 new pseudo destination. The predicates really can't fail,
9825 nor can the generator. */
9826 create_output_operand (&ops[0], NULL_RTX, mode);
9827 create_fixed_operand (&ops[1], temp);
9828 expand_insn (icode, 2, ops);
9829 temp = ops[0].value;
9830 }
9831 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9832 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9833 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9834 (modifier == EXPAND_STACK_PARM
9835 ? NULL_RTX : target),
9836 mode, mode);
9837 }
9838 return temp;
9839 }
9840
9841 case ARRAY_REF:
9842
9843 {
9844 tree array = treeop0;
9845 tree index = treeop1;
9846 tree init;
9847
9848 /* Fold an expression like: "foo"[2].
9849 This is not done in fold so it won't happen inside &.
9850 Don't fold if this is for wide characters since it's too
9851 difficult to do correctly and this is a very rare case. */
9852
9853 if (modifier != EXPAND_CONST_ADDRESS
9854 && modifier != EXPAND_INITIALIZER
9855 && modifier != EXPAND_MEMORY)
9856 {
9857 tree t = fold_read_from_constant_string (exp);
9858
9859 if (t)
9860 return expand_expr (t, target, tmode, modifier);
9861 }
9862
9863 /* If this is a constant index into a constant array,
9864 just get the value from the array. Handle both the cases when
9865 we have an explicit constructor and when our operand is a variable
9866 that was declared const. */
9867
9868 if (modifier != EXPAND_CONST_ADDRESS
9869 && modifier != EXPAND_INITIALIZER
9870 && modifier != EXPAND_MEMORY
9871 && TREE_CODE (array) == CONSTRUCTOR
9872 && ! TREE_SIDE_EFFECTS (array)
9873 && TREE_CODE (index) == INTEGER_CST)
9874 {
9875 unsigned HOST_WIDE_INT ix;
9876 tree field, value;
9877
9878 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9879 field, value)
9880 if (tree_int_cst_equal (field, index))
9881 {
9882 if (!TREE_SIDE_EFFECTS (value))
9883 return expand_expr (fold (value), target, tmode, modifier);
9884 break;
9885 }
9886 }
9887
9888 else if (optimize >= 1
9889 && modifier != EXPAND_CONST_ADDRESS
9890 && modifier != EXPAND_INITIALIZER
9891 && modifier != EXPAND_MEMORY
9892 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9893 && TREE_CODE (index) == INTEGER_CST
9894 && (TREE_CODE (array) == VAR_DECL
9895 || TREE_CODE (array) == CONST_DECL)
9896 && (init = ctor_for_folding (array)) != error_mark_node)
9897 {
9898 if (init == NULL_TREE)
9899 {
9900 tree value = build_zero_cst (type);
9901 if (TREE_CODE (value) == CONSTRUCTOR)
9902 {
9903 /* If VALUE is a CONSTRUCTOR, this optimization is only
9904 useful if this doesn't store the CONSTRUCTOR into
9905 memory. If it does, it is more efficient to just
9906 load the data from the array directly. */
9907 rtx ret = expand_constructor (value, target,
9908 modifier, true);
9909 if (ret == NULL_RTX)
9910 value = NULL_TREE;
9911 }
9912
9913 if (value)
9914 return expand_expr (value, target, tmode, modifier);
9915 }
9916 else if (TREE_CODE (init) == CONSTRUCTOR)
9917 {
9918 unsigned HOST_WIDE_INT ix;
9919 tree field, value;
9920
9921 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9922 field, value)
9923 if (tree_int_cst_equal (field, index))
9924 {
9925 if (TREE_SIDE_EFFECTS (value))
9926 break;
9927
9928 if (TREE_CODE (value) == CONSTRUCTOR)
9929 {
9930 /* If VALUE is a CONSTRUCTOR, this
9931 optimization is only useful if
9932 this doesn't store the CONSTRUCTOR
9933 into memory. If it does, it is more
9934 efficient to just load the data from
9935 the array directly. */
9936 rtx ret = expand_constructor (value, target,
9937 modifier, true);
9938 if (ret == NULL_RTX)
9939 break;
9940 }
9941
9942 return
9943 expand_expr (fold (value), target, tmode, modifier);
9944 }
9945 }
9946 else if (TREE_CODE (init) == STRING_CST)
9947 {
9948 tree low_bound = array_ref_low_bound (exp);
9949 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9950
9951 /* Optimize the special case of a zero lower bound.
9952
9953 We convert the lower bound to sizetype to avoid problems
9954 with constant folding. E.g. suppose the lower bound is
9955 1 and its mode is QI. Without the conversion
9956 (ARRAY + (INDEX - (unsigned char)1))
9957 becomes
9958 (ARRAY + (-(unsigned char)1) + INDEX)
9959 which becomes
9960 (ARRAY + 255 + INDEX). Oops! */
9961 if (!integer_zerop (low_bound))
9962 index1 = size_diffop_loc (loc, index1,
9963 fold_convert_loc (loc, sizetype,
9964 low_bound));
9965
9966 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9967 {
9968 tree type = TREE_TYPE (TREE_TYPE (init));
9969 enum machine_mode mode = TYPE_MODE (type);
9970
9971 if (GET_MODE_CLASS (mode) == MODE_INT
9972 && GET_MODE_SIZE (mode) == 1)
9973 return gen_int_mode (TREE_STRING_POINTER (init)
9974 [TREE_INT_CST_LOW (index1)],
9975 mode);
9976 }
9977 }
9978 }
9979 }
9980 goto normal_inner_ref;
9981
9982 case COMPONENT_REF:
9983 /* If the operand is a CONSTRUCTOR, we can just extract the
9984 appropriate field if it is present. */
9985 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9986 {
9987 unsigned HOST_WIDE_INT idx;
9988 tree field, value;
9989
9990 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9991 idx, field, value)
9992 if (field == treeop1
9993 /* We can normally use the value of the field in the
9994 CONSTRUCTOR. However, if this is a bitfield in
9995 an integral mode that we can fit in a HOST_WIDE_INT,
9996 we must mask only the number of bits in the bitfield,
9997 since this is done implicitly by the constructor. If
9998 the bitfield does not meet either of those conditions,
9999 we can't do this optimization. */
10000 && (! DECL_BIT_FIELD (field)
10001 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10002 && (GET_MODE_PRECISION (DECL_MODE (field))
10003 <= HOST_BITS_PER_WIDE_INT))))
10004 {
10005 if (DECL_BIT_FIELD (field)
10006 && modifier == EXPAND_STACK_PARM)
10007 target = 0;
10008 op0 = expand_expr (value, target, tmode, modifier);
10009 if (DECL_BIT_FIELD (field))
10010 {
10011 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10012 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10013
10014 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10015 {
10016 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10017 imode);
10018 op0 = expand_and (imode, op0, op1, target);
10019 }
10020 else
10021 {
10022 int count = GET_MODE_PRECISION (imode) - bitsize;
10023
10024 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10025 target, 0);
10026 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10027 target, 0);
10028 }
10029 }
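 /* For example, for a 5-bit field whose value was expanded in SImode:
    an unsigned field is masked with 0x1f above, while a signed field is
    shifted left and then arithmetically right by 27 bits to sign extend
    it. */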
10030
10031 return op0;
10032 }
10033 }
10034 goto normal_inner_ref;
10035
10036 case BIT_FIELD_REF:
10037 case ARRAY_RANGE_REF:
10038 normal_inner_ref:
10039 {
10040 enum machine_mode mode1, mode2;
10041 HOST_WIDE_INT bitsize, bitpos;
10042 tree offset;
10043 int volatilep = 0, must_force_mem;
10044 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10045 &mode1, &unsignedp, &volatilep, true);
10046 rtx orig_op0, memloc;
10047 bool mem_attrs_from_type = false;
10048
10049 /* If we got back the original object, something is wrong. Perhaps
10050 we are evaluating an expression too early. In any event, don't
10051 infinitely recurse. */
10052 gcc_assert (tem != exp);
10053
10054 /* If TEM's type is a union of variable size, pass TARGET to the inner
10055 computation, since it will need a temporary and TARGET is known
10056 to have to do. This occurs in unchecked conversion in Ada. */
10057 orig_op0 = op0
10058 = expand_expr_real (tem,
10059 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10060 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10061 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10062 != INTEGER_CST)
10063 && modifier != EXPAND_STACK_PARM
10064 ? target : NULL_RTX),
10065 VOIDmode,
10066 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10067 NULL, true);
10068
10069 /* If the field has a mode, we want to access it in the
10070 field's mode, not the computed mode.
10071 If a MEM has VOIDmode (external with incomplete type),
10072 use BLKmode for it instead. */
10073 if (MEM_P (op0))
10074 {
10075 if (mode1 != VOIDmode)
10076 op0 = adjust_address (op0, mode1, 0);
10077 else if (GET_MODE (op0) == VOIDmode)
10078 op0 = adjust_address (op0, BLKmode, 0);
10079 }
10080
10081 mode2
10082 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10083
10084 /* If we have either an offset, a BLKmode result, or a reference
10085 outside the underlying object, we must force it to memory.
10086 Such a case can occur in Ada if we have unchecked conversion
10087 of an expression from a scalar type to an aggregate type or
10088 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10089 passed a partially uninitialized object or a view-conversion
10090 to a larger size. */
10091 must_force_mem = (offset
10092 || mode1 == BLKmode
10093 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10094
10095 /* Handle CONCAT first. */
10096 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10097 {
10098 if (bitpos == 0
10099 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10100 return op0;
10101 if (bitpos == 0
10102 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10103 && bitsize)
10104 {
10105 op0 = XEXP (op0, 0);
10106 mode2 = GET_MODE (op0);
10107 }
10108 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10109 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10110 && bitpos
10111 && bitsize)
10112 {
10113 op0 = XEXP (op0, 1);
10114 bitpos = 0;
10115 mode2 = GET_MODE (op0);
10116 }
10117 else
10118 /* Otherwise force into memory. */
10119 must_force_mem = 1;
10120 }
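 /* So e.g. taking __real__ or __imag__ of a complex value expanded as
    (concat r i) simply picks out r or i here, without going through
    memory, as long as the reference lines up exactly with one half (or
    the whole) of the CONCAT. */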
10121
10122 /* If this is a constant, put it in a register if it is a legitimate
10123 constant and we don't need a memory reference. */
10124 if (CONSTANT_P (op0)
10125 && mode2 != BLKmode
10126 && targetm.legitimate_constant_p (mode2, op0)
10127 && !must_force_mem)
10128 op0 = force_reg (mode2, op0);
10129
10130 /* Otherwise, if this is a constant, try to force it to the constant
10131 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10132 is a legitimate constant. */
10133 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10134 op0 = validize_mem (memloc);
10135
10136 /* Otherwise, if this is a constant or the object is not in memory
10137 and needs to be, put it there. */
10138 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10139 {
10140 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10141 emit_move_insn (memloc, op0);
10142 op0 = memloc;
10143 mem_attrs_from_type = true;
10144 }
10145
10146 if (offset)
10147 {
10148 enum machine_mode address_mode;
10149 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10150 EXPAND_SUM);
10151
10152 gcc_assert (MEM_P (op0));
10153
10154 address_mode = get_address_mode (op0);
10155 if (GET_MODE (offset_rtx) != address_mode)
10156 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10157
10158 /* See the comment in expand_assignment for the rationale. */
10159 if (mode1 != VOIDmode
10160 && bitpos != 0
10161 && bitsize > 0
10162 && (bitpos % bitsize) == 0
10163 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10164 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10165 {
10166 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10167 bitpos = 0;
10168 }
10169
10170 op0 = offset_address (op0, offset_rtx,
10171 highest_pow2_factor (offset));
10172 }
10173
10174 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10175 record its alignment as BIGGEST_ALIGNMENT. */
10176 if (MEM_P (op0) && bitpos == 0 && offset != 0
10177 && is_aligning_offset (offset, tem))
10178 set_mem_align (op0, BIGGEST_ALIGNMENT);
10179
10180 /* Don't forget about volatility even if this is a bitfield. */
10181 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10182 {
10183 if (op0 == orig_op0)
10184 op0 = copy_rtx (op0);
10185
10186 MEM_VOLATILE_P (op0) = 1;
10187 }
10188
10189 /* In cases where an aligned union has an unaligned object
10190 as a field, we might be extracting a BLKmode value from
10191 an integer-mode (e.g., SImode) object. Handle this case
10192 by doing the extract into an object as wide as the field
10193 (which we know to be the width of a basic mode), then
10194 storing into memory, and changing the mode to BLKmode. */
10195 if (mode1 == VOIDmode
10196 || REG_P (op0) || GET_CODE (op0) == SUBREG
10197 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10198 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10199 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10200 && modifier != EXPAND_CONST_ADDRESS
10201 && modifier != EXPAND_INITIALIZER
10202 && modifier != EXPAND_MEMORY)
10203 /* If the bitfield is volatile and the bitsize
10204 is narrower than the access size of the bitfield,
10205 we need to extract bitfields from the access. */
10206 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10207 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10208 && mode1 != BLKmode
10209 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10210 /* If the field isn't aligned enough to fetch as a memref,
10211 fetch it as a bit field. */
10212 || (mode1 != BLKmode
10213 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10214 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10215 || (MEM_P (op0)
10216 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10217 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10218 && modifier != EXPAND_MEMORY
10219 && ((modifier == EXPAND_CONST_ADDRESS
10220 || modifier == EXPAND_INITIALIZER)
10221 ? STRICT_ALIGNMENT
10222 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10223 || (bitpos % BITS_PER_UNIT != 0)))
10224 /* If the type and the field are a constant size and the
10225 size of the type isn't the same size as the bitfield,
10226 we must use bitfield operations. */
10227 || (bitsize >= 0
10228 && TYPE_SIZE (TREE_TYPE (exp))
10229 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10230 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10231 bitsize)))
10232 {
10233 enum machine_mode ext_mode = mode;
10234
10235 if (ext_mode == BLKmode
10236 && ! (target != 0 && MEM_P (op0)
10237 && MEM_P (target)
10238 && bitpos % BITS_PER_UNIT == 0))
10239 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10240
10241 if (ext_mode == BLKmode)
10242 {
10243 if (target == 0)
10244 target = assign_temp (type, 1, 1);
10245
10246 /* ??? Unlike the similar test a few lines below, this one is
10247 very likely obsolete. */
10248 if (bitsize == 0)
10249 return target;
10250
10251 /* In this case, BITPOS must start at a byte boundary and
10252 TARGET, if specified, must be a MEM. */
10253 gcc_assert (MEM_P (op0)
10254 && (!target || MEM_P (target))
10255 && !(bitpos % BITS_PER_UNIT));
10256
10257 emit_block_move (target,
10258 adjust_address (op0, VOIDmode,
10259 bitpos / BITS_PER_UNIT),
10260 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10261 / BITS_PER_UNIT),
10262 (modifier == EXPAND_STACK_PARM
10263 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10264
10265 return target;
10266 }
10267
10268 /* If we have nothing to extract, the result will be 0 for targets
10269 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10270 return 0 for the sake of consistency, as reading a zero-sized
10271 bitfield is valid in Ada and the value is fully specified. */
10272 if (bitsize == 0)
10273 return const0_rtx;
10274
10275 op0 = validize_mem (op0);
10276
10277 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10278 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10279
10280 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10281 (modifier == EXPAND_STACK_PARM
10282 ? NULL_RTX : target),
10283 ext_mode, ext_mode);
10284
10285 /* If the result is a record type and BITSIZE is narrower than
10286 the mode of OP0, an integral mode, and this is a big endian
10287 machine, we must put the field into the high-order bits. */
10288 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10289 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10290 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10291 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10292 GET_MODE_BITSIZE (GET_MODE (op0))
10293 - bitsize, op0, 1);
10294
10295 /* If the result type is BLKmode, store the data into a temporary
10296 of the appropriate type, but with the mode corresponding to the
10297 mode for the data we have (op0's mode). */
10298 if (mode == BLKmode)
10299 {
10300 rtx new_rtx
10301 = assign_stack_temp_for_type (ext_mode,
10302 GET_MODE_BITSIZE (ext_mode),
10303 type);
10304 emit_move_insn (new_rtx, op0);
10305 op0 = copy_rtx (new_rtx);
10306 PUT_MODE (op0, BLKmode);
10307 }
10308
10309 return op0;
10310 }
10311
10312 /* If the result is BLKmode, use that to access the object
10313 now as well. */
10314 if (mode == BLKmode)
10315 mode1 = BLKmode;
10316
10317 /* Get a reference to just this component. */
10318 if (modifier == EXPAND_CONST_ADDRESS
10319 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10320 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10321 else
10322 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10323
10324 if (op0 == orig_op0)
10325 op0 = copy_rtx (op0);
10326
10327 /* If op0 is a temporary because of forcing to memory, pass only the
10328 type to set_mem_attributes so that the original expression is never
10329 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10330 if (mem_attrs_from_type)
10331 set_mem_attributes (op0, type, 0);
10332 else
10333 set_mem_attributes (op0, exp, 0);
10334
10335 if (REG_P (XEXP (op0, 0)))
10336 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10337
10338 MEM_VOLATILE_P (op0) |= volatilep;
10339 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10340 || modifier == EXPAND_CONST_ADDRESS
10341 || modifier == EXPAND_INITIALIZER)
10342 return op0;
10343
10344 if (target == 0)
10345 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10346
10347 convert_move (target, op0, unsignedp);
10348 return target;
10349 }
10350
10351 case OBJ_TYPE_REF:
10352 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10353
10354 case CALL_EXPR:
10355 /* All valid uses of __builtin_va_arg_pack () are removed during
10356 inlining. */
10357 if (CALL_EXPR_VA_ARG_PACK (exp))
10358 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10359 {
10360 tree fndecl = get_callee_fndecl (exp), attr;
10361
10362 if (fndecl
10363 && (attr = lookup_attribute ("error",
10364 DECL_ATTRIBUTES (fndecl))) != NULL)
10365 error ("%Kcall to %qs declared with attribute error: %s",
10366 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10367 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10368 if (fndecl
10369 && (attr = lookup_attribute ("warning",
10370 DECL_ATTRIBUTES (fndecl))) != NULL)
10371 warning_at (tree_nonartificial_location (exp),
10372 0, "%Kcall to %qs declared with attribute warning: %s",
10373 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10374 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10375
10376 /* Check for a built-in function. */
10377 if (fndecl && DECL_BUILT_IN (fndecl))
10378 {
10379 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10380 return expand_builtin (exp, target, subtarget, tmode, ignore);
10381 }
10382 }
10383 return expand_call (exp, target, ignore);
10384
10385 case VIEW_CONVERT_EXPR:
10386 op0 = NULL_RTX;
10387
10388 /* If we are converting to BLKmode, try to avoid an intermediate
10389 temporary by fetching an inner memory reference. */
10390 if (mode == BLKmode
10391 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10392 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10393 && handled_component_p (treeop0))
10394 {
10395 enum machine_mode mode1;
10396 HOST_WIDE_INT bitsize, bitpos;
10397 tree offset;
10398 int unsignedp;
10399 int volatilep = 0;
10400 tree tem
10401 = get_inner_reference (treeop0, &bitsize, &bitpos,
10402 &offset, &mode1, &unsignedp, &volatilep,
10403 true);
10404 rtx orig_op0;
10405
10406 /* ??? We should work harder and deal with non-zero offsets. */
10407 if (!offset
10408 && (bitpos % BITS_PER_UNIT) == 0
10409 && bitsize >= 0
10410 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10411 {
10412 /* See the normal_inner_ref case for the rationale. */
10413 orig_op0
10414 = expand_expr_real (tem,
10415 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10416 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10417 != INTEGER_CST)
10418 && modifier != EXPAND_STACK_PARM
10419 ? target : NULL_RTX),
10420 VOIDmode,
10421 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10422 NULL, true);
10423
10424 if (MEM_P (orig_op0))
10425 {
10426 op0 = orig_op0;
10427
10428 /* Get a reference to just this component. */
10429 if (modifier == EXPAND_CONST_ADDRESS
10430 || modifier == EXPAND_SUM
10431 || modifier == EXPAND_INITIALIZER)
10432 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10433 else
10434 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10435
10436 if (op0 == orig_op0)
10437 op0 = copy_rtx (op0);
10438
10439 set_mem_attributes (op0, treeop0, 0);
10440 if (REG_P (XEXP (op0, 0)))
10441 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10442
10443 MEM_VOLATILE_P (op0) |= volatilep;
10444 }
10445 }
10446 }
10447
10448 if (!op0)
10449 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10450 NULL, inner_reference_p);
10451
10452 /* If the input and output modes are both the same, we are done. */
10453 if (mode == GET_MODE (op0))
10454 ;
10455 /* If neither mode is BLKmode, and both modes are the same size
10456 then we can use gen_lowpart. */
10457 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10458 && (GET_MODE_PRECISION (mode)
10459 == GET_MODE_PRECISION (GET_MODE (op0)))
10460 && !COMPLEX_MODE_P (GET_MODE (op0)))
10461 {
10462 if (GET_CODE (op0) == SUBREG)
10463 op0 = force_reg (GET_MODE (op0), op0);
10464 temp = gen_lowpart_common (mode, op0);
10465 if (temp)
10466 op0 = temp;
10467 else
10468 {
10469 if (!REG_P (op0) && !MEM_P (op0))
10470 op0 = force_reg (GET_MODE (op0), op0);
10471 op0 = gen_lowpart (mode, op0);
10472 }
10473 }
10474 /* If both types are integral, convert from one mode to the other. */
10475 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10476 op0 = convert_modes (mode, GET_MODE (op0), op0,
10477 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10478 /* If the output type is a bit-field type, do an extraction. */
10479 else if (reduce_bit_field)
10480 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10481 TYPE_UNSIGNED (type), NULL_RTX,
10482 mode, mode);
10483 /* As a last resort, spill op0 to memory, and reload it in a
10484 different mode. */
10485 else if (!MEM_P (op0))
10486 {
10487 /* If the operand is not a MEM, force it into memory. Since we
10488 are going to be changing the mode of the MEM, don't call
10489 force_const_mem for constants because we don't allow pool
10490 constants to change mode. */
10491 tree inner_type = TREE_TYPE (treeop0);
10492
10493 gcc_assert (!TREE_ADDRESSABLE (exp));
10494
10495 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10496 target
10497 = assign_stack_temp_for_type
10498 (TYPE_MODE (inner_type),
10499 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10500
10501 emit_move_insn (target, op0);
10502 op0 = target;
10503 }
10504
10505 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10506 output type is such that the operand is known to be aligned, indicate
10507 that it is. Otherwise, we need only be concerned about alignment for
10508 non-BLKmode results. */
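/* For instance, when the result mode requires more alignment than OP0
   provides, the code below either loads through the target's
   movmisalign pattern, if one exists, or (on strict-alignment targets)
   copies the value through a suitably aligned stack temporary.  */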
10509 if (MEM_P (op0))
10510 {
10511 enum insn_code icode;
10512
10513 if (TYPE_ALIGN_OK (type))
10514 {
10515 /* ??? Copying the MEM without substantially changing it might
10516 run afoul of the code handling volatile memory references in
10517 store_expr, which assumes that TARGET is returned unmodified
10518 if it has been used. */
10519 op0 = copy_rtx (op0);
10520 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10521 }
10522 else if (modifier != EXPAND_WRITE
10523 && modifier != EXPAND_MEMORY
10524 && !inner_reference_p
10525 && mode != BLKmode
10526 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10527 {
10528 /* If the target has special handling for unaligned
10529 loads of this mode, use it. */
10530 if ((icode = optab_handler (movmisalign_optab, mode))
10531 != CODE_FOR_nothing)
10532 {
10533 rtx reg, insn;
10534
10535 op0 = adjust_address (op0, mode, 0);
10536 /* We've already validated the memory, and we're creating a
10537 new pseudo destination. The predicates really can't
10538 fail. */
10539 reg = gen_reg_rtx (mode);
10540
10541 /* Nor can the insn generator. */
10542 insn = GEN_FCN (icode) (reg, op0);
10543 emit_insn (insn);
10544 return reg;
10545 }
10546 else if (STRICT_ALIGNMENT)
10547 {
10548 tree inner_type = TREE_TYPE (treeop0);
10549 HOST_WIDE_INT temp_size
10550 = MAX (int_size_in_bytes (inner_type),
10551 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10552 rtx new_rtx
10553 = assign_stack_temp_for_type (mode, temp_size, type);
10554 rtx new_with_op0_mode
10555 = adjust_address (new_rtx, GET_MODE (op0), 0);
10556
10557 gcc_assert (!TREE_ADDRESSABLE (exp));
10558
10559 if (GET_MODE (op0) == BLKmode)
10560 emit_block_move (new_with_op0_mode, op0,
10561 GEN_INT (GET_MODE_SIZE (mode)),
10562 (modifier == EXPAND_STACK_PARM
10563 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10564 else
10565 emit_move_insn (new_with_op0_mode, op0);
10566
10567 op0 = new_rtx;
10568 }
10569 }
10570
10571 op0 = adjust_address (op0, mode, 0);
10572 }
10573
10574 return op0;
10575
10576 case MODIFY_EXPR:
10577 {
10578 tree lhs = treeop0;
10579 tree rhs = treeop1;
10580 gcc_assert (ignore);
10581
10582 /* Check for |= or &= of a bitfield of size one into another bitfield
10583 of size 1. In this case, (unless we need the result of the
10584 assignment) we can do this more efficiently with a
10585 test followed by an assignment, if necessary.
10586
10587 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10588 things change so we do, this code should be enhanced to
10589 support it. */
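/* For instance (illustrative declarations only), given
     struct { unsigned a : 1, b : 1; } s;
   the statement s.a |= s.b is emitted as "if (s.b) s.a = 1;" and
   s.a &= s.b as "if (!s.b) s.a = 0;".  */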
10590 if (TREE_CODE (lhs) == COMPONENT_REF
10591 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10592 || TREE_CODE (rhs) == BIT_AND_EXPR)
10593 && TREE_OPERAND (rhs, 0) == lhs
10594 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10595 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10596 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10597 {
10598 rtx label = gen_label_rtx ();
10599 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10600 do_jump (TREE_OPERAND (rhs, 1),
10601 value ? label : 0,
10602 value ? 0 : label, -1);
10603 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10604 false);
10605 do_pending_stack_adjust ();
10606 emit_label (label);
10607 return const0_rtx;
10608 }
10609
10610 expand_assignment (lhs, rhs, false);
10611 return const0_rtx;
10612 }
10613
10614 case ADDR_EXPR:
10615 return expand_expr_addr_expr (exp, target, tmode, modifier);
10616
10617 case REALPART_EXPR:
10618 op0 = expand_normal (treeop0);
10619 return read_complex_part (op0, false);
10620
10621 case IMAGPART_EXPR:
10622 op0 = expand_normal (treeop0);
10623 return read_complex_part (op0, true);
10624
10625 case RETURN_EXPR:
10626 case LABEL_EXPR:
10627 case GOTO_EXPR:
10628 case SWITCH_EXPR:
10629 case ASM_EXPR:
10630 /* Expanded in cfgexpand.c. */
10631 gcc_unreachable ();
10632
10633 case TRY_CATCH_EXPR:
10634 case CATCH_EXPR:
10635 case EH_FILTER_EXPR:
10636 case TRY_FINALLY_EXPR:
10637 /* Lowered by tree-eh.c. */
10638 gcc_unreachable ();
10639
10640 case WITH_CLEANUP_EXPR:
10641 case CLEANUP_POINT_EXPR:
10642 case TARGET_EXPR:
10643 case CASE_LABEL_EXPR:
10644 case VA_ARG_EXPR:
10645 case BIND_EXPR:
10646 case INIT_EXPR:
10647 case CONJ_EXPR:
10648 case COMPOUND_EXPR:
10649 case PREINCREMENT_EXPR:
10650 case PREDECREMENT_EXPR:
10651 case POSTINCREMENT_EXPR:
10652 case POSTDECREMENT_EXPR:
10653 case LOOP_EXPR:
10654 case EXIT_EXPR:
10655 case COMPOUND_LITERAL_EXPR:
10656 /* Lowered by gimplify.c. */
10657 gcc_unreachable ();
10658
10659 case FDESC_EXPR:
10660 /* Function descriptors are not valid except as
10661 initialization constants, and should not be expanded. */
10662 gcc_unreachable ();
10663
10664 case WITH_SIZE_EXPR:
10665 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10666 have pulled out the size to use in whatever context it needed. */
10667 return expand_expr_real (treeop0, original_target, tmode,
10668 modifier, alt_rtl, inner_reference_p);
10669
10670 default:
10671 return expand_expr_real_2 (&ops, target, tmode, modifier);
10672 }
10673 }
10674 \f
10675 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10676 signedness of TYPE), possibly returning the result in TARGET. */
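/* For example, reducing a SImode value to a 3-bit unsigned type masks
   it with 7; reducing it to a 3-bit signed type shifts it left by 29
   and then arithmetically right by 29, so that bit 2 is sign-extended
   into the upper bits.  */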
10677 static rtx
10678 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10679 {
10680 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10681 if (target && GET_MODE (target) != GET_MODE (exp))
10682 target = 0;
10683 /* For constant values, reduce using build_int_cst_type. */
10684 if (CONST_INT_P (exp))
10685 {
10686 HOST_WIDE_INT value = INTVAL (exp);
10687 tree t = build_int_cst_type (type, value);
10688 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10689 }
10690 else if (TYPE_UNSIGNED (type))
10691 {
10692 enum machine_mode mode = GET_MODE (exp);
10693 rtx mask = immed_wide_int_const
10694 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10695 return expand_and (mode, exp, mask, target);
10696 }
10697 else
10698 {
10699 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10700 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10701 exp, count, target, 0);
10702 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10703 exp, count, target, 0);
10704 }
10705 }
10706 \f
10707 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10708 when applied to the address of EXP produces an address known to be
10709 aligned more than BIGGEST_ALIGNMENT. */
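/* Such an offset typically has the form
     ((sizetype) -(sizetype) &EXP) & (ALIGN - 1)
   where ALIGN is a power of 2 exceeding BIGGEST_ALIGNMENT / BITS_PER_UNIT;
   adding it to &EXP rounds the address up to a multiple of ALIGN.  */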
10710
10711 static int
10712 is_aligning_offset (const_tree offset, const_tree exp)
10713 {
10714 /* Strip off any conversions. */
10715 while (CONVERT_EXPR_P (offset))
10716 offset = TREE_OPERAND (offset, 0);
10717
10718 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10719 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
10720 if (TREE_CODE (offset) != BIT_AND_EXPR
10721 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10722 || compare_tree_int (TREE_OPERAND (offset, 1),
10723 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10724 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10725 return 0;
10726
10727 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10728 It must be NEGATE_EXPR. Then strip any more conversions. */
10729 offset = TREE_OPERAND (offset, 0);
10730 while (CONVERT_EXPR_P (offset))
10731 offset = TREE_OPERAND (offset, 0);
10732
10733 if (TREE_CODE (offset) != NEGATE_EXPR)
10734 return 0;
10735
10736 offset = TREE_OPERAND (offset, 0);
10737 while (CONVERT_EXPR_P (offset))
10738 offset = TREE_OPERAND (offset, 0);
10739
10740 /* This must now be the address of EXP. */
10741 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10742 }
10743 \f
10744 /* Return the tree node if ARG corresponds to a string constant, or zero
10745 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10746 in bytes within the string that ARG is accessing. The type of the
10747 offset will be `sizetype'. */
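/* For example, for ARG of the form "hello" + 2 (a POINTER_PLUS_EXPR of
   the address of a STRING_CST and the constant 2), this returns the
   STRING_CST and sets *PTR_OFFSET to 2.  */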
10748
10749 tree
10750 string_constant (tree arg, tree *ptr_offset)
10751 {
10752 tree array, offset, lower_bound;
10753 STRIP_NOPS (arg);
10754
10755 if (TREE_CODE (arg) == ADDR_EXPR)
10756 {
10757 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10758 {
10759 *ptr_offset = size_zero_node;
10760 return TREE_OPERAND (arg, 0);
10761 }
10762 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10763 {
10764 array = TREE_OPERAND (arg, 0);
10765 offset = size_zero_node;
10766 }
10767 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10768 {
10769 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10770 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10771 if (TREE_CODE (array) != STRING_CST
10772 && TREE_CODE (array) != VAR_DECL)
10773 return 0;
10774
10775 /* Check if the array has a nonzero lower bound. */
10776 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10777 if (!integer_zerop (lower_bound))
10778 {
10779 /* If the offset and base aren't both constants, return 0. */
10780 if (TREE_CODE (lower_bound) != INTEGER_CST)
10781 return 0;
10782 if (TREE_CODE (offset) != INTEGER_CST)
10783 return 0;
10784 /* Adjust offset by the lower bound. */
10785 offset = size_diffop (fold_convert (sizetype, offset),
10786 fold_convert (sizetype, lower_bound));
10787 }
10788 }
10789 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10790 {
10791 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10792 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10793 if (TREE_CODE (array) != ADDR_EXPR)
10794 return 0;
10795 array = TREE_OPERAND (array, 0);
10796 if (TREE_CODE (array) != STRING_CST
10797 && TREE_CODE (array) != VAR_DECL)
10798 return 0;
10799 }
10800 else
10801 return 0;
10802 }
10803 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10804 {
10805 tree arg0 = TREE_OPERAND (arg, 0);
10806 tree arg1 = TREE_OPERAND (arg, 1);
10807
10808 STRIP_NOPS (arg0);
10809 STRIP_NOPS (arg1);
10810
10811 if (TREE_CODE (arg0) == ADDR_EXPR
10812 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10813 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10814 {
10815 array = TREE_OPERAND (arg0, 0);
10816 offset = arg1;
10817 }
10818 else if (TREE_CODE (arg1) == ADDR_EXPR
10819 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10820 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10821 {
10822 array = TREE_OPERAND (arg1, 0);
10823 offset = arg0;
10824 }
10825 else
10826 return 0;
10827 }
10828 else
10829 return 0;
10830
10831 if (TREE_CODE (array) == STRING_CST)
10832 {
10833 *ptr_offset = fold_convert (sizetype, offset);
10834 return array;
10835 }
10836 else if (TREE_CODE (array) == VAR_DECL
10837 || TREE_CODE (array) == CONST_DECL)
10838 {
10839 int length;
10840 tree init = ctor_for_folding (array);
10841
10842 /* Variables initialized to string literals can be handled too. */
10843 if (init == error_mark_node
10844 || !init
10845 || TREE_CODE (init) != STRING_CST)
10846 return 0;
10847
10848 /* Avoid const char foo[4] = "abcde"; */
10849 if (DECL_SIZE_UNIT (array) == NULL_TREE
10850 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10851 || (length = TREE_STRING_LENGTH (init)) <= 0
10852 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10853 return 0;
10854
10855 /* If the variable is bigger than the string literal, OFFSET must be constant
10856 and within the bounds of the string literal. */
10857 offset = fold_convert (sizetype, offset);
10858 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10859 && (! tree_fits_uhwi_p (offset)
10860 || compare_tree_int (offset, length) >= 0))
10861 return 0;
10862
10863 *ptr_offset = offset;
10864 return init;
10865 }
10866
10867 return 0;
10868 }
10869 \f
10870 /* Generate code to calculate OPS, an exploded expression,
10871 using a store-flag instruction, and return an rtx for the result.
10872 OPS reflects a comparison.
10873
10874 If TARGET is nonzero, store the result there if convenient.
10875
10876 Return zero if there is no suitable set-flag instruction
10877 available on this machine.
10878
10879 Once expand_expr has been called on the arguments of the comparison,
10880 we are committed to doing the store flag, since it is not safe to
10881 re-evaluate the expression. We emit the store-flag insn by calling
10882 emit_store_flag, but only expand the arguments if we have a reason
10883 to believe that emit_store_flag will be successful. If we think that
10884 it will, but it isn't, we have to simulate the store-flag with a
10885 set/jump/set sequence. */
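/* For example, for a comparison such as x != 0 this conceptually sets
   TARGET to the value of (x != 0), using the target's cstore pattern
   via emit_store_flag_force below when one is available.  */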
10886
10887 static rtx
10888 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10889 {
10890 enum rtx_code code;
10891 tree arg0, arg1, type;
10892 tree tem;
10893 enum machine_mode operand_mode;
10894 int unsignedp;
10895 rtx op0, op1;
10896 rtx subtarget = target;
10897 location_t loc = ops->location;
10898
10899 arg0 = ops->op0;
10900 arg1 = ops->op1;
10901
10902 /* Don't crash if the comparison was erroneous. */
10903 if (arg0 == error_mark_node || arg1 == error_mark_node)
10904 return const0_rtx;
10905
10906 type = TREE_TYPE (arg0);
10907 operand_mode = TYPE_MODE (type);
10908 unsignedp = TYPE_UNSIGNED (type);
10909
10910 /* We won't bother with BLKmode store-flag operations because it would mean
10911 passing a lot of information to emit_store_flag. */
10912 if (operand_mode == BLKmode)
10913 return 0;
10914
10915 /* We won't bother with store-flag operations involving function pointers
10916 when function pointers must be canonicalized before comparisons. */
10917 #ifdef HAVE_canonicalize_funcptr_for_compare
10918 if (HAVE_canonicalize_funcptr_for_compare
10919 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10920 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10921 == FUNCTION_TYPE))
10922 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10923 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10924 == FUNCTION_TYPE))))
10925 return 0;
10926 #endif
10927
10928 STRIP_NOPS (arg0);
10929 STRIP_NOPS (arg1);
10930
10931 /* For vector typed comparisons emit code to generate the desired
10932 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10933 expander for this. */
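/* For example, with V4SImode operands this builds
     VEC_COND_EXPR <a OP b, { -1, -1, -1, -1 }, { 0, 0, 0, 0 }>
   so each lane of the result is all-ones where the comparison holds
   and all-zeros where it does not.  */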
10934 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10935 {
10936 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10937 tree if_true = constant_boolean_node (true, ops->type);
10938 tree if_false = constant_boolean_node (false, ops->type);
10939 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10940 }
10941
10942 /* Get the rtx comparison code to use. We know that EXP is a comparison
10943 operation of some type. Some comparisons against 1 and -1 can be
10944 converted to comparisons with zero. Do so here so that the tests
10945 below will be aware that we have a comparison with zero. These
10946 tests will not catch constants in the first operand, but constants
10947 are rarely passed as the first operand. */
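/* For example, x < 1 is rewritten below as x <= 0 and x >= 1 as x > 0;
   for signed operands, x <= -1 becomes x < 0 and x > -1 becomes
   x >= 0.  */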
10948
10949 switch (ops->code)
10950 {
10951 case EQ_EXPR:
10952 code = EQ;
10953 break;
10954 case NE_EXPR:
10955 code = NE;
10956 break;
10957 case LT_EXPR:
10958 if (integer_onep (arg1))
10959 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10960 else
10961 code = unsignedp ? LTU : LT;
10962 break;
10963 case LE_EXPR:
10964 if (! unsignedp && integer_all_onesp (arg1))
10965 arg1 = integer_zero_node, code = LT;
10966 else
10967 code = unsignedp ? LEU : LE;
10968 break;
10969 case GT_EXPR:
10970 if (! unsignedp && integer_all_onesp (arg1))
10971 arg1 = integer_zero_node, code = GE;
10972 else
10973 code = unsignedp ? GTU : GT;
10974 break;
10975 case GE_EXPR:
10976 if (integer_onep (arg1))
10977 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10978 else
10979 code = unsignedp ? GEU : GE;
10980 break;
10981
10982 case UNORDERED_EXPR:
10983 code = UNORDERED;
10984 break;
10985 case ORDERED_EXPR:
10986 code = ORDERED;
10987 break;
10988 case UNLT_EXPR:
10989 code = UNLT;
10990 break;
10991 case UNLE_EXPR:
10992 code = UNLE;
10993 break;
10994 case UNGT_EXPR:
10995 code = UNGT;
10996 break;
10997 case UNGE_EXPR:
10998 code = UNGE;
10999 break;
11000 case UNEQ_EXPR:
11001 code = UNEQ;
11002 break;
11003 case LTGT_EXPR:
11004 code = LTGT;
11005 break;
11006
11007 default:
11008 gcc_unreachable ();
11009 }
11010
11011 /* Put a constant second. */
11012 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11013 || TREE_CODE (arg0) == FIXED_CST)
11014 {
11015 tem = arg0; arg0 = arg1; arg1 = tem;
11016 code = swap_condition (code);
11017 }
11018
11019 /* If this is an equality or inequality test of a single bit, we can
11020 do this by shifting the bit being tested to the low-order bit and
11021 masking the result with the constant 1. If the condition was EQ,
11022 we xor it with 1. This does not require an scc insn and is faster
11023 than an scc insn even if we have it.
11024
11025 The code to make this transformation was moved into fold_single_bit_test,
11026 so we just call into the folder and expand its result. */
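/* For example, (x & 4) != 0 is folded to (x >> 2) & 1, and
   (x & 4) == 0 to ((x >> 2) & 1) ^ 1, which are then expanded
   normally.  */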
11027
11028 if ((code == NE || code == EQ)
11029 && integer_zerop (arg1)
11030 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11031 {
11032 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11033 if (srcstmt
11034 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11035 {
11036 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11037 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11038 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11039 gimple_assign_rhs1 (srcstmt),
11040 gimple_assign_rhs2 (srcstmt));
11041 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11042 if (temp)
11043 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11044 }
11045 }
11046
11047 if (! get_subtarget (target)
11048 || GET_MODE (subtarget) != operand_mode)
11049 subtarget = 0;
11050
11051 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11052
11053 if (target == 0)
11054 target = gen_reg_rtx (mode);
11055
11056 /* Try a cstore if possible. */
11057 return emit_store_flag_force (target, code, op0, op1,
11058 operand_mode, unsignedp,
11059 (TYPE_PRECISION (ops->type) == 1
11060 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11061 }
11062 \f
11063
11064 /* Stubs in case we haven't got a casesi insn. */
11065 #ifndef HAVE_casesi
11066 # define HAVE_casesi 0
11067 # define gen_casesi(a, b, c, d, e) (0)
11068 # define CODE_FOR_casesi CODE_FOR_nothing
11069 #endif
11070
11071 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11072 0 otherwise (i.e. if there is no casesi instruction).
11073
11074 DEFAULT_PROBABILITY is the probability of jumping to the default
11075 label. */
11076 int
11077 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11078 rtx table_label, rtx default_label, rtx fallback_label,
11079 int default_probability)
11080 {
11081 struct expand_operand ops[5];
11082 enum machine_mode index_mode = SImode;
11083 rtx op1, op2, index;
11084
11085 if (! HAVE_casesi)
11086 return 0;
11087
11088 /* Convert the index to SImode. */
11089 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11090 {
11091 enum machine_mode omode = TYPE_MODE (index_type);
11092 rtx rangertx = expand_normal (range);
11093
11094 /* We must handle the endpoints in the original mode. */
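/* Truncating INDEX to SImode first would be wrong: a value differing
   from an in-range index only in the high bits could alias into the
   table.  So subtract and range-check in OMODE, and only then narrow
   the index.  */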
11095 index_expr = build2 (MINUS_EXPR, index_type,
11096 index_expr, minval);
11097 minval = integer_zero_node;
11098 index = expand_normal (index_expr);
11099 if (default_label)
11100 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11101 omode, 1, default_label,
11102 default_probability);
11103 /* Now we can safely truncate. */
11104 index = convert_to_mode (index_mode, index, 0);
11105 }
11106 else
11107 {
11108 if (TYPE_MODE (index_type) != index_mode)
11109 {
11110 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11111 index_expr = fold_convert (index_type, index_expr);
11112 }
11113
11114 index = expand_normal (index_expr);
11115 }
11116
11117 do_pending_stack_adjust ();
11118
11119 op1 = expand_normal (minval);
11120 op2 = expand_normal (range);
11121
11122 create_input_operand (&ops[0], index, index_mode);
11123 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11124 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11125 create_fixed_operand (&ops[3], table_label);
11126 create_fixed_operand (&ops[4], (default_label
11127 ? default_label
11128 : fallback_label));
11129 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11130 return 1;
11131 }
11132
11133 /* Attempt to generate a tablejump instruction; same concept. */
11134 #ifndef HAVE_tablejump
11135 #define HAVE_tablejump 0
11136 #define gen_tablejump(x, y) (0)
11137 #endif
11138
11139 /* Subroutine of the next function.
11140
11141 INDEX is the value being switched on, with the lowest value
11142 in the table already subtracted.
11143 MODE is its expected mode (needed if INDEX is constant).
11144 RANGE is the length of the jump table.
11145 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11146
11147 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11148 index value is out of range.
11149 DEFAULT_PROBABILITY is the probability of jumping to
11150 the default label. */
11151
11152 static void
11153 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11154 rtx default_label, int default_probability)
11155 {
11156 rtx temp, vector;
11157
11158 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11159 cfun->cfg->max_jumptable_ents = INTVAL (range);
11160
11161 /* Do an unsigned comparison (in the proper mode) between the index
11162 expression and the value which represents the length of the range.
11163 Since we just finished subtracting the lower bound of the range
11164 from the index expression, this comparison allows us to simultaneously
11165 check that the original index expression value is both greater than
11166 or equal to the minimum value of the range and less than or equal to
11167 the maximum value of the range. */
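/* For example, with a lower bound of 5 and RANGE of 5 (cases 5 .. 10),
   an original index of 3 arrives here as 3 - 5, i.e. 0xfffffffe when
   viewed unsigned in a 32-bit index mode, which exceeds 5 and so
   branches to the default label.  */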
11168
11169 if (default_label)
11170 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11171 default_label, default_probability);
11172
11173
11174 /* If index is in range, it must fit in Pmode.
11175 Convert to Pmode so we can index with it. */
11176 if (mode != Pmode)
11177 index = convert_to_mode (Pmode, index, 1);
11178
11179 /* Don't let a MEM slip through, because then INDEX that comes
11180 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11181 and break_out_memory_refs will go to work on it and mess it up. */
11182 #ifdef PIC_CASE_VECTOR_ADDRESS
11183 if (flag_pic && !REG_P (index))
11184 index = copy_to_mode_reg (Pmode, index);
11185 #endif
11186
11187 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11188 GET_MODE_SIZE, because this indicates how large insns are. The other
11189 uses should all be Pmode, because they are addresses. This code
11190 could fail if addresses and insns are not the same size. */
11191 index = simplify_gen_binary (MULT, Pmode, index,
11192 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11193 Pmode));
11194 index = simplify_gen_binary (PLUS, Pmode, index,
11195 gen_rtx_LABEL_REF (Pmode, table_label));
11196
11197 #ifdef PIC_CASE_VECTOR_ADDRESS
11198 if (flag_pic)
11199 index = PIC_CASE_VECTOR_ADDRESS (index);
11200 else
11201 #endif
11202 index = memory_address (CASE_VECTOR_MODE, index);
11203 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11204 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11205 convert_move (temp, vector, 0);
11206
11207 emit_jump_insn (gen_tablejump (temp, table_label));
11208
11209 /* If we are generating PIC code or if the table is PC-relative, the
11210 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11211 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11212 emit_barrier ();
11213 }
11214
11215 int
11216 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11217 rtx table_label, rtx default_label, int default_probability)
11218 {
11219 rtx index;
11220
11221 if (! HAVE_tablejump)
11222 return 0;
11223
11224 index_expr = fold_build2 (MINUS_EXPR, index_type,
11225 fold_convert (index_type, index_expr),
11226 fold_convert (index_type, minval));
11227 index = expand_normal (index_expr);
11228 do_pending_stack_adjust ();
11229
11230 do_tablejump (index, TYPE_MODE (index_type),
11231 convert_modes (TYPE_MODE (index_type),
11232 TYPE_MODE (TREE_TYPE (range)),
11233 expand_normal (range),
11234 TYPE_UNSIGNED (TREE_TYPE (range))),
11235 table_label, default_label, default_probability);
11236 return 1;
11237 }
11238
11239 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11240 static rtx
11241 const_vector_from_tree (tree exp)
11242 {
11243 rtvec v;
11244 unsigned i;
11245 int units;
11246 tree elt;
11247 enum machine_mode inner, mode;
11248
11249 mode = TYPE_MODE (TREE_TYPE (exp));
11250
11251 if (initializer_zerop (exp))
11252 return CONST0_RTX (mode);
11253
11254 units = GET_MODE_NUNITS (mode);
11255 inner = GET_MODE_INNER (mode);
11256
11257 v = rtvec_alloc (units);
11258
11259 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11260 {
11261 elt = VECTOR_CST_ELT (exp, i);
11262
11263 if (TREE_CODE (elt) == REAL_CST)
11264 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11265 inner);
11266 else if (TREE_CODE (elt) == FIXED_CST)
11267 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11268 inner);
11269 else
11270 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11271 }
11272
11273 return gen_rtx_CONST_VECTOR (mode, v);
11274 }
11275
11276 /* Build a decl for a personality function given a language prefix. */
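/* For example, LANG "gxx" yields "__gxx_personality_v0" with DWARF2 or
   target unwinding, and "__gxx_personality_sj0" with SJLJ unwinding.  */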
11277
11278 tree
11279 build_personality_function (const char *lang)
11280 {
11281 const char *unwind_and_version;
11282 tree decl, type;
11283 char *name;
11284
11285 switch (targetm_common.except_unwind_info (&global_options))
11286 {
11287 case UI_NONE:
11288 return NULL;
11289 case UI_SJLJ:
11290 unwind_and_version = "_sj0";
11291 break;
11292 case UI_DWARF2:
11293 case UI_TARGET:
11294 unwind_and_version = "_v0";
11295 break;
11296 case UI_SEH:
11297 unwind_and_version = "_seh0";
11298 break;
11299 default:
11300 gcc_unreachable ();
11301 }
11302
11303 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11304
11305 type = build_function_type_list (integer_type_node, integer_type_node,
11306 long_long_unsigned_type_node,
11307 ptr_type_node, ptr_type_node, NULL_TREE);
11308 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11309 get_identifier (name), type);
11310 DECL_ARTIFICIAL (decl) = 1;
11311 DECL_EXTERNAL (decl) = 1;
11312 TREE_PUBLIC (decl) = 1;
11313
11314 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11315 are the flags assigned by targetm.encode_section_info. */
11316 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11317
11318 return decl;
11319 }
11320
11321 /* Extracts the personality function of DECL and returns the corresponding
11322 libfunc. */
11323
11324 rtx
11325 get_personality_function (tree decl)
11326 {
11327 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11328 enum eh_personality_kind pk;
11329
11330 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11331 if (pk == eh_personality_none)
11332 return NULL;
11333
11334 if (!personality
11335 && pk == eh_personality_any)
11336 personality = lang_hooks.eh_personality ();
11337
11338 if (pk == eh_personality_lang)
11339 gcc_assert (personality != NULL_TREE);
11340
11341 return XEXP (DECL_RTL (personality), 0);
11342 }
11343
11344 #include "gt-expr.h"