1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62 #ifdef PUSH_ROUNDING
63
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
69
70 #endif
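/* For example, on a target where STACK_GROWS_DOWNWARD is defined but
   ARGS_GROW_DOWNWARD is not, exactly one of the two macros is defined, so
   PUSH_ARGS_REVERSED gets defined and the arguments are pushed from last
   to first.  */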
71
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
79
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
104 };
105
106 /* This structure is used by store_by_pieces to describe the store or
107 clear to be performed. */
108
109 struct store_by_pieces
110 {
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
120 };
121
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx, bool);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, int);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
151
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
162 static void write_complex_part (rtx, rtx, bool);
163
164 /* Record for each mode whether we can move a register directly to or
165 from an object of that mode in memory. If we can't, we won't try
166 to use that mode directly when accessing a field of that mode. */
167
168 static char direct_load[NUM_MACHINE_MODES];
169 static char direct_store[NUM_MACHINE_MODES];
170
171 /* Record for each mode whether we can float-extend from memory. */
172
173 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
180 < (unsigned int) MOVE_RATIO)
181 #endif
182
183 /* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) CLEAR_RATIO)
189 #endif
190
191 /* This macro is used to determine whether store_by_pieces should be
192 called to "memset" storage with byte values other than zero, or
193 to "memcpy" storage when the source is a constant string. */
194 #ifndef STORE_BY_PIECES_P
195 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
196 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
197 < (unsigned int) MOVE_RATIO)
198 #endif
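/* The three predicates above all have the same shape: estimate how many
   move insns a by-pieces expansion would take and compare that count
   against the target's cost ratio.  For illustration, emit_block_move
   below asks

     MOVE_BY_PIECES_P (INTVAL (size), align)

   for constant-size copies and falls back to a movmem pattern or a
   libcall when the answer is false.  */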
199
200 /* This array records the insn_code of insns to perform block moves. */
201 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202
203 /* This array records the insn_code of insns to perform block clears. */
204 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
205
206 /* These arrays record the insn_code of two different kinds of insns
207 to perform block compares. */
208 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
209 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210
211 /* Synchronization primitives. */
212 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234
235 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236
237 #ifndef SLOW_UNALIGNED_ACCESS
238 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
239 #endif
240 \f
241 /* This is run once per compilation to set up which modes can be used
242 directly in memory and which modes can be float-extended from memory. */
243
244 void
245 init_expr_once (void)
246 {
247 rtx insn, pat;
248 enum machine_mode mode;
249 int num_clobbers;
250 rtx mem, mem1;
251 rtx reg;
252
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg = gen_rtx_REG (VOIDmode, -1);
262
263 insn = rtx_alloc (INSN);
264 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265 PATTERN (insn) = pat;
266
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
269 {
270 int regno;
271
272 direct_load[(int) mode] = direct_store[(int) mode] = 0;
273 PUT_MODE (mem, mode);
274 PUT_MODE (mem1, mode);
275 PUT_MODE (reg, mode);
276
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
279
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 regno++)
284 {
285 if (! HARD_REGNO_MODE_OK (regno, mode))
286 continue;
287
288 REGNO (reg) = regno;
289
290 SET_SRC (pat) = mem;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
294
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
299
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
304
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
309 }
310 }
311
312 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313
314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315 mode = GET_MODE_WIDER_MODE (mode))
316 {
317 enum machine_mode srcmode;
318 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
319 srcmode = GET_MODE_WIDER_MODE (srcmode))
320 {
321 enum insn_code ic;
322
323 ic = can_extend_p (mode, srcmode, 0);
324 if (ic == CODE_FOR_nothing)
325 continue;
326
327 PUT_MODE (mem, srcmode);
328
329 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 float_extend_from_mem[mode][srcmode] = true;
331 }
332 }
333 }
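/* The direct_load and direct_store tables filled in above are consulted
   by convert_move and convert_modes below when deciding whether a MEM
   can safely be referenced in a narrower mode; float_extend_from_mem is
   consulted when expanding float extensions later in this file.  */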
334
335 /* This is run at the start of compiling a function. */
336
337 void
338 init_expr (void)
339 {
340 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 }
342 \f
343 /* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
347
348 void
349 convert_move (rtx to, rtx from, int unsignedp)
350 {
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
353 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
354 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
355 enum insn_code code;
356 rtx libcall;
357
358 /* rtx code for making an equivalent value. */
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361
362
363 gcc_assert (to_real == from_real);
364
365 /* If the source and destination are already the same, then there's
366 nothing to do. */
367 if (to == from)
368 return;
369
370 /* If FROM is a SUBREG that indicates that we have already done at least
371 the required extension, strip it. We don't handle such SUBREGs as
372 TO here. */
373
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 {
385 emit_move_insn (to, from);
386 return;
387 }
388
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 {
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
395 else
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397
398 emit_move_insn (to, from);
399 return;
400 }
401
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 {
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
406 return;
407 }
408
409 if (to_real)
410 {
411 rtx value, insns;
412 convert_optab tab;
413
414 gcc_assert (GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode));
416
417 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
418 tab = sext_optab;
419 else
420 tab = trunc_optab;
421
422 /* Try converting directly if the insn is supported. */
423
424 code = tab->handlers[to_mode][from_mode].insn_code;
425 if (code != CODE_FOR_nothing)
426 {
427 emit_unop_insn (code, to, from,
428 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
429 return;
430 }
431
432 /* Otherwise use a libcall. */
433 libcall = tab->handlers[to_mode][from_mode].libfunc;
434
435 /* Is this conversion implemented yet? */
436 gcc_assert (libcall);
437
438 start_sequence ();
439 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
440 1, from, from_mode);
441 insns = get_insns ();
442 end_sequence ();
443 emit_libcall_block (insns, to, value,
444 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
445 from)
446 : gen_rtx_FLOAT_EXTEND (to_mode, from));
447 return;
448 }
449
450 /* Handle pointer conversion. */ /* SPEE 900220. */
451 /* Targets are expected to provide conversion insns between PxImode and
452 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
453 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
454 {
455 enum machine_mode full_mode
456 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
457
458 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
459 != CODE_FOR_nothing);
460
461 if (full_mode != from_mode)
462 from = convert_to_mode (full_mode, from, unsignedp);
463 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
464 to, from, UNKNOWN);
465 return;
466 }
467 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
468 {
469 rtx new_from;
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
472
473 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
474 != CODE_FOR_nothing);
475
476 if (to_mode == full_mode)
477 {
478 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
479 to, from, UNKNOWN);
480 return;
481 }
482
483 new_from = gen_reg_rtx (full_mode);
484 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
485 new_from, from, UNKNOWN);
486
487 /* else proceed to integer conversions below. */
488 from_mode = full_mode;
489 from = new_from;
490 }
491
492 /* Now both modes are integers. */
493
494 /* Handle expanding beyond a word. */
495 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
496 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
497 {
498 rtx insns;
499 rtx lowpart;
500 rtx fill_value;
501 rtx lowfrom;
502 int i;
503 enum machine_mode lowpart_mode;
504 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
505
506 /* Try converting directly if the insn is supported. */
507 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
508 != CODE_FOR_nothing)
509 {
510 /* If FROM is a SUBREG, put it into a register. Do this
511 so that we always generate the same set of insns for
512 better cse'ing; if an intermediate assignment occurred,
513 we won't be doing the operation directly on the SUBREG. */
514 if (optimize > 0 && GET_CODE (from) == SUBREG)
515 from = force_reg (from_mode, from);
516 emit_unop_insn (code, to, from, equiv_code);
517 return;
518 }
519 /* Next, try converting via full word. */
520 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
521 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
522 != CODE_FOR_nothing))
523 {
524 if (REG_P (to))
525 {
526 if (reg_overlap_mentioned_p (to, from))
527 from = force_reg (from_mode, from);
528 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
529 }
530 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
531 emit_unop_insn (code, to,
532 gen_lowpart (word_mode, to), equiv_code);
533 return;
534 }
535
536 /* No special multiword conversion insn; do it by hand. */
537 start_sequence ();
538
539 /* Since we will turn this into a no conflict block, we must ensure
540 that the source does not overlap the target. */
541
542 if (reg_overlap_mentioned_p (to, from))
543 from = force_reg (from_mode, from);
544
545 /* Get a copy of FROM widened to a word, if necessary. */
546 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
547 lowpart_mode = word_mode;
548 else
549 lowpart_mode = from_mode;
550
551 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
552
553 lowpart = gen_lowpart (lowpart_mode, to);
554 emit_move_insn (lowpart, lowfrom);
555
556 /* Compute the value to put in each remaining word. */
557 if (unsignedp)
558 fill_value = const0_rtx;
559 else
560 {
561 #ifdef HAVE_slt
562 if (HAVE_slt
563 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
564 && STORE_FLAG_VALUE == -1)
565 {
566 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
567 lowpart_mode, 0);
568 fill_value = gen_reg_rtx (word_mode);
569 emit_insn (gen_slt (fill_value));
570 }
571 else
572 #endif
573 {
574 fill_value
575 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
576 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
577 NULL_RTX, 0);
578 fill_value = convert_to_mode (word_mode, fill_value, 1);
579 }
580 }
581
582 /* Fill the remaining words. */
583 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
584 {
585 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
586 rtx subword = operand_subword (to, index, 1, to_mode);
587
588 gcc_assert (subword);
589
590 if (fill_value != subword)
591 emit_move_insn (subword, fill_value);
592 }
593
594 insns = get_insns ();
595 end_sequence ();
596
597 emit_no_conflict_block (insns, to, from, NULL_RTX,
598 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
599 return;
600 }
601
602 /* Truncating multi-word to a word or less. */
603 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
604 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
605 {
606 if (!((MEM_P (from)
607 && ! MEM_VOLATILE_P (from)
608 && direct_load[(int) to_mode]
609 && ! mode_dependent_address_p (XEXP (from, 0)))
610 || REG_P (from)
611 || GET_CODE (from) == SUBREG))
612 from = force_reg (from_mode, from);
613 convert_move (to, gen_lowpart (word_mode, from), 0);
614 return;
615 }
616
617 /* Now follow all the conversions between integers
618 no more than a word long. */
619
620 /* For truncation, usually we can just refer to FROM in a narrower mode. */
621 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
622 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
623 GET_MODE_BITSIZE (from_mode)))
624 {
625 if (!((MEM_P (from)
626 && ! MEM_VOLATILE_P (from)
627 && direct_load[(int) to_mode]
628 && ! mode_dependent_address_p (XEXP (from, 0)))
629 || REG_P (from)
630 || GET_CODE (from) == SUBREG))
631 from = force_reg (from_mode, from);
632 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
633 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
634 from = copy_to_reg (from);
635 emit_move_insn (to, gen_lowpart (to_mode, from));
636 return;
637 }
638
639 /* Handle extension. */
640 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
641 {
642 /* Convert directly if that works. */
643 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
644 != CODE_FOR_nothing)
645 {
646 if (flag_force_mem)
647 from = force_not_mem (from);
648
649 emit_unop_insn (code, to, from, equiv_code);
650 return;
651 }
652 else
653 {
654 enum machine_mode intermediate;
655 rtx tmp;
656 tree shift_amount;
657
658 /* Search for a mode to convert via. */
659 for (intermediate = from_mode; intermediate != VOIDmode;
660 intermediate = GET_MODE_WIDER_MODE (intermediate))
661 if (((can_extend_p (to_mode, intermediate, unsignedp)
662 != CODE_FOR_nothing)
663 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
664 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
665 GET_MODE_BITSIZE (intermediate))))
666 && (can_extend_p (intermediate, from_mode, unsignedp)
667 != CODE_FOR_nothing))
668 {
669 convert_move (to, convert_to_mode (intermediate, from,
670 unsignedp), unsignedp);
671 return;
672 }
673
674 /* No suitable intermediate mode.
675 Generate what we need with shifts. */
676 shift_amount = build_int_cst (NULL_TREE,
677 GET_MODE_BITSIZE (to_mode)
678 - GET_MODE_BITSIZE (from_mode));
679 from = gen_lowpart (to_mode, force_reg (from_mode, from));
680 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
681 to, unsignedp);
682 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
683 to, unsignedp);
684 if (tmp != to)
685 emit_move_insn (to, tmp);
686 return;
687 }
688 }
689
690 /* Support special truncate insns for certain modes. */
691 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
692 {
693 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
694 to, from, UNKNOWN);
695 return;
696 }
697
698 /* Handle truncation of volatile memrefs, and so on;
699 the things that couldn't be truncated directly,
700 and for which there was no special instruction.
701
702 ??? Code above formerly short-circuited this, for most integer
703 mode pairs, with a force_reg in from_mode followed by a recursive
704 call to this routine. Appears always to have been wrong. */
705 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
706 {
707 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
708 emit_move_insn (to, temp);
709 return;
710 }
711
712 /* Mode combination is not recognized. */
713 gcc_unreachable ();
714 }
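/* A note on the strategy above: convert_move prefers, in order, a single
   extend or truncate insn for the mode pair, a conversion through
   word_mode or another intermediate mode, and finally an explicit
   multi-word or shift-based expansion.  For instance, widening a QImode
   value to SImode with UNSIGNEDP nonzero uses the target's zero-extend
   pattern whenever can_extend_p reports one.  */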
715
716 /* Return an rtx for a value that would result
717 from converting X to mode MODE.
718 Both X and MODE may be floating, or both integer.
719 UNSIGNEDP is nonzero if X is an unsigned value.
720 This can be done by referring to a part of X in place
721 or by copying to a new temporary with conversion. */
722
723 rtx
724 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
725 {
726 return convert_modes (mode, VOIDmode, x, unsignedp);
727 }
728
729 /* Return an rtx for a value that would result
730 from converting X from mode OLDMODE to mode MODE.
731 Both modes may be floating, or both integer.
732 UNSIGNEDP is nonzero if X is an unsigned value.
733
734 This can be done by referring to a part of X in place
735 or by copying to a new temporary with conversion.
736
737 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
738
739 rtx
740 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
741 {
742 rtx temp;
743
744 /* If FROM is a SUBREG that indicates that we have already done at least
745 the required extension, strip it. */
746
747 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
748 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
749 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
750 x = gen_lowpart (mode, x);
751
752 if (GET_MODE (x) != VOIDmode)
753 oldmode = GET_MODE (x);
754
755 if (mode == oldmode)
756 return x;
757
758 /* There is one case that we must handle specially: If we are converting
759 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
760 we are to interpret the constant as unsigned, gen_lowpart will do the
761 wrong thing if the constant appears negative. What we want to do is
762 make the high-order word of the constant zero, not all ones. */
763
764 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
765 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
766 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
767 {
768 HOST_WIDE_INT val = INTVAL (x);
769
770 if (oldmode != VOIDmode
771 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
772 {
773 int width = GET_MODE_BITSIZE (oldmode);
774
775 /* We need to zero extend VAL. */
776 val &= ((HOST_WIDE_INT) 1 << width) - 1;
777 }
778
779 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
780 }
781
782 /* We can do this with a gen_lowpart if both desired and current modes
783 are integer, and this is either a constant integer, a register, or a
784 non-volatile MEM. Except for the constant case where MODE is no
785 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
786
787 if ((GET_CODE (x) == CONST_INT
788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
789 || (GET_MODE_CLASS (mode) == MODE_INT
790 && GET_MODE_CLASS (oldmode) == MODE_INT
791 && (GET_CODE (x) == CONST_DOUBLE
792 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
793 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
794 && direct_load[(int) mode])
795 || (REG_P (x)
796 && (! HARD_REGISTER_P (x)
797 || HARD_REGNO_MODE_OK (REGNO (x), mode))
798 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
799 GET_MODE_BITSIZE (GET_MODE (x)))))))))
800 {
801 /* ?? If we don't know OLDMODE, we have to assume here that
802 X does not need sign- or zero-extension. This may not be
803 the case, but it's the best we can do. */
804 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
805 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
806 {
807 HOST_WIDE_INT val = INTVAL (x);
808 int width = GET_MODE_BITSIZE (oldmode);
809
810 /* We must sign or zero-extend in this case. Start by
811 zero-extending, then sign extend if we need to. */
812 val &= ((HOST_WIDE_INT) 1 << width) - 1;
813 if (! unsignedp
814 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
815 val |= (HOST_WIDE_INT) (-1) << width;
816
817 return gen_int_mode (val, mode);
818 }
819
820 return gen_lowpart (mode, x);
821 }
822
823 /* Converting an integer constant into MODE is always equivalent to a
824 subreg operation. */
825 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
826 {
827 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
828 return simplify_gen_subreg (mode, x, oldmode, 0);
829 }
830
831 temp = gen_reg_rtx (mode);
832 convert_move (temp, x, unsignedp);
833 return temp;
834 }
835 \f
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837 store efficiently. Due to internal GCC limitations, this is
838 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839 for an immediate constant. */
840
841 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
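/* For example (hypothetical host and target values): with a 64-bit
   HOST_WIDE_INT the second MIN operand is 16, so a target whose
   MOVE_MAX_PIECES is 8 gets STORE_MAX_PIECES == 8; the constant
   representation limit only matters when MOVE_MAX_PIECES exceeds
   2 * sizeof (HOST_WIDE_INT).  */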
842
843 /* Determine whether the LEN bytes can be moved by using several move
844 instructions. Return nonzero if a call to move_by_pieces should
845 succeed. */
846
847 int
848 can_move_by_pieces (unsigned HOST_WIDE_INT len,
849 unsigned int align ATTRIBUTE_UNUSED)
850 {
851 return MOVE_BY_PIECES_P (len, align);
852 }
853
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855 block TO. (These are MEM rtx's with BLKmode).
856
857 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858 used to push FROM to the stack.
859
860 ALIGN is the maximum stack alignment we can assume.
861
862 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
863 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
864 stpcpy. */
865
866 rtx
867 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
868 unsigned int align, int endp)
869 {
870 struct move_by_pieces data;
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum machine_mode mode = VOIDmode, tmode;
874 enum insn_code icode;
875
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
877
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
881 {
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
889 }
890 else
891 {
892 to_addr = NULL_RTX;
893 data.to = NULL_RTX;
894 data.autinc_to = 1;
895 #ifdef STACK_GROWS_DOWNWARD
896 data.reverse = 1;
897 #else
898 data.reverse = 0;
899 #endif
900 }
901 data.to_addr = to_addr;
902 data.from = from;
903 data.autinc_from
904 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
905 || GET_CODE (from_addr) == POST_INC
906 || GET_CODE (from_addr) == POST_DEC);
907
908 data.explicit_inc_from = 0;
909 data.explicit_inc_to = 0;
910 if (data.reverse) data.offset = len;
911 data.len = len;
912
913 /* If copying requires more than two move insns,
914 copy addresses to registers (to make displacements shorter)
915 and use post-increment if available. */
916 if (!(data.autinc_from && data.autinc_to)
917 && move_by_pieces_ninsns (len, align, max_size) > 2)
918 {
919 /* Find the mode of the largest move... */
920 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
921 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
922 if (GET_MODE_SIZE (tmode) < max_size)
923 mode = tmode;
924
925 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
926 {
927 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
928 data.autinc_from = 1;
929 data.explicit_inc_from = -1;
930 }
931 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
932 {
933 data.from_addr = copy_addr_to_reg (from_addr);
934 data.autinc_from = 1;
935 data.explicit_inc_from = 1;
936 }
937 if (!data.autinc_from && CONSTANT_P (from_addr))
938 data.from_addr = copy_addr_to_reg (from_addr);
939 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
940 {
941 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
942 data.autinc_to = 1;
943 data.explicit_inc_to = -1;
944 }
945 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
946 {
947 data.to_addr = copy_addr_to_reg (to_addr);
948 data.autinc_to = 1;
949 data.explicit_inc_to = 1;
950 }
951 if (!data.autinc_to && CONSTANT_P (to_addr))
952 data.to_addr = copy_addr_to_reg (to_addr);
953 }
954
955 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
956 if (align >= GET_MODE_ALIGNMENT (tmode))
957 align = GET_MODE_ALIGNMENT (tmode);
958 else
959 {
960 enum machine_mode xmode;
961
962 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
963 tmode != VOIDmode;
964 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
965 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
966 || SLOW_UNALIGNED_ACCESS (tmode, align))
967 break;
968
969 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
970 }
971
972 /* First move what we can in the largest integer mode, then go to
973 successively smaller modes. */
974
975 while (max_size > 1)
976 {
977 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
978 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
979 if (GET_MODE_SIZE (tmode) < max_size)
980 mode = tmode;
981
982 if (mode == VOIDmode)
983 break;
984
985 icode = mov_optab->handlers[(int) mode].insn_code;
986 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
987 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
988
989 max_size = GET_MODE_SIZE (mode);
990 }
991
992 /* The code above should have handled everything. */
993 gcc_assert (!data.len);
994
995 if (endp)
996 {
997 rtx to1;
998
999 gcc_assert (!data.reverse);
1000 if (data.autinc_to)
1001 {
1002 if (endp == 2)
1003 {
1004 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1005 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1006 else
1007 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1008 -1));
1009 }
1010 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1011 data.offset);
1012 }
1013 else
1014 {
1015 if (endp == 2)
1016 --data.offset;
1017 to1 = adjust_address (data.to, QImode, data.offset);
1018 }
1019 return to1;
1020 }
1021 else
1022 return data.to;
1023 }
1024
1025 /* Return number of insns required to move L bytes by pieces.
1026 ALIGN (in bits) is maximum alignment we can assume. */
1027
1028 static unsigned HOST_WIDE_INT
1029 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1030 unsigned int max_size)
1031 {
1032 unsigned HOST_WIDE_INT n_insns = 0;
1033 enum machine_mode tmode;
1034
1035 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1036 if (align >= GET_MODE_ALIGNMENT (tmode))
1037 align = GET_MODE_ALIGNMENT (tmode);
1038 else
1039 {
1040 enum machine_mode tmode, xmode;
1041
1042 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1043 tmode != VOIDmode;
1044 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1045 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1046 || SLOW_UNALIGNED_ACCESS (tmode, align))
1047 break;
1048
1049 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1050 }
1051
1052 while (max_size > 1)
1053 {
1054 enum machine_mode mode = VOIDmode;
1055 enum insn_code icode;
1056
1057 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1058 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1059 if (GET_MODE_SIZE (tmode) < max_size)
1060 mode = tmode;
1061
1062 if (mode == VOIDmode)
1063 break;
1064
1065 icode = mov_optab->handlers[(int) mode].insn_code;
1066 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1067 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1068
1069 max_size = GET_MODE_SIZE (mode);
1070 }
1071
1072 gcc_assert (!l);
1073 return n_insns;
1074 }
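/* Worked example (assuming, for illustration, a target where
   MOVE_MAX_PIECES is 4 and the alignment check always passes): called
   through MOVE_BY_PIECES_P with L == 10, the loop above counts two
   SImode moves (8 bytes), one HImode move (2 bytes) and no QImode move,
   so it returns 3.  */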
1075
1076 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1077 with move instructions for mode MODE. GENFUN is the gen_... function
1078 to make a move insn for that mode. DATA has all the other info. */
1079
1080 static void
1081 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1082 struct move_by_pieces *data)
1083 {
1084 unsigned int size = GET_MODE_SIZE (mode);
1085 rtx to1 = NULL_RTX, from1;
1086
1087 while (data->len >= size)
1088 {
1089 if (data->reverse)
1090 data->offset -= size;
1091
1092 if (data->to)
1093 {
1094 if (data->autinc_to)
1095 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1096 data->offset);
1097 else
1098 to1 = adjust_address (data->to, mode, data->offset);
1099 }
1100
1101 if (data->autinc_from)
1102 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1103 data->offset);
1104 else
1105 from1 = adjust_address (data->from, mode, data->offset);
1106
1107 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1108 emit_insn (gen_add2_insn (data->to_addr,
1109 GEN_INT (-(HOST_WIDE_INT)size)));
1110 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1111 emit_insn (gen_add2_insn (data->from_addr,
1112 GEN_INT (-(HOST_WIDE_INT)size)));
1113
1114 if (data->to)
1115 emit_insn ((*genfun) (to1, from1));
1116 else
1117 {
1118 #ifdef PUSH_ROUNDING
1119 emit_single_push_insn (mode, from1, NULL);
1120 #else
1121 gcc_unreachable ();
1122 #endif
1123 }
1124
1125 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1126 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1128 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1129
1130 if (! data->reverse)
1131 data->offset += size;
1132
1133 data->len -= size;
1134 }
1135 }
1136 \f
1137 /* Emit code to move a block Y to a block X. This may be done with
1138 string-move instructions, with multiple scalar move instructions,
1139 or with a library call.
1140
1141 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1142 SIZE is an rtx that says how long they are.
1143 ALIGN is the maximum alignment we can assume they have.
1144 METHOD describes what kind of copy this is, and what mechanisms may be used.
1145
1146 Return the address of the new block, if memcpy is called and returns it,
1147 0 otherwise. */
1148
1149 rtx
1150 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1151 {
1152 bool may_use_call;
1153 rtx retval = 0;
1154 unsigned int align;
1155
1156 switch (method)
1157 {
1158 case BLOCK_OP_NORMAL:
1159 case BLOCK_OP_TAILCALL:
1160 may_use_call = true;
1161 break;
1162
1163 case BLOCK_OP_CALL_PARM:
1164 may_use_call = block_move_libcall_safe_for_call_parm ();
1165
1166 /* Make inhibit_defer_pop nonzero around the library call
1167 to force it to pop the arguments right away. */
1168 NO_DEFER_POP;
1169 break;
1170
1171 case BLOCK_OP_NO_LIBCALL:
1172 may_use_call = false;
1173 break;
1174
1175 default:
1176 gcc_unreachable ();
1177 }
1178
1179 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1180
1181 gcc_assert (MEM_P (x));
1182 gcc_assert (MEM_P (y));
1183 gcc_assert (size);
1184
1185 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1186 block copy is more efficient for other large modes, e.g. DCmode. */
1187 x = adjust_address (x, BLKmode, 0);
1188 y = adjust_address (y, BLKmode, 0);
1189
1190 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1191 can be incorrect is coming from __builtin_memcpy. */
1192 if (GET_CODE (size) == CONST_INT)
1193 {
1194 if (INTVAL (size) == 0)
1195 return 0;
1196
1197 x = shallow_copy_rtx (x);
1198 y = shallow_copy_rtx (y);
1199 set_mem_size (x, size);
1200 set_mem_size (y, size);
1201 }
1202
1203 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1204 move_by_pieces (x, y, INTVAL (size), align, 0);
1205 else if (emit_block_move_via_movmem (x, y, size, align))
1206 ;
1207 else if (may_use_call)
1208 retval = emit_block_move_via_libcall (x, y, size,
1209 method == BLOCK_OP_TAILCALL);
1210 else
1211 emit_block_move_via_loop (x, y, size, align);
1212
1213 if (method == BLOCK_OP_CALL_PARM)
1214 OK_DEFER_POP;
1215
1216 return retval;
1217 }
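/* To summarize the dispatch above: a constant-size copy small enough for
   MOVE_BY_PIECES_P is expanded inline, otherwise a movmem pattern is
   tried, then a memcpy libcall (when METHOD allows one), and finally a
   byte-copy loop.  For illustration, a hypothetical caller holding two
   BLKmode MEMs could write

     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   to copy 32 bytes.  */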
1218
1219 /* A subroutine of emit_block_move. Returns true if calling the
1220 block move libcall will not clobber any parameters which may have
1221 already been placed on the stack. */
1222
1223 static bool
1224 block_move_libcall_safe_for_call_parm (void)
1225 {
1226 /* If arguments are pushed on the stack, then they're safe. */
1227 if (PUSH_ARGS)
1228 return true;
1229
1230 /* If registers go on the stack anyway, any argument is sure to clobber
1231 an outgoing argument. */
1232 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1233 {
1234 tree fn = emit_block_move_libcall_fn (false);
1235 (void) fn;
1236 if (REG_PARM_STACK_SPACE (fn) != 0)
1237 return false;
1238 }
1239 #endif
1240
1241 /* If any argument goes in memory, then it might clobber an outgoing
1242 argument. */
1243 {
1244 CUMULATIVE_ARGS args_so_far;
1245 tree fn, arg;
1246
1247 fn = emit_block_move_libcall_fn (false);
1248 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1249
1250 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1251 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1252 {
1253 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1254 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1255 if (!tmp || !REG_P (tmp))
1256 return false;
1257 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1258 return false;
1259 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1260 }
1261 }
1262 return true;
1263 }
1264
1265 /* A subroutine of emit_block_move. Expand a movmem pattern;
1266 return true if successful. */
1267
1268 static bool
1269 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1270 {
1271 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1272 int save_volatile_ok = volatile_ok;
1273 enum machine_mode mode;
1274
1275 /* Since this is a move insn, we don't care about volatility. */
1276 volatile_ok = 1;
1277
1278 /* Try the most limited insn first, because there's no point
1279 including more than one in the machine description unless
1280 the more limited one has some advantage. */
1281
1282 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1283 mode = GET_MODE_WIDER_MODE (mode))
1284 {
1285 enum insn_code code = movmem_optab[(int) mode];
1286 insn_operand_predicate_fn pred;
1287
1288 if (code != CODE_FOR_nothing
1289 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1290 here because if SIZE is less than the mode mask, as it is
1291 returned by the macro, it will definitely be less than the
1292 actual mode mask. */
1293 && ((GET_CODE (size) == CONST_INT
1294 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1295 <= (GET_MODE_MASK (mode) >> 1)))
1296 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1297 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1298 || (*pred) (x, BLKmode))
1299 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1300 || (*pred) (y, BLKmode))
1301 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1302 || (*pred) (opalign, VOIDmode)))
1303 {
1304 rtx op2;
1305 rtx last = get_last_insn ();
1306 rtx pat;
1307
1308 op2 = convert_to_mode (mode, size, 1);
1309 pred = insn_data[(int) code].operand[2].predicate;
1310 if (pred != 0 && ! (*pred) (op2, mode))
1311 op2 = copy_to_mode_reg (mode, op2);
1312
1313 /* ??? When called via emit_block_move_for_call, it'd be
1314 nice if there were some way to inform the backend, so
1315 that it doesn't fail the expansion because it thinks
1316 emitting the libcall would be more efficient. */
1317
1318 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1319 if (pat)
1320 {
1321 emit_insn (pat);
1322 volatile_ok = save_volatile_ok;
1323 return true;
1324 }
1325 else
1326 delete_insns_since (last);
1327 }
1328 }
1329
1330 volatile_ok = save_volatile_ok;
1331 return false;
1332 }
1333
1334 /* A subroutine of emit_block_move. Expand a call to memcpy.
1335 Return the return value from memcpy, 0 otherwise. */
1336
1337 static rtx
1338 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1339 {
1340 rtx dst_addr, src_addr;
1341 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1342 enum machine_mode size_mode;
1343 rtx retval;
1344
1345 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1346 pseudos. We can then wrap them in trees and build the CALL_EXPR for
1347 the memcpy libcall below. */
1348
1349 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1350 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1351
1352 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1353 src_addr = convert_memory_address (ptr_mode, src_addr);
1354
1355 dst_tree = make_tree (ptr_type_node, dst_addr);
1356 src_tree = make_tree (ptr_type_node, src_addr);
1357
1358 size_mode = TYPE_MODE (sizetype);
1359
1360 size = convert_to_mode (size_mode, size, 1);
1361 size = copy_to_mode_reg (size_mode, size);
1362
1363 /* It is incorrect to use the libcall calling conventions to call
1364 memcpy in this context. This could be a user call to memcpy and
1365 the user may wish to examine the return value from memcpy. For
1366 targets where libcalls and normal calls have different conventions
1367 for returning pointers, we could end up generating incorrect code. */
1368
1369 size_tree = make_tree (sizetype, size);
1370
1371 fn = emit_block_move_libcall_fn (true);
1372 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1373 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1374 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1375
1376 /* Now we have to build up the CALL_EXPR itself. */
1377 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1378 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1379 call_expr, arg_list, NULL_TREE);
1380 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1381
1382 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1383
1384 return retval;
1385 }
1386
1387 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1388 for the function we use for block copies. The first time FOR_CALL
1389 is true, we call assemble_external. */
1390
1391 static GTY(()) tree block_move_fn;
1392
1393 void
1394 init_block_move_fn (const char *asmspec)
1395 {
1396 if (!block_move_fn)
1397 {
1398 tree args, fn;
1399
1400 fn = get_identifier ("memcpy");
1401 args = build_function_type_list (ptr_type_node, ptr_type_node,
1402 const_ptr_type_node, sizetype,
1403 NULL_TREE);
1404
1405 fn = build_decl (FUNCTION_DECL, fn, args);
1406 DECL_EXTERNAL (fn) = 1;
1407 TREE_PUBLIC (fn) = 1;
1408 DECL_ARTIFICIAL (fn) = 1;
1409 TREE_NOTHROW (fn) = 1;
1410
1411 block_move_fn = fn;
1412 }
1413
1414 if (asmspec)
1415 set_user_assembler_name (block_move_fn, asmspec);
1416 }
1417
1418 static tree
1419 emit_block_move_libcall_fn (int for_call)
1420 {
1421 static bool emitted_extern;
1422
1423 if (!block_move_fn)
1424 init_block_move_fn (NULL);
1425
1426 if (for_call && !emitted_extern)
1427 {
1428 emitted_extern = true;
1429 make_decl_rtl (block_move_fn);
1430 assemble_external (block_move_fn);
1431 }
1432
1433 return block_move_fn;
1434 }
1435
1436 /* A subroutine of emit_block_move. Copy the data via an explicit
1437 loop. This is used only when libcalls are forbidden. */
1438 /* ??? It'd be nice to copy in hunks larger than QImode. */
1439
1440 static void
1441 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1442 unsigned int align ATTRIBUTE_UNUSED)
1443 {
1444 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1445 enum machine_mode iter_mode;
1446
1447 iter_mode = GET_MODE (size);
1448 if (iter_mode == VOIDmode)
1449 iter_mode = word_mode;
1450
1451 top_label = gen_label_rtx ();
1452 cmp_label = gen_label_rtx ();
1453 iter = gen_reg_rtx (iter_mode);
1454
1455 emit_move_insn (iter, const0_rtx);
1456
1457 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1458 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1459 do_pending_stack_adjust ();
1460
1461 emit_jump (cmp_label);
1462 emit_label (top_label);
1463
1464 tmp = convert_modes (Pmode, iter_mode, iter, true);
1465 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1466 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1467 x = change_address (x, QImode, x_addr);
1468 y = change_address (y, QImode, y_addr);
1469
1470 emit_move_insn (x, y);
1471
1472 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1473 true, OPTAB_LIB_WIDEN);
1474 if (tmp != iter)
1475 emit_move_insn (iter, tmp);
1476
1477 emit_label (cmp_label);
1478
1479 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1480 true, top_label);
1481 }
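/* The loop emitted above is, in effect,

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];

   written out with an explicit compare-and-jump so that it also works
   when SIZE is not a compile-time constant.  */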
1482 \f
1483 /* Copy all or part of a value X into registers starting at REGNO.
1484 The number of registers to be filled is NREGS. */
1485
1486 void
1487 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1488 {
1489 int i;
1490 #ifdef HAVE_load_multiple
1491 rtx pat;
1492 rtx last;
1493 #endif
1494
1495 if (nregs == 0)
1496 return;
1497
1498 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1499 x = validize_mem (force_const_mem (mode, x));
1500
1501 /* See if the machine can do this with a load multiple insn. */
1502 #ifdef HAVE_load_multiple
1503 if (HAVE_load_multiple)
1504 {
1505 last = get_last_insn ();
1506 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1507 GEN_INT (nregs));
1508 if (pat)
1509 {
1510 emit_insn (pat);
1511 return;
1512 }
1513 else
1514 delete_insns_since (last);
1515 }
1516 #endif
1517
1518 for (i = 0; i < nregs; i++)
1519 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1520 operand_subword_force (x, i, mode));
1521 }
1522
1523 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1525
1526 void
1527 move_block_from_reg (int regno, rtx x, int nregs)
1528 {
1529 int i;
1530
1531 if (nregs == 0)
1532 return;
1533
1534 /* See if the machine can do this with a store multiple insn. */
1535 #ifdef HAVE_store_multiple
1536 if (HAVE_store_multiple)
1537 {
1538 rtx last = get_last_insn ();
1539 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1540 GEN_INT (nregs));
1541 if (pat)
1542 {
1543 emit_insn (pat);
1544 return;
1545 }
1546 else
1547 delete_insns_since (last);
1548 }
1549 #endif
1550
1551 for (i = 0; i < nregs; i++)
1552 {
1553 rtx tem = operand_subword (x, i, 1, BLKmode);
1554
1555 gcc_assert (tem);
1556
1557 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1558 }
1559 }
1560
1561 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1562 ORIG, where ORIG is a non-consecutive group of registers represented by
1563 a PARALLEL. The clone is identical to the original except in that the
1564 original set of registers is replaced by a new set of pseudo registers.
1565 The new set has the same modes as the original set. */
1566
1567 rtx
1568 gen_group_rtx (rtx orig)
1569 {
1570 int i, length;
1571 rtx *tmps;
1572
1573 gcc_assert (GET_CODE (orig) == PARALLEL);
1574
1575 length = XVECLEN (orig, 0);
1576 tmps = alloca (sizeof (rtx) * length);
1577
1578 /* Skip a NULL entry in first slot. */
1579 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1580
1581 if (i)
1582 tmps[0] = 0;
1583
1584 for (; i < length; i++)
1585 {
1586 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1587 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1588
1589 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1590 }
1591
1592 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1593 }
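/* In the PARALLEL representation handled by the group load/store
   routines below, each element is an EXPR_LIST pairing a register with
   the CONST_INT byte offset it occupies in the block, e.g. (purely for
   illustration, with made-up pseudo numbers)

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   and the first element may be NULL to indicate that part of the value
   also lives on the stack.  */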
1594
1595 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1596 except that values are placed in TMPS[i], and must later be moved
1597 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1598
1599 static void
1600 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1601 {
1602 rtx src;
1603 int start, i;
1604 enum machine_mode m = GET_MODE (orig_src);
1605
1606 gcc_assert (GET_CODE (dst) == PARALLEL);
1607
1608 if (m != VOIDmode
1609 && !SCALAR_INT_MODE_P (m)
1610 && !MEM_P (orig_src)
1611 && GET_CODE (orig_src) != CONCAT)
1612 {
1613 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1614 if (imode == BLKmode)
1615 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1616 else
1617 src = gen_reg_rtx (imode);
1618 if (imode != BLKmode)
1619 src = gen_lowpart (GET_MODE (orig_src), src);
1620 emit_move_insn (src, orig_src);
1621 /* ...and back again. */
1622 if (imode != BLKmode)
1623 src = gen_lowpart (imode, src);
1624 emit_group_load_1 (tmps, dst, src, type, ssize);
1625 return;
1626 }
1627
1628 /* Check for a NULL entry, used to indicate that the parameter goes
1629 both on the stack and in registers. */
1630 if (XEXP (XVECEXP (dst, 0, 0), 0))
1631 start = 0;
1632 else
1633 start = 1;
1634
1635 /* Process the pieces. */
1636 for (i = start; i < XVECLEN (dst, 0); i++)
1637 {
1638 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1639 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1640 unsigned int bytelen = GET_MODE_SIZE (mode);
1641 int shift = 0;
1642
1643 /* Handle trailing fragments that run over the size of the struct. */
1644 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1645 {
1646 /* Arrange to shift the fragment to where it belongs.
1647 extract_bit_field loads to the lsb of the reg. */
1648 if (
1649 #ifdef BLOCK_REG_PADDING
1650 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1651 == (BYTES_BIG_ENDIAN ? upward : downward)
1652 #else
1653 BYTES_BIG_ENDIAN
1654 #endif
1655 )
1656 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1657 bytelen = ssize - bytepos;
1658 gcc_assert (bytelen > 0);
1659 }
1660
1661 /* If we won't be loading directly from memory, protect the real source
1662 from strange tricks we might play; but make sure that the source can
1663 be loaded directly into the destination. */
1664 src = orig_src;
1665 if (!MEM_P (orig_src)
1666 && (!CONSTANT_P (orig_src)
1667 || (GET_MODE (orig_src) != mode
1668 && GET_MODE (orig_src) != VOIDmode)))
1669 {
1670 if (GET_MODE (orig_src) == VOIDmode)
1671 src = gen_reg_rtx (mode);
1672 else
1673 src = gen_reg_rtx (GET_MODE (orig_src));
1674
1675 emit_move_insn (src, orig_src);
1676 }
1677
1678 /* Optimize the access just a bit. */
1679 if (MEM_P (src)
1680 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1681 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1682 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1683 && bytelen == GET_MODE_SIZE (mode))
1684 {
1685 tmps[i] = gen_reg_rtx (mode);
1686 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1687 }
1688 else if (COMPLEX_MODE_P (mode)
1689 && GET_MODE (src) == mode
1690 && bytelen == GET_MODE_SIZE (mode))
1691 /* Let emit_move_complex do the bulk of the work. */
1692 tmps[i] = src;
1693 else if (GET_CODE (src) == CONCAT)
1694 {
1695 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1696 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1697
1698 if ((bytepos == 0 && bytelen == slen0)
1699 || (bytepos != 0 && bytepos + bytelen <= slen))
1700 {
1701 /* The following assumes that the concatenated objects all
1702 have the same size. In this case, a simple calculation
1703 can be used to determine the object and the bit field
1704 to be extracted. */
1705 tmps[i] = XEXP (src, bytepos / slen0);
1706 if (! CONSTANT_P (tmps[i])
1707 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1708 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1709 (bytepos % slen0) * BITS_PER_UNIT,
1710 1, NULL_RTX, mode, mode);
1711 }
1712 else
1713 {
1714 rtx mem;
1715
1716 gcc_assert (!bytepos);
1717 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1718 emit_move_insn (mem, src);
1719 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1720 0, 1, NULL_RTX, mode, mode);
1721 }
1722 }
1723 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1724 SIMD register, which is currently broken. Until we get GCC
1725 to emit proper RTL for these cases, let's dump to memory. */
1726 else if (VECTOR_MODE_P (GET_MODE (dst))
1727 && REG_P (src))
1728 {
1729 int slen = GET_MODE_SIZE (GET_MODE (src));
1730 rtx mem;
1731
1732 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1733 emit_move_insn (mem, src);
1734 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1735 }
1736 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1737 && XVECLEN (dst, 0) > 1)
1738 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1739 else if (CONSTANT_P (src)
1740 || (REG_P (src) && GET_MODE (src) == mode))
1741 tmps[i] = src;
1742 else
1743 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1744 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1745 mode, mode);
1746
1747 if (shift)
1748 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1749 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1750 }
1751 }
1752
1753 /* Emit code to move a block SRC of type TYPE to a block DST,
1754 where DST is non-consecutive registers represented by a PARALLEL.
1755 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1756 if not known. */
1757
1758 void
1759 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1760 {
1761 rtx *tmps;
1762 int i;
1763
1764 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1765 emit_group_load_1 (tmps, dst, src, type, ssize);
1766
1767 /* Copy the extracted pieces into the proper (probable) hard regs. */
1768 for (i = 0; i < XVECLEN (dst, 0); i++)
1769 {
1770 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1771 if (d == NULL)
1772 continue;
1773 emit_move_insn (d, tmps[i]);
1774 }
1775 }
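/* Illustrative sketch, not part of the original file: a back end that
   returns a 16-byte aggregate in two DImode registers would describe the
   destination with a PARALLEL and load it from memory roughly like this
   (the register numbers, the 16-byte size, and the MEM/TYPE operands are
   hypothetical):

     rtx r0 = gen_rtx_REG (DImode, 0);
     rtx r1 = gen_rtx_REG (DImode, 1);
     rtx par
       = gen_rtx_PARALLEL (BLKmode,
			   gen_rtvec (2,
				      gen_rtx_EXPR_LIST (VOIDmode, r0,
							 GEN_INT (0)),
				      gen_rtx_EXPR_LIST (VOIDmode, r1,
							 GEN_INT (8))));
     emit_group_load (par, mem, type, 16);

   emit_group_load_1 extracts bytes 0-7 and 8-15 of MEM into temporaries,
   and the loop above then copies them into R0 and R1.  */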
1776
1777 /* Similar, but load SRC into new pseudos in a format that looks like
1778 PARALLEL. This can later be fed to emit_group_move to get things
1779 in the right place. */
1780
1781 rtx
1782 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1783 {
1784 rtvec vec;
1785 int i;
1786
1787 vec = rtvec_alloc (XVECLEN (parallel, 0));
1788 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1789
1790 /* Convert the vector to look just like the original PARALLEL, except
1791 with the computed values. */
1792 for (i = 0; i < XVECLEN (parallel, 0); i++)
1793 {
1794 rtx e = XVECEXP (parallel, 0, i);
1795 rtx d = XEXP (e, 0);
1796
1797 if (d)
1798 {
1799 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1800 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1801 }
1802 RTVEC_ELT (vec, i) = e;
1803 }
1804
1805 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1806 }
1807
1808 /* Emit code to move a block SRC to block DST, where SRC and DST are
1809 non-consecutive groups of registers, each represented by a PARALLEL. */
1810
1811 void
1812 emit_group_move (rtx dst, rtx src)
1813 {
1814 int i;
1815
1816 gcc_assert (GET_CODE (src) == PARALLEL
1817 && GET_CODE (dst) == PARALLEL
1818 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1819
1820 /* Skip first entry if NULL. */
1821 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1822 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1823 XEXP (XVECEXP (src, 0, i), 0));
1824 }
1825
1826 /* Move a group of registers represented by a PARALLEL into pseudos. */
1827
1828 rtx
1829 emit_group_move_into_temps (rtx src)
1830 {
1831 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1832 int i;
1833
1834 for (i = 0; i < XVECLEN (src, 0); i++)
1835 {
1836 rtx e = XVECEXP (src, 0, i);
1837 rtx d = XEXP (e, 0);
1838
1839 if (d)
1840 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1841 RTVEC_ELT (vec, i) = e;
1842 }
1843
1844 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1845 }
1846
1847 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1848 where SRC is non-consecutive registers represented by a PARALLEL.
1849 SSIZE represents the total size of block ORIG_DST, or -1 if not
1850 known. */
1851
1852 void
1853 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1854 {
1855 rtx *tmps, dst;
1856 int start, i;
1857 enum machine_mode m = GET_MODE (orig_dst);
1858
1859 gcc_assert (GET_CODE (src) == PARALLEL);
1860
1861 if (!SCALAR_INT_MODE_P (m)
1862 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1863 {
1864 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1865 if (imode == BLKmode)
1866 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1867 else
1868 dst = gen_reg_rtx (imode);
1869 emit_group_store (dst, src, type, ssize);
1870 if (imode != BLKmode)
1871 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1872 emit_move_insn (orig_dst, dst);
1873 return;
1874 }
1875
1876 /* Check for a NULL entry, used to indicate that the parameter goes
1877 both on the stack and in registers. */
1878 if (XEXP (XVECEXP (src, 0, 0), 0))
1879 start = 0;
1880 else
1881 start = 1;
1882
1883 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1884
1885 /* Copy the (probable) hard regs into pseudos. */
1886 for (i = start; i < XVECLEN (src, 0); i++)
1887 {
1888 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1889 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1890 emit_move_insn (tmps[i], reg);
1891 }
1892
1893 /* If we won't be storing directly into memory, protect the real destination
1894 from strange tricks we might play. */
1895 dst = orig_dst;
1896 if (GET_CODE (dst) == PARALLEL)
1897 {
1898 rtx temp;
1899
1900 /* We can get a PARALLEL dst if there is a conditional expression in
1901 a return statement. In that case, the dst and src are the same,
1902 so no action is necessary. */
1903 if (rtx_equal_p (dst, src))
1904 return;
1905
1906 /* It is unclear if we can ever reach here, but we may as well handle
1907 it. Allocate a temporary, and split this into a store/load to/from
1908 the temporary. */
1909
1910 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1911 emit_group_store (temp, src, type, ssize);
1912 emit_group_load (dst, temp, type, ssize);
1913 return;
1914 }
1915 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1916 {
1917 dst = gen_reg_rtx (GET_MODE (orig_dst));
1918 /* Make life a bit easier for combine. */
1919 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1920 }
1921
1922 /* Process the pieces. */
1923 for (i = start; i < XVECLEN (src, 0); i++)
1924 {
1925 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1926 enum machine_mode mode = GET_MODE (tmps[i]);
1927 unsigned int bytelen = GET_MODE_SIZE (mode);
1928 rtx dest = dst;
1929
1930 /* Handle trailing fragments that run over the size of the struct. */
1931 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1932 {
1933 /* store_bit_field always takes its value from the lsb.
1934 Move the fragment to the lsb if it's not already there. */
1935 if (
1936 #ifdef BLOCK_REG_PADDING
1937 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1938 == (BYTES_BIG_ENDIAN ? upward : downward)
1939 #else
1940 BYTES_BIG_ENDIAN
1941 #endif
1942 )
1943 {
1944 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1945 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1946 build_int_cst (NULL_TREE, shift),
1947 tmps[i], 0);
1948 }
1949 bytelen = ssize - bytepos;
1950 }
1951
1952 if (GET_CODE (dst) == CONCAT)
1953 {
1954 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1955 dest = XEXP (dst, 0);
1956 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1957 {
1958 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1959 dest = XEXP (dst, 1);
1960 }
1961 else
1962 {
1963 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1964 dest = assign_stack_temp (GET_MODE (dest),
1965 GET_MODE_SIZE (GET_MODE (dest)), 0);
1966 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1967 tmps[i]);
1968 dst = dest;
1969 break;
1970 }
1971 }
1972
1973 /* Optimize the access just a bit. */
1974 if (MEM_P (dest)
1975 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1976 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1977 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1978 && bytelen == GET_MODE_SIZE (mode))
1979 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1980 else
1981 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1982 mode, tmps[i]);
1983 }
1984
1985 /* Copy from the pseudo into the (probable) hard reg. */
1986 if (orig_dst != dst)
1987 emit_move_insn (orig_dst, dst);
1988 }
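/* Illustrative sketch (hypothetical operands, mirroring the
   emit_group_load example above): storing a value held in a two-register
   PARALLEL back into a 16-byte memory block is just

     emit_group_store (mem, par, type, 16);

   where PAR pairs (reg:DI 0) with offset 0 and (reg:DI 1) with offset 8,
   and MEM/TYPE describe the destination.  The code above copies the hard
   registers into pseudos first and then stores each piece at its byte
   offset.  */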
1989
1990 /* Generate code to copy a BLKmode object of TYPE out of a
1991 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1992 is null, a stack temporary is created. TGTBLK is returned.
1993
1994 The purpose of this routine is to handle functions that return
1995 BLKmode structures in registers. Some machines (the PA for example)
1996 want to return all small structures in registers regardless of the
1997 structure's alignment. */
1998
1999 rtx
2000 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2001 {
2002 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2003 rtx src = NULL, dst = NULL;
2004 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2005 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2006
2007 if (tgtblk == 0)
2008 {
2009 tgtblk = assign_temp (build_qualified_type (type,
2010 (TYPE_QUALS (type)
2011 | TYPE_QUAL_CONST)),
2012 0, 1, 1);
2013 preserve_temp_slots (tgtblk);
2014 }
2015
2016 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2017 into a new pseudo which is a full word. */
2018
2019 if (GET_MODE (srcreg) != BLKmode
2020 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2021 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2022
2023 /* If the structure doesn't take up a whole number of words, see whether
2024 SRCREG is padded on the left or on the right. If it's on the left,
2025 set PADDING_CORRECTION to the number of bits to skip.
2026
2027      In most ABIs, the structure will be returned at the least significant end of
2028 the register, which translates to right padding on little-endian
2029 targets and left padding on big-endian targets. The opposite
2030 holds if the structure is returned at the most significant
2031 end of the register. */
2032 if (bytes % UNITS_PER_WORD != 0
2033 && (targetm.calls.return_in_msb (type)
2034 ? !BYTES_BIG_ENDIAN
2035 : BYTES_BIG_ENDIAN))
2036 padding_correction
2037 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
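  /* Worked example (illustrative): with 32-bit words and a 6-byte
     structure returned at the least significant end of big-endian
     registers, bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION is
     32 - 2 * 8 == 16 bits, and the copy below skips the 16 bits of left
     padding at the start of SRCREG's first word.  */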
2038
2039   /* Copy the structure BITSIZE bits at a time.
2040
2041 We could probably emit more efficient code for machines which do not use
2042 strict alignment, but it doesn't seem worth the effort at the current
2043 time. */
2044 for (bitpos = 0, xbitpos = padding_correction;
2045 bitpos < bytes * BITS_PER_UNIT;
2046 bitpos += bitsize, xbitpos += bitsize)
2047 {
2048 /* We need a new source operand each time xbitpos is on a
2049 word boundary and when xbitpos == padding_correction
2050 (the first time through). */
2051 if (xbitpos % BITS_PER_WORD == 0
2052 || xbitpos == padding_correction)
2053 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2054 GET_MODE (srcreg));
2055
2056 /* We need a new destination operand each time bitpos is on
2057 a word boundary. */
2058 if (bitpos % BITS_PER_WORD == 0)
2059 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2060
2061 /* Use xbitpos for the source extraction (right justified) and
2062 	 bitpos for the destination store (left justified).  */
2063 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2064 extract_bit_field (src, bitsize,
2065 xbitpos % BITS_PER_WORD, 1,
2066 NULL_RTX, word_mode, word_mode));
2067 }
2068
2069 return tgtblk;
2070 }
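/* Illustrative sketch: when expanding a call to a function that returns
   a small BLKmode structure in registers, the caller side can do
   something like

     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   which allocates a stack temporary, copies the value out of
   HARD_RETURN_REG a word (or less) at a time using the bit-field
   routines, and returns the temporary MEM.  HARD_RETURN_REG and TYPE
   here stand for whatever the call expansion provides; they are not
   defined in this file.  */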
2071
2072 /* Add a USE expression for REG to the (possibly empty) list pointed
2073 to by CALL_FUSAGE. REG must denote a hard register. */
2074
2075 void
2076 use_reg (rtx *call_fusage, rtx reg)
2077 {
2078 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2079
2080 *call_fusage
2081 = gen_rtx_EXPR_LIST (VOIDmode,
2082 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2083 }
2084
2085 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2086 starting at REGNO. All of these registers must be hard registers. */
2087
2088 void
2089 use_regs (rtx *call_fusage, int regno, int nregs)
2090 {
2091 int i;
2092
2093 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2094
2095 for (i = 0; i < nregs; i++)
2096 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2097 }
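/* Illustrative sketch (regno 4 and the count of 3 are made up): a call
   expander that passes an argument in three consecutive hard registers
   records that fact for flow analysis with

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 3);

   and later attaches CALL_FUSAGE to the emitted CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE.  */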
2098
2099 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2100 PARALLEL REGS. This is for calls that pass values in multiple
2101 non-contiguous locations. The Irix 6 ABI has examples of this. */
2102
2103 void
2104 use_group_regs (rtx *call_fusage, rtx regs)
2105 {
2106 int i;
2107
2108 for (i = 0; i < XVECLEN (regs, 0); i++)
2109 {
2110 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2111
2112 /* A NULL entry means the parameter goes both on the stack and in
2113 registers. This can also be a MEM for targets that pass values
2114 partially on the stack and partially in registers. */
2115 if (reg != 0 && REG_P (reg))
2116 use_reg (call_fusage, reg);
2117 }
2118 }
2119 \f
2120
2121 /* Determine whether the LEN bytes generated by CONSTFUN can be
2122 stored to memory using several move instructions. CONSTFUNDATA is
2123 a pointer which will be passed as argument in every CONSTFUN call.
2124 ALIGN is maximum alignment we can assume. Return nonzero if a
2125 call to store_by_pieces should succeed. */
2126
2127 int
2128 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2129 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2130 void *constfundata, unsigned int align)
2131 {
2132 unsigned HOST_WIDE_INT l;
2133 unsigned int max_size;
2134 HOST_WIDE_INT offset = 0;
2135 enum machine_mode mode, tmode;
2136 enum insn_code icode;
2137 int reverse;
2138 rtx cst;
2139
2140 if (len == 0)
2141 return 1;
2142
2143 if (! STORE_BY_PIECES_P (len, align))
2144 return 0;
2145
2146 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2147 if (align >= GET_MODE_ALIGNMENT (tmode))
2148 align = GET_MODE_ALIGNMENT (tmode);
2149 else
2150 {
2151 enum machine_mode xmode;
2152
2153 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2154 tmode != VOIDmode;
2155 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2156 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2157 || SLOW_UNALIGNED_ACCESS (tmode, align))
2158 break;
2159
2160 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2161 }
2162
2163 /* We would first store what we can in the largest integer mode, then go to
2164 successively smaller modes. */
2165
2166 for (reverse = 0;
2167 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2168 reverse++)
2169 {
2170 l = len;
2171 mode = VOIDmode;
2172 max_size = STORE_MAX_PIECES + 1;
2173 while (max_size > 1)
2174 {
2175 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2176 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2177 if (GET_MODE_SIZE (tmode) < max_size)
2178 mode = tmode;
2179
2180 if (mode == VOIDmode)
2181 break;
2182
2183 icode = mov_optab->handlers[(int) mode].insn_code;
2184 if (icode != CODE_FOR_nothing
2185 && align >= GET_MODE_ALIGNMENT (mode))
2186 {
2187 unsigned int size = GET_MODE_SIZE (mode);
2188
2189 while (l >= size)
2190 {
2191 if (reverse)
2192 offset -= size;
2193
2194 cst = (*constfun) (constfundata, offset, mode);
2195 if (!LEGITIMATE_CONSTANT_P (cst))
2196 return 0;
2197
2198 if (!reverse)
2199 offset += size;
2200
2201 l -= size;
2202 }
2203 }
2204
2205 max_size = GET_MODE_SIZE (mode);
2206 }
2207
2208 /* The code above should have handled everything. */
2209 gcc_assert (!l);
2210 }
2211
2212 return 1;
2213 }
2214
2215 /* Generate several move instructions to store LEN bytes generated by
2216 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2217 pointer which will be passed as argument in every CONSTFUN call.
2218 ALIGN is maximum alignment we can assume.
2219    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2220    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2221    stpcpy.  */
2222
2223 rtx
2224 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2225 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2226 void *constfundata, unsigned int align, int endp)
2227 {
2228 struct store_by_pieces data;
2229
2230 if (len == 0)
2231 {
2232 gcc_assert (endp != 2);
2233 return to;
2234 }
2235
2236 gcc_assert (STORE_BY_PIECES_P (len, align));
2237 data.constfun = constfun;
2238 data.constfundata = constfundata;
2239 data.len = len;
2240 data.to = to;
2241 store_by_pieces_1 (&data, align);
2242 if (endp)
2243 {
2244 rtx to1;
2245
2246 gcc_assert (!data.reverse);
2247 if (data.autinc_to)
2248 {
2249 if (endp == 2)
2250 {
2251 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2252 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2253 else
2254 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2255 -1));
2256 }
2257 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2258 data.offset);
2259 }
2260 else
2261 {
2262 if (endp == 2)
2263 --data.offset;
2264 to1 = adjust_address (data.to, QImode, data.offset);
2265 }
2266 return to1;
2267 }
2268 else
2269 return data.to;
2270 }
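/* Illustrative sketch (the helper name and byte value are invented): a
   caller that wants to fill a block with copies of one known byte can
   supply a CONSTFUN that builds the repeated-byte constant for each mode,
   then test and expand:

     static rtx
     repeated_byte_rtx (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode)
     {
       unsigned HOST_WIDE_INT c = *(unsigned char *) data;
       unsigned HOST_WIDE_INT val = 0;
       unsigned int i;

       for (i = 0; i < GET_MODE_SIZE (mode); i++)
	 val = (val << BITS_PER_UNIT) | c;
       return gen_int_mode (val, mode);
     }

     ...
     unsigned char byte = 0xab;
     if (can_store_by_pieces (len, repeated_byte_rtx, &byte, align))
       store_by_pieces (to, len, repeated_byte_rtx, &byte, align, 0);

   This is roughly how the built-in string and memory expansions elsewhere
   in GCC drive these two routines; LEN, ALIGN and TO stand for the
   caller's operands.  */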
2271
2272 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2273 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2274
2275 static void
2276 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2277 {
2278 struct store_by_pieces data;
2279
2280 if (len == 0)
2281 return;
2282
2283 data.constfun = clear_by_pieces_1;
2284 data.constfundata = NULL;
2285 data.len = len;
2286 data.to = to;
2287 store_by_pieces_1 (&data, align);
2288 }
2289
2290 /* Callback routine for clear_by_pieces.
2291 Return const0_rtx unconditionally. */
2292
2293 static rtx
2294 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2295 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2296 enum machine_mode mode ATTRIBUTE_UNUSED)
2297 {
2298 return const0_rtx;
2299 }
2300
2301 /* Subroutine of clear_by_pieces and store_by_pieces.
2302 Generate several move instructions to store LEN bytes of block TO. (A MEM
2303 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2304
2305 static void
2306 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2307 unsigned int align ATTRIBUTE_UNUSED)
2308 {
2309 rtx to_addr = XEXP (data->to, 0);
2310 unsigned int max_size = STORE_MAX_PIECES + 1;
2311 enum machine_mode mode = VOIDmode, tmode;
2312 enum insn_code icode;
2313
2314 data->offset = 0;
2315 data->to_addr = to_addr;
2316 data->autinc_to
2317 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2318 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2319
2320 data->explicit_inc_to = 0;
2321 data->reverse
2322 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2323 if (data->reverse)
2324 data->offset = data->len;
2325
2326 /* If storing requires more than two move insns,
2327 copy addresses to registers (to make displacements shorter)
2328 and use post-increment if available. */
2329 if (!data->autinc_to
2330 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2331 {
2332 /* Determine the main mode we'll be using. */
2333 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2334 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2335 if (GET_MODE_SIZE (tmode) < max_size)
2336 mode = tmode;
2337
2338 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2339 {
2340 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2341 data->autinc_to = 1;
2342 data->explicit_inc_to = -1;
2343 }
2344
2345 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2346 && ! data->autinc_to)
2347 {
2348 data->to_addr = copy_addr_to_reg (to_addr);
2349 data->autinc_to = 1;
2350 data->explicit_inc_to = 1;
2351 }
2352
2353       if (!data->autinc_to && CONSTANT_P (to_addr))
2354 data->to_addr = copy_addr_to_reg (to_addr);
2355 }
2356
2357 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2358 if (align >= GET_MODE_ALIGNMENT (tmode))
2359 align = GET_MODE_ALIGNMENT (tmode);
2360 else
2361 {
2362 enum machine_mode xmode;
2363
2364 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2365 tmode != VOIDmode;
2366 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2367 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2368 || SLOW_UNALIGNED_ACCESS (tmode, align))
2369 break;
2370
2371 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2372 }
2373
2374 /* First store what we can in the largest integer mode, then go to
2375 successively smaller modes. */
2376
2377 while (max_size > 1)
2378 {
2379 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2380 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2381 if (GET_MODE_SIZE (tmode) < max_size)
2382 mode = tmode;
2383
2384 if (mode == VOIDmode)
2385 break;
2386
2387 icode = mov_optab->handlers[(int) mode].insn_code;
2388 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2389 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2390
2391 max_size = GET_MODE_SIZE (mode);
2392 }
2393
2394 /* The code above should have handled everything. */
2395 gcc_assert (!data->len);
2396 }
2397
2398 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2399 with move instructions for mode MODE. GENFUN is the gen_... function
2400 to make a move insn for that mode. DATA has all the other info. */
2401
2402 static void
2403 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2404 struct store_by_pieces *data)
2405 {
2406 unsigned int size = GET_MODE_SIZE (mode);
2407 rtx to1, cst;
2408
2409 while (data->len >= size)
2410 {
2411 if (data->reverse)
2412 data->offset -= size;
2413
2414 if (data->autinc_to)
2415 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2416 data->offset);
2417 else
2418 to1 = adjust_address (data->to, mode, data->offset);
2419
2420 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2421 emit_insn (gen_add2_insn (data->to_addr,
2422 GEN_INT (-(HOST_WIDE_INT) size)));
2423
2424 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2425 emit_insn ((*genfun) (to1, cst));
2426
2427 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2428 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2429
2430 if (! data->reverse)
2431 data->offset += size;
2432
2433 data->len -= size;
2434 }
2435 }
2436 \f
2437 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2438 its length in bytes. */
2439
2440 rtx
2441 clear_storage (rtx object, rtx size, enum block_op_methods method)
2442 {
2443 enum machine_mode mode = GET_MODE (object);
2444 unsigned int align;
2445
2446 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2447
2448 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2449 just move a zero. Otherwise, do this a piece at a time. */
2450 if (mode != BLKmode
2451 && GET_CODE (size) == CONST_INT
2452 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2453 {
2454 rtx zero = CONST0_RTX (mode);
2455 if (zero != NULL)
2456 {
2457 emit_move_insn (object, zero);
2458 return NULL;
2459 }
2460
2461 if (COMPLEX_MODE_P (mode))
2462 {
2463 zero = CONST0_RTX (GET_MODE_INNER (mode));
2464 if (zero != NULL)
2465 {
2466 write_complex_part (object, zero, 0);
2467 write_complex_part (object, zero, 1);
2468 return NULL;
2469 }
2470 }
2471 }
2472
2473 if (size == const0_rtx)
2474 return NULL;
2475
2476 align = MEM_ALIGN (object);
2477
2478 if (GET_CODE (size) == CONST_INT
2479 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2480 clear_by_pieces (object, INTVAL (size), align);
2481 else if (clear_storage_via_clrmem (object, size, align))
2482 ;
2483 else
2484 return clear_storage_via_libcall (object, size,
2485 method == BLOCK_OP_TAILCALL);
2486
2487 return NULL;
2488 }
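/* Illustrative sketch (the 32-byte size is arbitrary): zeroing a BLKmode
   stack temporary goes through clear_storage, which picks clear_by_pieces,
   a clrmem pattern, or the memset libcall as appropriate:

     rtx tmp = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (tmp, GEN_INT (32), BLOCK_OP_NORMAL);  */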
2489
2490 /* A subroutine of clear_storage. Expand a clrmem pattern;
2491 return true if successful. */
2492
2493 static bool
2494 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2495 {
2496 /* Try the most limited insn first, because there's no point
2497 including more than one in the machine description unless
2498 the more limited one has some advantage. */
2499
2500 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2501 enum machine_mode mode;
2502
2503 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2504 mode = GET_MODE_WIDER_MODE (mode))
2505 {
2506 enum insn_code code = clrmem_optab[(int) mode];
2507 insn_operand_predicate_fn pred;
2508
2509 if (code != CODE_FOR_nothing
2510 /* We don't need MODE to be narrower than
2511 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2512 the mode mask, as it is returned by the macro, it will
2513 definitely be less than the actual mode mask. */
2514 && ((GET_CODE (size) == CONST_INT
2515 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2516 <= (GET_MODE_MASK (mode) >> 1)))
2517 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2518 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2519 || (*pred) (object, BLKmode))
2520 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2521 || (*pred) (opalign, VOIDmode)))
2522 {
2523 rtx op1;
2524 rtx last = get_last_insn ();
2525 rtx pat;
2526
2527 op1 = convert_to_mode (mode, size, 1);
2528 pred = insn_data[(int) code].operand[1].predicate;
2529 if (pred != 0 && ! (*pred) (op1, mode))
2530 op1 = copy_to_mode_reg (mode, op1);
2531
2532 pat = GEN_FCN ((int) code) (object, op1, opalign);
2533 if (pat)
2534 {
2535 emit_insn (pat);
2536 return true;
2537 }
2538 else
2539 delete_insns_since (last);
2540 }
2541 }
2542
2543 return false;
2544 }
2545
2546 /* A subroutine of clear_storage. Expand a call to memset.
2547 Return the return value of memset, 0 otherwise. */
2548
2549 static rtx
2550 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2551 {
2552 tree call_expr, arg_list, fn, object_tree, size_tree;
2553 enum machine_mode size_mode;
2554 rtx retval;
2555
2556 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2557    place those new pseudos into a VAR_DECL and use them later.  */
2558
2559 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2560
2561 size_mode = TYPE_MODE (sizetype);
2562 size = convert_to_mode (size_mode, size, 1);
2563 size = copy_to_mode_reg (size_mode, size);
2564
2565 /* It is incorrect to use the libcall calling conventions to call
2566 memset in this context. This could be a user call to memset and
2567 the user may wish to examine the return value from memset. For
2568 targets where libcalls and normal calls have different conventions
2569 for returning pointers, we could end up generating incorrect code. */
2570
2571 object_tree = make_tree (ptr_type_node, object);
2572 size_tree = make_tree (sizetype, size);
2573
2574 fn = clear_storage_libcall_fn (true);
2575 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2576 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2577 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2578
2579 /* Now we have to build up the CALL_EXPR itself. */
2580 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2581 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2582 call_expr, arg_list, NULL_TREE);
2583 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2584
2585 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2586
2587 return retval;
2588 }
2589
2590 /* A subroutine of clear_storage_via_libcall. Create the tree node
2591 for the function we use for block clears. The first time FOR_CALL
2592 is true, we call assemble_external. */
2593
2594 static GTY(()) tree block_clear_fn;
2595
2596 void
2597 init_block_clear_fn (const char *asmspec)
2598 {
2599 if (!block_clear_fn)
2600 {
2601 tree fn, args;
2602
2603 fn = get_identifier ("memset");
2604 args = build_function_type_list (ptr_type_node, ptr_type_node,
2605 integer_type_node, sizetype,
2606 NULL_TREE);
2607
2608 fn = build_decl (FUNCTION_DECL, fn, args);
2609 DECL_EXTERNAL (fn) = 1;
2610 TREE_PUBLIC (fn) = 1;
2611 DECL_ARTIFICIAL (fn) = 1;
2612 TREE_NOTHROW (fn) = 1;
2613
2614 block_clear_fn = fn;
2615 }
2616
2617 if (asmspec)
2618 set_user_assembler_name (block_clear_fn, asmspec);
2619 }
2620
2621 static tree
2622 clear_storage_libcall_fn (int for_call)
2623 {
2624 static bool emitted_extern;
2625
2626 if (!block_clear_fn)
2627 init_block_clear_fn (NULL);
2628
2629 if (for_call && !emitted_extern)
2630 {
2631 emitted_extern = true;
2632 make_decl_rtl (block_clear_fn);
2633 assemble_external (block_clear_fn);
2634 }
2635
2636 return block_clear_fn;
2637 }
2638 \f
2639 /* Write to one of the components of the complex value CPLX. Write VAL to
2640    the real part if IMAG_P is false, and the imaginary part if it is true.  */
2641
2642 static void
2643 write_complex_part (rtx cplx, rtx val, bool imag_p)
2644 {
2645 enum machine_mode cmode;
2646 enum machine_mode imode;
2647 unsigned ibitsize;
2648
2649 if (GET_CODE (cplx) == CONCAT)
2650 {
2651 emit_move_insn (XEXP (cplx, imag_p), val);
2652 return;
2653 }
2654
2655 cmode = GET_MODE (cplx);
2656 imode = GET_MODE_INNER (cmode);
2657 ibitsize = GET_MODE_BITSIZE (imode);
2658
2659 /* If the sub-object is at least word sized, then we know that subregging
2660 will work. This special case is important, since store_bit_field
2661 wants to operate on integer modes, and there's rarely an OImode to
2662 correspond to TCmode. */
2663 if (ibitsize >= BITS_PER_WORD
2664 /* For hard regs we have exact predicates. Assume we can split
2665 the original object if it spans an even number of hard regs.
2666 This special case is important for SCmode on 64-bit platforms
2667 where the natural size of floating-point regs is 32-bit. */
2668 || (REG_P (cplx)
2669 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2670 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2671 /* For MEMs we always try to make a "subreg", that is to adjust
2672 the MEM, because store_bit_field may generate overly
2673 convoluted RTL for sub-word fields. */
2674 || MEM_P (cplx))
2675 {
2676 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2677 imag_p ? GET_MODE_SIZE (imode) : 0);
2678 if (part)
2679 {
2680 emit_move_insn (part, val);
2681 return;
2682 }
2683 else
2684 /* simplify_gen_subreg may fail for sub-word MEMs. */
2685 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2686 }
2687
2688 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2689 }
2690
2691 /* Extract one of the components of the complex value CPLX. Extract the
2692 real part if IMAG_P is false, and the imaginary part if it's true. */
2693
2694 static rtx
2695 read_complex_part (rtx cplx, bool imag_p)
2696 {
2697 enum machine_mode cmode, imode;
2698 unsigned ibitsize;
2699
2700 if (GET_CODE (cplx) == CONCAT)
2701 return XEXP (cplx, imag_p);
2702
2703 cmode = GET_MODE (cplx);
2704 imode = GET_MODE_INNER (cmode);
2705 ibitsize = GET_MODE_BITSIZE (imode);
2706
2707 /* Special case reads from complex constants that got spilled to memory. */
2708 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2709 {
2710 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2711 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2712 {
2713 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2714 if (CONSTANT_CLASS_P (part))
2715 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2716 }
2717 }
2718
2719 /* If the sub-object is at least word sized, then we know that subregging
2720 will work. This special case is important, since extract_bit_field
2721 wants to operate on integer modes, and there's rarely an OImode to
2722 correspond to TCmode. */
2723 if (ibitsize >= BITS_PER_WORD
2724 /* For hard regs we have exact predicates. Assume we can split
2725 the original object if it spans an even number of hard regs.
2726 This special case is important for SCmode on 64-bit platforms
2727 where the natural size of floating-point regs is 32-bit. */
2728 || (REG_P (cplx)
2729 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2730 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2731 /* For MEMs we always try to make a "subreg", that is to adjust
2732 the MEM, because extract_bit_field may generate overly
2733 convoluted RTL for sub-word fields. */
2734 || MEM_P (cplx))
2735 {
2736 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2737 imag_p ? GET_MODE_SIZE (imode) : 0);
2738 if (ret)
2739 return ret;
2740 else
2741 /* simplify_gen_subreg may fail for sub-word MEMs. */
2742 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2743 }
2744
2745 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2746 true, NULL_RTX, imode, imode);
2747 }
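/* Illustrative sketch: given two non-overlapping SCmode operands X and Y
   (assumed to be valid rtxes), the two helpers above compose naturally,
   e.g. to store Y into X with its parts swapped:

     write_complex_part (x, read_complex_part (y, true), false);
     write_complex_part (x, read_complex_part (y, false), true);

   Each call resolves to a subreg move where the mode and object allow it,
   and to store_bit_field/extract_bit_field otherwise.  */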
2748 \f
2749 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2750 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2751 represented in NEW_MODE. If FORCE is true, this will never happen, as
2752 we'll force-create a SUBREG if needed. */
2753
2754 static rtx
2755 emit_move_change_mode (enum machine_mode new_mode,
2756 enum machine_mode old_mode, rtx x, bool force)
2757 {
2758 rtx ret;
2759
2760 if (reload_in_progress && MEM_P (x))
2761 {
2762 /* We can't use gen_lowpart here because it may call change_address
2763 which is not appropriate if we were called when a reload was in
2764 progress. We don't have to worry about changing the address since
2765 the size in bytes is supposed to be the same. Copy the MEM to
2766 change the mode and move any substitutions from the old MEM to
2767 the new one. */
2768
2769 ret = adjust_address_nv (x, new_mode, 0);
2770 copy_replacements (x, ret);
2771 }
2772 else
2773 {
2774 /* Note that we do want simplify_subreg's behavior of validating
2775 that the new mode is ok for a hard register. If we were to use
2776 simplify_gen_subreg, we would create the subreg, but would
2777 probably run into the target not being able to implement it. */
2778 	 /* Except, of course, when FORCE is true, in which case creating the
2779 	    SUBREG is exactly what we want; that is needed for CCmodes on some targets.  */
2780 if (force)
2781 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2782 else
2783 ret = simplify_subreg (new_mode, x, old_mode, 0);
2784 }
2785
2786 return ret;
2787 }
2788
2789 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2790 an integer mode of the same size as MODE. Returns the instruction
2791 emitted, or NULL if such a move could not be generated. */
2792
2793 static rtx
2794 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2795 {
2796 enum machine_mode imode;
2797 enum insn_code code;
2798
2799 /* There must exist a mode of the exact size we require. */
2800 imode = int_mode_for_mode (mode);
2801 if (imode == BLKmode)
2802 return NULL_RTX;
2803
2804 /* The target must support moves in this mode. */
2805 code = mov_optab->handlers[imode].insn_code;
2806 if (code == CODE_FOR_nothing)
2807 return NULL_RTX;
2808
2809 x = emit_move_change_mode (imode, mode, x, false);
2810 if (x == NULL_RTX)
2811 return NULL_RTX;
2812 y = emit_move_change_mode (imode, mode, y, false);
2813 if (y == NULL_RTX)
2814 return NULL_RTX;
2815 return emit_insn (GEN_FCN (code) (x, y));
2816 }
2817
2818 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2819 Return an equivalent MEM that does not use an auto-increment. */
2820
2821 static rtx
2822 emit_move_resolve_push (enum machine_mode mode, rtx x)
2823 {
2824 enum rtx_code code = GET_CODE (XEXP (x, 0));
2825 HOST_WIDE_INT adjust;
2826 rtx temp;
2827
2828 adjust = GET_MODE_SIZE (mode);
2829 #ifdef PUSH_ROUNDING
2830 adjust = PUSH_ROUNDING (adjust);
2831 #endif
2832 if (code == PRE_DEC || code == POST_DEC)
2833 adjust = -adjust;
2834
2835 /* Do not use anti_adjust_stack, since we don't want to update
2836 stack_pointer_delta. */
2837 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2838 GEN_INT (adjust), stack_pointer_rtx,
2839 0, OPTAB_LIB_WIDEN);
2840 if (temp != stack_pointer_rtx)
2841 emit_move_insn (stack_pointer_rtx, temp);
2842
2843 switch (code)
2844 {
2845 case PRE_INC:
2846 case PRE_DEC:
2847 temp = stack_pointer_rtx;
2848 break;
2849 case POST_INC:
2850 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2851 break;
2852 case POST_DEC:
2853 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2854 break;
2855 default:
2856 gcc_unreachable ();
2857 }
2858
2859 return replace_equiv_address (x, temp);
2860 }
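/* Worked example (hypothetical 4-byte mode, downward-growing stack): for
   X == (mem:SI (pre_dec (reg sp))) the code above emits an explicit
   "sp = sp - 4" (or minus PUSH_ROUNDING (4) where that differs) and
   returns (mem:SI (reg sp)), so the caller can use an ordinary move to
   store into the slot the auto-increment form would have written.  */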
2861
2862 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2863 X is known to satisfy push_operand, and MODE is known to be complex.
2864 Returns the last instruction emitted. */
2865
2866 static rtx
2867 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2868 {
2869 enum machine_mode submode = GET_MODE_INNER (mode);
2870 bool imag_first;
2871
2872 #ifdef PUSH_ROUNDING
2873 unsigned int submodesize = GET_MODE_SIZE (submode);
2874
2875   /* If we are pushing onto the stack, but the machine cannot push exactly
2876      this size (PUSH_ROUNDING pads it), we need to use move instructions.  */
2877 if (PUSH_ROUNDING (submodesize) != submodesize)
2878 {
2879 x = emit_move_resolve_push (mode, x);
2880 return emit_move_insn (x, y);
2881 }
2882 #endif
2883
2884 /* Note that the real part always precedes the imag part in memory
2885      regardless of the machine's endianness.  */
2886 switch (GET_CODE (XEXP (x, 0)))
2887 {
2888 case PRE_DEC:
2889 case POST_DEC:
2890 imag_first = true;
2891 break;
2892 case PRE_INC:
2893 case POST_INC:
2894 imag_first = false;
2895 break;
2896 default:
2897 gcc_unreachable ();
2898 }
2899
2900 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2901 read_complex_part (y, imag_first));
2902 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2903 read_complex_part (y, !imag_first));
2904 }
2905
2906 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2907 MODE is known to be complex. Returns the last instruction emitted. */
2908
2909 static rtx
2910 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2911 {
2912 bool try_int;
2913
2914 /* Need to take special care for pushes, to maintain proper ordering
2915 of the data, and possibly extra padding. */
2916 if (push_operand (x, mode))
2917 return emit_move_complex_push (mode, x, y);
2918
2919 /* See if we can coerce the target into moving both values at once. */
2920
2921 /* Move floating point as parts. */
2922 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2923 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2924 try_int = false;
2925 /* Not possible if the values are inherently not adjacent. */
2926 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2927 try_int = false;
2928 /* Is possible if both are registers (or subregs of registers). */
2929 else if (register_operand (x, mode) && register_operand (y, mode))
2930 try_int = true;
2931 /* If one of the operands is a memory, and alignment constraints
2932 are friendly enough, we may be able to do combined memory operations.
2933 We do not attempt this if Y is a constant because that combination is
2934 usually better with the by-parts thing below. */
2935 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2936 && (!STRICT_ALIGNMENT
2937 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2938 try_int = true;
2939 else
2940 try_int = false;
2941
2942 if (try_int)
2943 {
2944 rtx ret;
2945
2946 /* For memory to memory moves, optimal behavior can be had with the
2947 existing block move logic. */
2948 if (MEM_P (x) && MEM_P (y))
2949 {
2950 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2951 BLOCK_OP_NO_LIBCALL);
2952 return get_last_insn ();
2953 }
2954
2955 ret = emit_move_via_integer (mode, x, y);
2956 if (ret)
2957 return ret;
2958 }
2959
2960 /* Show the output dies here. This is necessary for SUBREGs
2961 of pseudos since we cannot track their lifetimes correctly;
2962 hard regs shouldn't appear here except as return values. */
2963 if (!reload_completed && !reload_in_progress
2964 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2965 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2966
2967 write_complex_part (x, read_complex_part (y, false), false);
2968 write_complex_part (x, read_complex_part (y, true), true);
2969 return get_last_insn ();
2970 }
2971
2972 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2973 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2974
2975 static rtx
2976 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2977 {
2978 rtx ret;
2979
2980 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2981 if (mode != CCmode)
2982 {
2983 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2984 if (code != CODE_FOR_nothing)
2985 {
2986 x = emit_move_change_mode (CCmode, mode, x, true);
2987 y = emit_move_change_mode (CCmode, mode, y, true);
2988 return emit_insn (GEN_FCN (code) (x, y));
2989 }
2990 }
2991
2992 /* Otherwise, find the MODE_INT mode of the same width. */
2993 ret = emit_move_via_integer (mode, x, y);
2994 gcc_assert (ret != NULL);
2995 return ret;
2996 }
2997
2998 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2999 MODE is any multi-word or full-word mode that lacks a move_insn
3000 pattern. Note that you will get better code if you define such
3001 patterns, even if they must turn into multiple assembler instructions. */
3002
3003 static rtx
3004 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3005 {
3006 rtx last_insn = 0;
3007 rtx seq, inner;
3008 bool need_clobber;
3009 int i;
3010
3011 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3012
3013 /* If X is a push on the stack, do the push now and replace
3014 X with a reference to the stack pointer. */
3015 if (push_operand (x, mode))
3016 x = emit_move_resolve_push (mode, x);
3017
3018 /* If we are in reload, see if either operand is a MEM whose address
3019 is scheduled for replacement. */
3020 if (reload_in_progress && MEM_P (x)
3021 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3022 x = replace_equiv_address_nv (x, inner);
3023 if (reload_in_progress && MEM_P (y)
3024 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3025 y = replace_equiv_address_nv (y, inner);
3026
3027 start_sequence ();
3028
3029 need_clobber = false;
3030 for (i = 0;
3031 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3032 i++)
3033 {
3034 rtx xpart = operand_subword (x, i, 1, mode);
3035 rtx ypart = operand_subword (y, i, 1, mode);
3036
3037 /* If we can't get a part of Y, put Y into memory if it is a
3038 constant. Otherwise, force it into a register. Then we must
3039 be able to get a part of Y. */
3040 if (ypart == 0 && CONSTANT_P (y))
3041 {
3042 y = force_const_mem (mode, y);
3043 ypart = operand_subword (y, i, 1, mode);
3044 }
3045 else if (ypart == 0)
3046 ypart = operand_subword_force (y, i, mode);
3047
3048 gcc_assert (xpart && ypart);
3049
3050 need_clobber |= (GET_CODE (xpart) == SUBREG);
3051
3052 last_insn = emit_move_insn (xpart, ypart);
3053 }
3054
3055 seq = get_insns ();
3056 end_sequence ();
3057
3058 /* Show the output dies here. This is necessary for SUBREGs
3059 of pseudos since we cannot track their lifetimes correctly;
3060 hard regs shouldn't appear here except as return values.
3061 We never want to emit such a clobber after reload. */
3062 if (x != y
3063 && ! (reload_in_progress || reload_completed)
3064 && need_clobber != 0)
3065 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3066
3067 emit_insn (seq);
3068
3069 return last_insn;
3070 }
3071
3072 /* Low level part of emit_move_insn.
3073 Called just like emit_move_insn, but assumes X and Y
3074 are basically valid. */
3075
3076 rtx
3077 emit_move_insn_1 (rtx x, rtx y)
3078 {
3079 enum machine_mode mode = GET_MODE (x);
3080 enum insn_code code;
3081
3082 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3083
3084 code = mov_optab->handlers[mode].insn_code;
3085 if (code != CODE_FOR_nothing)
3086 return emit_insn (GEN_FCN (code) (x, y));
3087
3088 /* Expand complex moves by moving real part and imag part. */
3089 if (COMPLEX_MODE_P (mode))
3090 return emit_move_complex (mode, x, y);
3091
3092 if (GET_MODE_CLASS (mode) == MODE_CC)
3093 return emit_move_ccmode (mode, x, y);
3094
3095 /* Try using a move pattern for the corresponding integer mode. This is
3096 only safe when simplify_subreg can convert MODE constants into integer
3097 constants. At present, it can only do this reliably if the value
3098 fits within a HOST_WIDE_INT. */
3099 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3100 {
3101 rtx ret = emit_move_via_integer (mode, x, y);
3102 if (ret)
3103 return ret;
3104 }
3105
3106 return emit_move_multi_word (mode, x, y);
3107 }
3108
3109 /* Generate code to copy Y into X.
3110 Both Y and X must have the same mode, except that
3111 Y can be a constant with VOIDmode.
3112 This mode cannot be BLKmode; use emit_block_move for that.
3113
3114 Return the last instruction emitted. */
3115
3116 rtx
3117 emit_move_insn (rtx x, rtx y)
3118 {
3119 enum machine_mode mode = GET_MODE (x);
3120 rtx y_cst = NULL_RTX;
3121 rtx last_insn, set;
3122
3123 gcc_assert (mode != BLKmode
3124 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3125
3126 if (CONSTANT_P (y))
3127 {
3128 if (optimize
3129 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3130 && (last_insn = compress_float_constant (x, y)))
3131 return last_insn;
3132
3133 y_cst = y;
3134
3135 if (!LEGITIMATE_CONSTANT_P (y))
3136 {
3137 y = force_const_mem (mode, y);
3138
3139 /* If the target's cannot_force_const_mem prevented the spill,
3140 assume that the target's move expanders will also take care
3141 of the non-legitimate constant. */
3142 if (!y)
3143 y = y_cst;
3144 }
3145 }
3146
3147 /* If X or Y are memory references, verify that their addresses are valid
3148 for the machine. */
3149 if (MEM_P (x)
3150 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3151 && ! push_operand (x, GET_MODE (x)))
3152 || (flag_force_addr
3153 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3154 x = validize_mem (x);
3155
3156 if (MEM_P (y)
3157 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3158 || (flag_force_addr
3159 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3160 y = validize_mem (y);
3161
3162 gcc_assert (mode != BLKmode);
3163
3164 last_insn = emit_move_insn_1 (x, y);
3165
3166 if (y_cst && REG_P (x)
3167 && (set = single_set (last_insn)) != NULL_RTX
3168 && SET_DEST (set) == x
3169 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3170 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3171
3172 return last_insn;
3173 }
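/* Illustrative sketch (DFmode and dconst1 are just example choices): most
   of the machinery above is reached through plain calls such as

     rtx reg = gen_reg_rtx (DFmode);
     emit_move_insn (reg, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));

   emit_move_insn legitimizes the operands, possibly spills the constant
   to memory, attaches a REG_EQUAL note, and defers to emit_move_insn_1,
   which selects the complex, CC, integer-mode or multi-word strategy.  */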
3174
3175 /* If Y is representable exactly in a narrower mode, and the target can
3176 perform the extension directly from constant or memory, then emit the
3177 move as an extension. */
3178
3179 static rtx
3180 compress_float_constant (rtx x, rtx y)
3181 {
3182 enum machine_mode dstmode = GET_MODE (x);
3183 enum machine_mode orig_srcmode = GET_MODE (y);
3184 enum machine_mode srcmode;
3185 REAL_VALUE_TYPE r;
3186
3187 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3188
3189 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3190 srcmode != orig_srcmode;
3191 srcmode = GET_MODE_WIDER_MODE (srcmode))
3192 {
3193 enum insn_code ic;
3194 rtx trunc_y, last_insn;
3195
3196 /* Skip if the target can't extend this way. */
3197 ic = can_extend_p (dstmode, srcmode, 0);
3198 if (ic == CODE_FOR_nothing)
3199 continue;
3200
3201 /* Skip if the narrowed value isn't exact. */
3202 if (! exact_real_truncate (srcmode, &r))
3203 continue;
3204
3205 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3206
3207 if (LEGITIMATE_CONSTANT_P (trunc_y))
3208 {
3209 /* Skip if the target needs extra instructions to perform
3210 the extension. */
3211 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3212 continue;
3213 }
3214 else if (float_extend_from_mem[dstmode][srcmode])
3215 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3216 else
3217 continue;
3218
3219 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3220 last_insn = get_last_insn ();
3221
3222 if (REG_P (x))
3223 set_unique_reg_note (last_insn, REG_EQUAL, y);
3224
3225 return last_insn;
3226 }
3227
3228 return NULL_RTX;
3229 }
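/* Worked example (target dependent, so purely illustrative): moving the
   DFmode constant 1.0 on a machine with an extendsfdf2 pattern.  Since
   1.0 truncates exactly to SFmode, the loop above finds SFmode and the
   move is emitted as an SF->DF extension of the narrower constant (or of
   a 4-byte memory copy of it) instead of loading a full 8-byte constant,
   typically as something like

     (set (reg:DF d) (float_extend:DF (mem:SF (symbol_ref ...))))

   when the constant has to live in memory.  */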
3230 \f
3231 /* Pushing data onto the stack. */
3232
3233 /* Push a block of length SIZE (perhaps variable)
3234 and return an rtx to address the beginning of the block.
3235 The value may be virtual_outgoing_args_rtx.
3236
3237 EXTRA is the number of bytes of padding to push in addition to SIZE.
3238 BELOW nonzero means this padding comes at low addresses;
3239 otherwise, the padding comes at high addresses. */
3240
3241 rtx
3242 push_block (rtx size, int extra, int below)
3243 {
3244 rtx temp;
3245
3246 size = convert_modes (Pmode, ptr_mode, size, 1);
3247 if (CONSTANT_P (size))
3248 anti_adjust_stack (plus_constant (size, extra));
3249 else if (REG_P (size) && extra == 0)
3250 anti_adjust_stack (size);
3251 else
3252 {
3253 temp = copy_to_mode_reg (Pmode, size);
3254 if (extra != 0)
3255 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3256 temp, 0, OPTAB_LIB_WIDEN);
3257 anti_adjust_stack (temp);
3258 }
3259
3260 #ifndef STACK_GROWS_DOWNWARD
3261 if (0)
3262 #else
3263 if (1)
3264 #endif
3265 {
3266 temp = virtual_outgoing_args_rtx;
3267 if (extra != 0 && below)
3268 temp = plus_constant (temp, extra);
3269 }
3270 else
3271 {
3272 if (GET_CODE (size) == CONST_INT)
3273 temp = plus_constant (virtual_outgoing_args_rtx,
3274 -INTVAL (size) - (below ? 0 : extra));
3275 else if (extra != 0 && !below)
3276 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3277 negate_rtx (Pmode, plus_constant (size, extra)));
3278 else
3279 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3280 negate_rtx (Pmode, size));
3281 }
3282
3283 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3284 }
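/* Illustrative sketch (the 64-byte size is arbitrary): to reserve a block
   of outgoing-argument space and get an address for it, a caller does

     rtx addr = push_block (GEN_INT (64), 0, 0);
     rtx dst = gen_rtx_MEM (BLKmode, addr);

   which is how the BLKmode path of emit_push_insn below uses it when no
   argument block has been preallocated.  */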
3285
3286 #ifdef PUSH_ROUNDING
3287
3288 /* Emit single push insn. */
3289
3290 static void
3291 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3292 {
3293 rtx dest_addr;
3294 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3295 rtx dest;
3296 enum insn_code icode;
3297 insn_operand_predicate_fn pred;
3298
3299 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3300   /* If there is a push pattern, use it.  Otherwise fall back to the old way
3301      of handing a MEM representing the push operation to the move expander.  */
3302 icode = push_optab->handlers[(int) mode].insn_code;
3303 if (icode != CODE_FOR_nothing)
3304 {
3305 if (((pred = insn_data[(int) icode].operand[0].predicate)
3306 && !((*pred) (x, mode))))
3307 x = force_reg (mode, x);
3308 emit_insn (GEN_FCN (icode) (x));
3309 return;
3310 }
3311 if (GET_MODE_SIZE (mode) == rounded_size)
3312 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3313 /* If we are to pad downward, adjust the stack pointer first and
3314 then store X into the stack location using an offset. This is
3315 because emit_move_insn does not know how to pad; it does not have
3316 access to type. */
3317 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3318 {
3319 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3320 HOST_WIDE_INT offset;
3321
3322 emit_move_insn (stack_pointer_rtx,
3323 expand_binop (Pmode,
3324 #ifdef STACK_GROWS_DOWNWARD
3325 sub_optab,
3326 #else
3327 add_optab,
3328 #endif
3329 stack_pointer_rtx,
3330 GEN_INT (rounded_size),
3331 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3332
3333 offset = (HOST_WIDE_INT) padding_size;
3334 #ifdef STACK_GROWS_DOWNWARD
3335 if (STACK_PUSH_CODE == POST_DEC)
3336 /* We have already decremented the stack pointer, so get the
3337 previous value. */
3338 offset += (HOST_WIDE_INT) rounded_size;
3339 #else
3340 if (STACK_PUSH_CODE == POST_INC)
3341 /* We have already incremented the stack pointer, so get the
3342 previous value. */
3343 offset -= (HOST_WIDE_INT) rounded_size;
3344 #endif
3345 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3346 }
3347 else
3348 {
3349 #ifdef STACK_GROWS_DOWNWARD
3350 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3351 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3352 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3353 #else
3354 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3355 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3356 GEN_INT (rounded_size));
3357 #endif
3358 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3359 }
3360
3361 dest = gen_rtx_MEM (mode, dest_addr);
3362
3363 if (type != 0)
3364 {
3365 set_mem_attributes (dest, type, 1);
3366
3367 if (flag_optimize_sibling_calls)
3368 /* Function incoming arguments may overlap with sibling call
3369 outgoing arguments and we cannot allow reordering of reads
3370 from function arguments with stores to outgoing arguments
3371 of sibling calls. */
3372 set_mem_alias_set (dest, 0);
3373 }
3374 emit_move_insn (dest, x);
3375 }
3376 #endif
3377
3378 /* Generate code to push X onto the stack, assuming it has mode MODE and
3379 type TYPE.
3380 MODE is redundant except when X is a CONST_INT (since they don't
3381 carry mode info).
3382 SIZE is an rtx for the size of data to be copied (in bytes),
3383 needed only if X is BLKmode.
3384
3385 ALIGN (in bits) is maximum alignment we can assume.
3386
3387 If PARTIAL and REG are both nonzero, then copy that many of the first
3388 bytes of X into registers starting with REG, and push the rest of X.
3389 The amount of space pushed is decreased by PARTIAL bytes.
3390 REG must be a hard register in this case.
3391    If REG is zero but PARTIAL is not, take all other actions for an
3392 argument partially in registers, but do not actually load any
3393 registers.
3394
3395 EXTRA is the amount in bytes of extra space to leave next to this arg.
3396 This is ignored if an argument block has already been allocated.
3397
3398 On a machine that lacks real push insns, ARGS_ADDR is the address of
3399 the bottom of the argument block for this call. We use indexing off there
3400    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3401 argument block has not been preallocated.
3402
3403 ARGS_SO_FAR is the size of args previously pushed for this call.
3404
3405 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3406 for arguments passed in registers. If nonzero, it will be the number
3407 of bytes required. */
3408
3409 void
3410 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3411 unsigned int align, int partial, rtx reg, int extra,
3412 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3413 rtx alignment_pad)
3414 {
3415 rtx xinner;
3416 enum direction stack_direction
3417 #ifdef STACK_GROWS_DOWNWARD
3418 = downward;
3419 #else
3420 = upward;
3421 #endif
3422
3423 /* Decide where to pad the argument: `downward' for below,
3424 `upward' for above, or `none' for don't pad it.
3425 Default is below for small data on big-endian machines; else above. */
3426 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3427
3428 /* Invert direction if stack is post-decrement.
3429 FIXME: why? */
3430 if (STACK_PUSH_CODE == POST_DEC)
3431 if (where_pad != none)
3432 where_pad = (where_pad == downward ? upward : downward);
3433
3434 xinner = x;
3435
3436 if (mode == BLKmode)
3437 {
3438 /* Copy a block into the stack, entirely or partially. */
3439
3440 rtx temp;
3441 int used;
3442 int offset;
3443 int skip;
3444
3445 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3446 used = partial - offset;
3447
3448 gcc_assert (size);
3449
3450 /* USED is now the # of bytes we need not copy to the stack
3451 because registers will take care of them. */
3452
3453 if (partial != 0)
3454 xinner = adjust_address (xinner, BLKmode, used);
3455
3456 /* If the partial register-part of the arg counts in its stack size,
3457 skip the part of stack space corresponding to the registers.
3458 Otherwise, start copying to the beginning of the stack space,
3459 by setting SKIP to 0. */
3460 skip = (reg_parm_stack_space == 0) ? 0 : used;
3461
3462 #ifdef PUSH_ROUNDING
3463 /* Do it with several push insns if that doesn't take lots of insns
3464 and if there is no difficulty with push insns that skip bytes
3465 on the stack for alignment purposes. */
3466 if (args_addr == 0
3467 && PUSH_ARGS
3468 && GET_CODE (size) == CONST_INT
3469 && skip == 0
3470 && MEM_ALIGN (xinner) >= align
3471 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3472 /* Here we avoid the case of a structure whose weak alignment
3473 forces many pushes of a small amount of data,
3474 and such small pushes do rounding that causes trouble. */
3475 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3476 || align >= BIGGEST_ALIGNMENT
3477 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3478 == (align / BITS_PER_UNIT)))
3479 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3480 {
3481 /* Push padding now if padding above and stack grows down,
3482 or if padding below and stack grows up.
3483 But if space already allocated, this has already been done. */
3484 if (extra && args_addr == 0
3485 && where_pad != none && where_pad != stack_direction)
3486 anti_adjust_stack (GEN_INT (extra));
3487
3488 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3489 }
3490 else
3491 #endif /* PUSH_ROUNDING */
3492 {
3493 rtx target;
3494
3495 /* Otherwise make space on the stack and copy the data
3496 to the address of that space. */
3497
3498 /* Deduct words put into registers from the size we must copy. */
3499 if (partial != 0)
3500 {
3501 if (GET_CODE (size) == CONST_INT)
3502 size = GEN_INT (INTVAL (size) - used);
3503 else
3504 size = expand_binop (GET_MODE (size), sub_optab, size,
3505 GEN_INT (used), NULL_RTX, 0,
3506 OPTAB_LIB_WIDEN);
3507 }
3508
3509 /* Get the address of the stack space.
3510 In this case, we do not deal with EXTRA separately.
3511 A single stack adjust will do. */
3512 if (! args_addr)
3513 {
3514 temp = push_block (size, extra, where_pad == downward);
3515 extra = 0;
3516 }
3517 else if (GET_CODE (args_so_far) == CONST_INT)
3518 temp = memory_address (BLKmode,
3519 plus_constant (args_addr,
3520 skip + INTVAL (args_so_far)));
3521 else
3522 temp = memory_address (BLKmode,
3523 plus_constant (gen_rtx_PLUS (Pmode,
3524 args_addr,
3525 args_so_far),
3526 skip));
3527
3528 if (!ACCUMULATE_OUTGOING_ARGS)
3529 {
3530 /* If the source is referenced relative to the stack pointer,
3531 copy it to another register to stabilize it. We do not need
3532 to do this if we know that we won't be changing sp. */
3533
3534 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3535 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3536 temp = copy_to_reg (temp);
3537 }
3538
3539 target = gen_rtx_MEM (BLKmode, temp);
3540
3541 /* We do *not* set_mem_attributes here, because incoming arguments
3542 may overlap with sibling call outgoing arguments and we cannot
3543 allow reordering of reads from function arguments with stores
3544 to outgoing arguments of sibling calls. We do, however, want
3545 to record the alignment of the stack slot. */
3546 /* ALIGN may well be better aligned than TYPE, e.g. due to
3547 PARM_BOUNDARY. Assume the caller isn't lying. */
3548 set_mem_align (target, align);
3549
3550 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3551 }
3552 }
3553 else if (partial > 0)
3554 {
3555 /* Scalar partly in registers. */
3556
3557 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3558 int i;
3559 int not_stack;
3560 /* Number of bytes at the start of the argument
3561 for which we must make space but need not store. */
3562 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3563 int args_offset = INTVAL (args_so_far);
3564 int skip;
3565
3566 /* Push padding now if padding above and stack grows down,
3567 or if padding below and stack grows up.
3568 But if space already allocated, this has already been done. */
3569 if (extra && args_addr == 0
3570 && where_pad != none && where_pad != stack_direction)
3571 anti_adjust_stack (GEN_INT (extra));
3572
3573 /* If we make space by pushing it, we might as well push
3574 the real data. Otherwise, we can leave OFFSET nonzero
3575 and leave the space uninitialized. */
3576 if (args_addr == 0)
3577 offset = 0;
3578
3579 /* Now NOT_STACK gets the number of words that we don't need to
3580 allocate on the stack. Convert OFFSET to words too. */
3581 not_stack = (partial - offset) / UNITS_PER_WORD;
3582 offset /= UNITS_PER_WORD;
3583
3584 /* If the partial register-part of the arg counts in its stack size,
3585 skip the part of stack space corresponding to the registers.
3586 Otherwise, start copying to the beginning of the stack space,
3587 by setting SKIP to 0. */
3588 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3589
3590 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3591 x = validize_mem (force_const_mem (mode, x));
3592
3593 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3594 SUBREGs of such registers are not allowed. */
3595 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3596 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3597 x = copy_to_reg (x);
3598
3599 /* Loop over all the words allocated on the stack for this arg. */
3600 /* We can do it by words, because any scalar bigger than a word
3601 has a size that is a multiple of a word. */
3602 #ifndef PUSH_ARGS_REVERSED
3603 for (i = not_stack; i < size; i++)
3604 #else
3605 for (i = size - 1; i >= not_stack; i--)
3606 #endif
3607 if (i >= not_stack + offset)
3608 emit_push_insn (operand_subword_force (x, i, mode),
3609 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3610 0, args_addr,
3611 GEN_INT (args_offset + ((i - not_stack + skip)
3612 * UNITS_PER_WORD)),
3613 reg_parm_stack_space, alignment_pad);
3614 }
3615 else
3616 {
3617 rtx addr;
3618 rtx dest;
3619
3620 /* Push padding now if padding above and stack grows down,
3621 or if padding below and stack grows up.
3622 But if space already allocated, this has already been done. */
3623 if (extra && args_addr == 0
3624 && where_pad != none && where_pad != stack_direction)
3625 anti_adjust_stack (GEN_INT (extra));
3626
3627 #ifdef PUSH_ROUNDING
3628 if (args_addr == 0 && PUSH_ARGS)
3629 emit_single_push_insn (mode, x, type);
3630 else
3631 #endif
3632 {
3633 if (GET_CODE (args_so_far) == CONST_INT)
3634 addr
3635 = memory_address (mode,
3636 plus_constant (args_addr,
3637 INTVAL (args_so_far)));
3638 else
3639 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3640 args_so_far));
3641 dest = gen_rtx_MEM (mode, addr);
3642
3643 /* We do *not* set_mem_attributes here, because incoming arguments
3644 may overlap with sibling call outgoing arguments and we cannot
3645 allow reordering of reads from function arguments with stores
3646 to outgoing arguments of sibling calls. We do, however, want
3647 to record the alignment of the stack slot. */
3648 /* ALIGN may well be better aligned than TYPE, e.g. due to
3649 PARM_BOUNDARY. Assume the caller isn't lying. */
3650 set_mem_align (dest, align);
3651
3652 emit_move_insn (dest, x);
3653 }
3654 }
3655
3656 /* If part should go in registers, copy that part
3657 into the appropriate registers. Do this now, at the end,
3658 since mem-to-mem copies above may do function calls. */
3659 if (partial > 0 && reg != 0)
3660 {
3661 /* Handle calls that pass values in multiple non-contiguous locations.
3662 The Irix 6 ABI has examples of this. */
3663 if (GET_CODE (reg) == PARALLEL)
3664 emit_group_load (reg, x, type, -1);
3665 else
3666 {
3667 gcc_assert (partial % UNITS_PER_WORD == 0);
3668 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3669 }
3670 }
3671
3672 if (extra && args_addr == 0 && where_pad == stack_direction)
3673 anti_adjust_stack (GEN_INT (extra));
3674
3675 if (alignment_pad && args_addr == 0)
3676 anti_adjust_stack (alignment_pad);
3677 }
3678 \f
3679 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3680 operations. */
3681
3682 static rtx
3683 get_subtarget (rtx x)
3684 {
3685 return (optimize
3686 || x == 0
3687 /* Only registers can be subtargets. */
3688 || !REG_P (x)
3689 /* Don't use hard regs to avoid extending their life. */
3690 || REGNO (x) < FIRST_PSEUDO_REGISTER
3691 ? 0 : x);
3692 }
3693
3694 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3695 FIELD is a bitfield. Returns true if the optimization was successful,
3696 and there's nothing else to do. */
3697
3698 static bool
3699 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3700 unsigned HOST_WIDE_INT bitpos,
3701 enum machine_mode mode1, rtx str_rtx,
3702 tree to, tree src)
3703 {
3704 enum machine_mode str_mode = GET_MODE (str_rtx);
3705 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3706 tree op0, op1;
3707 rtx value, result;
3708 optab binop;
3709
3710 if (mode1 != VOIDmode
3711 || bitsize >= BITS_PER_WORD
3712 || str_bitsize > BITS_PER_WORD
3713 || TREE_SIDE_EFFECTS (to)
3714 || TREE_THIS_VOLATILE (to))
3715 return false;
3716
3717 STRIP_NOPS (src);
3718 if (!BINARY_CLASS_P (src)
3719 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3720 return false;
3721
3722 op0 = TREE_OPERAND (src, 0);
3723 op1 = TREE_OPERAND (src, 1);
3724 STRIP_NOPS (op0);
3725
3726 if (!operand_equal_p (to, op0, 0))
3727 return false;
3728
3729 if (MEM_P (str_rtx))
3730 {
3731 unsigned HOST_WIDE_INT offset1;
3732
3733 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3734 str_mode = word_mode;
3735 str_mode = get_best_mode (bitsize, bitpos,
3736 MEM_ALIGN (str_rtx), str_mode, 0);
3737 if (str_mode == VOIDmode)
3738 return false;
3739 str_bitsize = GET_MODE_BITSIZE (str_mode);
3740
3741 offset1 = bitpos;
3742 bitpos %= str_bitsize;
3743 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3744 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3745 }
3746 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3747 return false;
3748
3749 /* If the bit field covers the whole REG/MEM, store_field
3750 will likely generate better code. */
3751 if (bitsize >= str_bitsize)
3752 return false;
3753
3754 /* We can't handle fields split across multiple entities. */
3755 if (bitpos + bitsize > str_bitsize)
3756 return false;
3757
3758 if (BYTES_BIG_ENDIAN)
3759 bitpos = str_bitsize - bitpos - bitsize;
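/* Illustrative example (editorial note): with str_bitsize == 32,
   bitpos == 0 and bitsize == 8, the big-endian adjustment yields
   bitpos == 32 - 0 - 8 == 24, i.e. the field occupies the most
   significant byte of the word once positions are counted from the
   least significant bit.  */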
3760
3761 switch (TREE_CODE (src))
3762 {
3763 case PLUS_EXPR:
3764 case MINUS_EXPR:
3765 /* For now, optimize only the case of the topmost bitfield,
3766 where no masking is needed, and the case of
3767 1-bit bitfields, where xor can be used.
3768 For the other bitfields we might still win by one instruction
3769 when insv/extv instructions aren't used, so that
3770 can be added later. */
3771 if (bitpos + bitsize != str_bitsize
3772 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3773 break;
3774
3775 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3776 value = convert_modes (str_mode,
3777 TYPE_MODE (TREE_TYPE (op1)), value,
3778 TYPE_UNSIGNED (TREE_TYPE (op1)));
3779
3780 /* We may be accessing data outside the field, which means
3781 we can alias adjacent data. */
3782 if (MEM_P (str_rtx))
3783 {
3784 str_rtx = shallow_copy_rtx (str_rtx);
3785 set_mem_alias_set (str_rtx, 0);
3786 set_mem_expr (str_rtx, 0);
3787 }
3788
3789 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3790 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3791 {
3792 value = expand_and (str_mode, value, const1_rtx, NULL);
3793 binop = xor_optab;
3794 }
3795 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3796 build_int_cst (NULL_TREE, bitpos),
3797 NULL_RTX, 1);
3798 result = expand_binop (str_mode, binop, str_rtx,
3799 value, str_rtx, 1, OPTAB_WIDEN);
3800 if (result != str_rtx)
3801 emit_move_insn (str_rtx, result);
3802 return true;
3803
3804 case BIT_IOR_EXPR:
3805 case BIT_XOR_EXPR:
3806 if (TREE_CODE (op1) != INTEGER_CST)
3807 break;
3808 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3809 value = convert_modes (GET_MODE (str_rtx),
3810 TYPE_MODE (TREE_TYPE (op1)), value,
3811 TYPE_UNSIGNED (TREE_TYPE (op1)));
3812
3813 /* We may be accessing data outside the field, which means
3814 we can alias adjacent data. */
3815 if (MEM_P (str_rtx))
3816 {
3817 str_rtx = shallow_copy_rtx (str_rtx);
3818 set_mem_alias_set (str_rtx, 0);
3819 set_mem_expr (str_rtx, 0);
3820 }
3821
3822 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3823 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3824 {
3825 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3826 - 1);
3827 value = expand_and (GET_MODE (str_rtx), value, mask,
3828 NULL_RTX);
3829 }
3830 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3831 build_int_cst (NULL_TREE, bitpos),
3832 NULL_RTX, 1);
3833 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3834 value, str_rtx, 1, OPTAB_WIDEN);
3835 if (result != str_rtx)
3836 emit_move_insn (str_rtx, result);
3837 return true;
3838
3839 default:
3840 break;
3841 }
3842
3843 return false;
3844 }
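/* Editorial sketch (hypothetical helpers, not part of GCC): the bit-level
   effect that optimize_bitfield_assignment_op exploits above, written out
   on a plain unsigned word.  For the topmost bitfield the carry out of the
   field falls off the end of the word, so a full-word add works without
   masking; for a 1-bit field, adding an odd constant is the same as
   xor-ing that bit.  */
#if 0
static unsigned int
sketch_topmost_field_plus (unsigned int word, unsigned int val,
                           unsigned int bitpos)
{
  /* FIELD += VAL where FIELD occupies the top bits of WORD: any carry
     out of the field overflows off the end of the word, so the low
     bits are left untouched.  */
  return word + (val << bitpos);
}

static unsigned int
sketch_one_bit_field_plus_one (unsigned int word, unsigned int bitpos)
{
  /* FIELD += 1 on a 1-bit field simply flips that bit.  */
  return word ^ (1u << bitpos);
}
#endif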
3845
3846
3847 /* Expand an assignment that stores the value of FROM into TO. */
3848
3849 void
3850 expand_assignment (tree to, tree from)
3851 {
3852 rtx to_rtx = 0;
3853 rtx result;
3854
3855 /* Don't crash if the lhs of the assignment was erroneous. */
3856
3857 if (TREE_CODE (to) == ERROR_MARK)
3858 {
3859 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3860 return;
3861 }
3862
3863 /* Assignment of a structure component needs special treatment
3864 if the structure component's rtx is not simply a MEM.
3865 Assignment of an array element at a constant index, and assignment of
3866 an array element in an unaligned packed structure field, has the same
3867 problem. */
3868 if (handled_component_p (to)
3869 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3870 {
3871 enum machine_mode mode1;
3872 HOST_WIDE_INT bitsize, bitpos;
3873 tree offset;
3874 int unsignedp;
3875 int volatilep = 0;
3876 tree tem;
3877
3878 push_temp_slots ();
3879 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3880 &unsignedp, &volatilep, true);
3881
3882 /* If we are going to use store_bit_field and extract_bit_field,
3883 make sure to_rtx will be safe for multiple use. */
3884
3885 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3886
3887 if (offset != 0)
3888 {
3889 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3890
3891 gcc_assert (MEM_P (to_rtx));
3892
3893 #ifdef POINTERS_EXTEND_UNSIGNED
3894 if (GET_MODE (offset_rtx) != Pmode)
3895 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3896 #else
3897 if (GET_MODE (offset_rtx) != ptr_mode)
3898 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3899 #endif
3900
3901 /* A constant address in TO_RTX can have VOIDmode, we must not try
3902 to call force_reg for that case. Avoid that case. */
3903 if (MEM_P (to_rtx)
3904 && GET_MODE (to_rtx) == BLKmode
3905 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3906 && bitsize > 0
3907 && (bitpos % bitsize) == 0
3908 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3909 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3910 {
3911 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3912 bitpos = 0;
3913 }
3914
3915 to_rtx = offset_address (to_rtx, offset_rtx,
3916 highest_pow2_factor_for_target (to,
3917 offset));
3918 }
3919
3920 /* Handle expand_expr of a complex value returning a CONCAT. */
3921 if (GET_CODE (to_rtx) == CONCAT)
3922 {
3923 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3924 {
3925 gcc_assert (bitpos == 0);
3926 result = store_expr (from, to_rtx, false);
3927 }
3928 else
3929 {
3930 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3931 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3932 }
3933 }
3934 else
3935 {
3936 if (MEM_P (to_rtx))
3937 {
3938 /* If the field is at offset zero, we could have been given the
3939 DECL_RTX of the parent struct. Don't munge it. */
3940 to_rtx = shallow_copy_rtx (to_rtx);
3941
3942 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3943
3944 /* Deal with volatile and readonly fields. The former is only
3945 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3946 if (volatilep)
3947 MEM_VOLATILE_P (to_rtx) = 1;
3948 if (component_uses_parent_alias_set (to))
3949 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3950 }
3951
3952 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3953 to_rtx, to, from))
3954 result = NULL;
3955 else
3956 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3957 TREE_TYPE (tem), get_alias_set (to));
3958 }
3959
3960 if (result)
3961 preserve_temp_slots (result);
3962 free_temp_slots ();
3963 pop_temp_slots ();
3964 return;
3965 }
3966
3967 /* If the rhs is a function call and its value is not an aggregate,
3968 call the function before we start to compute the lhs.
3969 This is needed for correct code for cases such as
3970 val = setjmp (buf) on machines where reference to val
3971 requires loading up part of an address in a separate insn.
3972
3973 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3974 since it might be a promoted variable where the zero- or sign- extension
3975 needs to be done. Handling this in the normal way is safe because no
3976 computation is done before the call. */
3977 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3978 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3979 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3980 && REG_P (DECL_RTL (to))))
3981 {
3982 rtx value;
3983
3984 push_temp_slots ();
3985 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3986 if (to_rtx == 0)
3987 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3988
3989 /* Handle calls that return values in multiple non-contiguous locations.
3990 The Irix 6 ABI has examples of this. */
3991 if (GET_CODE (to_rtx) == PARALLEL)
3992 emit_group_load (to_rtx, value, TREE_TYPE (from),
3993 int_size_in_bytes (TREE_TYPE (from)));
3994 else if (GET_MODE (to_rtx) == BLKmode)
3995 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3996 else
3997 {
3998 if (POINTER_TYPE_P (TREE_TYPE (to)))
3999 value = convert_memory_address (GET_MODE (to_rtx), value);
4000 emit_move_insn (to_rtx, value);
4001 }
4002 preserve_temp_slots (to_rtx);
4003 free_temp_slots ();
4004 pop_temp_slots ();
4005 return;
4006 }
4007
4008 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4009 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4010
4011 if (to_rtx == 0)
4012 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4013
4014 /* Don't move directly into a return register. */
4015 if (TREE_CODE (to) == RESULT_DECL
4016 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4017 {
4018 rtx temp;
4019
4020 push_temp_slots ();
4021 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4022
4023 if (GET_CODE (to_rtx) == PARALLEL)
4024 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4025 int_size_in_bytes (TREE_TYPE (from)));
4026 else
4027 emit_move_insn (to_rtx, temp);
4028
4029 preserve_temp_slots (to_rtx);
4030 free_temp_slots ();
4031 pop_temp_slots ();
4032 return;
4033 }
4034
4035 /* In case we are returning the contents of an object that overlaps
4036 the place where the value is being stored, use a safe function (memmove)
4037 when copying a value through a pointer into a structure value return block. */
4038 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4039 && current_function_returns_struct
4040 && !current_function_returns_pcc_struct)
4041 {
4042 rtx from_rtx, size;
4043
4044 push_temp_slots ();
4045 size = expr_size (from);
4046 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4047
4048 emit_library_call (memmove_libfunc, LCT_NORMAL,
4049 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4050 XEXP (from_rtx, 0), Pmode,
4051 convert_to_mode (TYPE_MODE (sizetype),
4052 size, TYPE_UNSIGNED (sizetype)),
4053 TYPE_MODE (sizetype));
4054
4055 preserve_temp_slots (to_rtx);
4056 free_temp_slots ();
4057 pop_temp_slots ();
4058 return;
4059 }
4060
4061 /* Compute FROM and store the value in the rtx we got. */
4062
4063 push_temp_slots ();
4064 result = store_expr (from, to_rtx, 0);
4065 preserve_temp_slots (result);
4066 free_temp_slots ();
4067 pop_temp_slots ();
4068 return;
4069 }
4070
4071 /* Generate code for computing expression EXP,
4072 and storing the value into TARGET.
4073
4074 If the mode is BLKmode then we may return TARGET itself.
4075 It turns out that in BLKmode it doesn't cause a problem,
4076 because C has no operators that could combine two different
4077 assignments into the same BLKmode object with different values
4078 with no sequence point. Will other languages need this to
4079 be more thorough?
4080
4081 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4082 stack, and block moves may need to be treated specially. */
4083
4084 rtx
4085 store_expr (tree exp, rtx target, int call_param_p)
4086 {
4087 rtx temp;
4088 rtx alt_rtl = NULL_RTX;
4089 int dont_return_target = 0;
4090
4091 if (VOID_TYPE_P (TREE_TYPE (exp)))
4092 {
4093 /* C++ can generate ?: expressions with a throw expression in one
4094 branch and an rvalue in the other. Here, we resolve attempts to
4095 store the throw expression's nonexistent result. */
4096 gcc_assert (!call_param_p);
4097 expand_expr (exp, const0_rtx, VOIDmode, 0);
4098 return NULL_RTX;
4099 }
4100 if (TREE_CODE (exp) == COMPOUND_EXPR)
4101 {
4102 /* Perform first part of compound expression, then assign from second
4103 part. */
4104 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4105 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4106 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4107 }
4108 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4109 {
4110 /* For conditional expression, get safe form of the target. Then
4111 test the condition, doing the appropriate assignment on either
4112 side. This avoids the creation of unnecessary temporaries.
4113 For non-BLKmode, it is more efficient not to do this. */
4114
4115 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4116
4117 do_pending_stack_adjust ();
4118 NO_DEFER_POP;
4119 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4120 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4121 emit_jump_insn (gen_jump (lab2));
4122 emit_barrier ();
4123 emit_label (lab1);
4124 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4125 emit_label (lab2);
4126 OK_DEFER_POP;
4127
4128 return NULL_RTX;
4129 }
4130 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4131 /* If this is a scalar in a register that is stored in a wider mode
4132 than the declared mode, compute the result into its declared mode
4133 and then convert to the wider mode. Our value is the computed
4134 expression. */
4135 {
4136 rtx inner_target = 0;
4137
4138 /* We can do the conversion inside EXP, which will often result
4139 in some optimizations. Do the conversion in two steps: first
4140 change the signedness, if needed, then the extend. But don't
4141 do this if the type of EXP is a subtype of something else
4142 since then the conversion might involve more than just
4143 converting modes. */
4144 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4145 && TREE_TYPE (TREE_TYPE (exp)) == 0
4146 && (!lang_hooks.reduce_bit_field_operations
4147 || (GET_MODE_PRECISION (GET_MODE (target))
4148 == TYPE_PRECISION (TREE_TYPE (exp)))))
4149 {
4150 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4151 != SUBREG_PROMOTED_UNSIGNED_P (target))
4152 exp = convert
4153 (lang_hooks.types.signed_or_unsigned_type
4154 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4155
4156 exp = convert (lang_hooks.types.type_for_mode
4157 (GET_MODE (SUBREG_REG (target)),
4158 SUBREG_PROMOTED_UNSIGNED_P (target)),
4159 exp);
4160
4161 inner_target = SUBREG_REG (target);
4162 }
4163
4164 temp = expand_expr (exp, inner_target, VOIDmode,
4165 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4166
4167 /* If TEMP is a VOIDmode constant, use convert_modes to make
4168 sure that we properly convert it. */
4169 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4170 {
4171 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4172 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4173 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4174 GET_MODE (target), temp,
4175 SUBREG_PROMOTED_UNSIGNED_P (target));
4176 }
4177
4178 convert_move (SUBREG_REG (target), temp,
4179 SUBREG_PROMOTED_UNSIGNED_P (target));
4180
4181 return NULL_RTX;
4182 }
4183 else
4184 {
4185 temp = expand_expr_real (exp, target, GET_MODE (target),
4186 (call_param_p
4187 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4188 &alt_rtl);
4189 /* Return TARGET if it's a specified hardware register.
4190 If TARGET is a volatile mem ref, either return TARGET
4191 or return a reg copied *from* TARGET; ANSI requires this.
4192
4193 Otherwise, if TEMP is not TARGET, return TEMP
4194 if it is constant (for efficiency),
4195 or if we really want the correct value. */
4196 if (!(target && REG_P (target)
4197 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4198 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4199 && ! rtx_equal_p (temp, target)
4200 && CONSTANT_P (temp))
4201 dont_return_target = 1;
4202 }
4203
4204 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4205 the same as that of TARGET, adjust the constant. This is needed, for
4206 example, in case it is a CONST_DOUBLE and we want only a word-sized
4207 value. */
4208 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4209 && TREE_CODE (exp) != ERROR_MARK
4210 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4211 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4212 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4213
4214 /* If value was not generated in the target, store it there.
4215 Convert the value to TARGET's type first if necessary and emit the
4216 pending increments that were queued while expanding EXP.
4217 Note that we cannot emit the whole queue blindly because this will
4218 effectively disable the POST_INC optimization later.
4219
4220 If TEMP and TARGET compare equal according to rtx_equal_p, but
4221 one or both of them are volatile memory refs, we have to distinguish
4222 two cases:
4223 - expand_expr has used TARGET. In this case, we must not generate
4224 another copy. This can be detected by TEMP being equal to TARGET
4225 according to ==.
4226 - expand_expr has not used TARGET; the source just happens to have
4227 the same RTX form. Since TEMP will have been created by
4228 expand_expr, it will compare unequal to TARGET according to ==.
4229 We must generate a copy in this case, to reach the correct number
4230 of volatile memory references. */
4231
4232 if ((! rtx_equal_p (temp, target)
4233 || (temp != target && (side_effects_p (temp)
4234 || side_effects_p (target))))
4235 && TREE_CODE (exp) != ERROR_MARK
4236 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4237 but TARGET is not valid memory reference, TEMP will differ
4238 from TARGET although it is really the same location. */
4239 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4240 /* If there's nothing to copy, don't bother. Don't call
4241 expr_size unless necessary, because some front-ends (C++)
4242 expr_size-hook must not be given objects that are not
4243 supposed to be bit-copied or bit-initialized. */
4244 && expr_size (exp) != const0_rtx)
4245 {
4246 if (GET_MODE (temp) != GET_MODE (target)
4247 && GET_MODE (temp) != VOIDmode)
4248 {
4249 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4250 if (dont_return_target)
4251 {
4252 /* In this case, we will return TEMP,
4253 so make sure it has the proper mode.
4254 But don't forget to store the value into TARGET. */
4255 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4256 emit_move_insn (target, temp);
4257 }
4258 else
4259 convert_move (target, temp, unsignedp);
4260 }
4261
4262 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4263 {
4264 /* Handle copying a string constant into an array. The string
4265 constant may be shorter than the array. So copy just the string's
4266 actual length, and clear the rest. First get the size of the data
4267 type of the string, which is actually the size of the target. */
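/* Illustrative example (editorial note): for
       char buf[16] = "abc";
   TREE_STRING_LENGTH is 4 (the three characters plus the trailing nul),
   so the block move below copies 4 bytes and clear_storage zeroes the
   remaining 12.  */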
4268 rtx size = expr_size (exp);
4269
4270 if (GET_CODE (size) == CONST_INT
4271 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4272 emit_block_move (target, temp, size,
4273 (call_param_p
4274 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4275 else
4276 {
4277 /* Compute the size of the data to copy from the string. */
4278 tree copy_size
4279 = size_binop (MIN_EXPR,
4280 make_tree (sizetype, size),
4281 size_int (TREE_STRING_LENGTH (exp)));
4282 rtx copy_size_rtx
4283 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4284 (call_param_p
4285 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4286 rtx label = 0;
4287
4288 /* Copy that much. */
4289 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4290 TYPE_UNSIGNED (sizetype));
4291 emit_block_move (target, temp, copy_size_rtx,
4292 (call_param_p
4293 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4294
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4297 if (GET_CODE (copy_size_rtx) == CONST_INT)
4298 {
4299 size = plus_constant (size, -INTVAL (copy_size_rtx));
4300 target = adjust_address (target, BLKmode,
4301 INTVAL (copy_size_rtx));
4302 }
4303 else
4304 {
4305 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4306 copy_size_rtx, NULL_RTX, 0,
4307 OPTAB_LIB_WIDEN);
4308
4309 #ifdef POINTERS_EXTEND_UNSIGNED
4310 if (GET_MODE (copy_size_rtx) != Pmode)
4311 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4312 TYPE_UNSIGNED (sizetype));
4313 #endif
4314
4315 target = offset_address (target, copy_size_rtx,
4316 highest_pow2_factor (copy_size));
4317 label = gen_label_rtx ();
4318 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4319 GET_MODE (size), 0, label);
4320 }
4321
4322 if (size != const0_rtx)
4323 clear_storage (target, size, BLOCK_OP_NORMAL);
4324
4325 if (label)
4326 emit_label (label);
4327 }
4328 }
4329 /* Handle calls that return values in multiple non-contiguous locations.
4330 The Irix 6 ABI has examples of this. */
4331 else if (GET_CODE (target) == PARALLEL)
4332 emit_group_load (target, temp, TREE_TYPE (exp),
4333 int_size_in_bytes (TREE_TYPE (exp)));
4334 else if (GET_MODE (temp) == BLKmode)
4335 emit_block_move (target, temp, expr_size (exp),
4336 (call_param_p
4337 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4338 else
4339 {
4340 temp = force_operand (temp, target);
4341 if (temp != target)
4342 emit_move_insn (target, temp);
4343 }
4344 }
4345
4346 return NULL_RTX;
4347 }
4348 \f
4349 /* Examine CTOR to discover:
4350 * how many scalar fields are set to nonzero values,
4351 and place it in *P_NZ_ELTS;
4352 * how many scalar fields are set to non-constant values,
4353 and place it in *P_NC_ELTS;
4354 * how many scalar fields in total are in CTOR,
4355 and place it in *P_ELT_COUNT; and
4356 * if a type is a union, and the initializer from the constructor
4357 is not the largest element in the union, then set *p_must_clear. */
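/* Illustrative example (editorial note, counts are what the walk below
   would typically report for this constructor): for the C initializer
       struct { int a, b, c, d; } s = { 1, 0, 2 };
   there are three scalar elements in total (*p_elt_count == 3), two of
   them nonzero (*p_nz_elts == 2) and none non-constant (*p_nc_elts == 0);
   the uninitialized field d is accounted for by the callers, which
   compare these counts against the type.  */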
4358
4359 static void
4360 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4361 HOST_WIDE_INT *p_nc_elts,
4362 HOST_WIDE_INT *p_elt_count,
4363 bool *p_must_clear)
4364 {
4365 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4366 tree list;
4367
4368 nz_elts = 0;
4369 nc_elts = 0;
4370 elt_count = 0;
4371
4372 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4373 {
4374 tree value = TREE_VALUE (list);
4375 tree purpose = TREE_PURPOSE (list);
4376 HOST_WIDE_INT mult;
4377
4378 mult = 1;
4379 if (TREE_CODE (purpose) == RANGE_EXPR)
4380 {
4381 tree lo_index = TREE_OPERAND (purpose, 0);
4382 tree hi_index = TREE_OPERAND (purpose, 1);
4383
4384 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4385 mult = (tree_low_cst (hi_index, 1)
4386 - tree_low_cst (lo_index, 1) + 1);
4387 }
4388
4389 switch (TREE_CODE (value))
4390 {
4391 case CONSTRUCTOR:
4392 {
4393 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4394 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4395 nz_elts += mult * nz;
4396 nc_elts += mult * nc;
4397 elt_count += mult * ic;
4398 }
4399 break;
4400
4401 case INTEGER_CST:
4402 case REAL_CST:
4403 if (!initializer_zerop (value))
4404 nz_elts += mult;
4405 elt_count += mult;
4406 break;
4407
4408 case STRING_CST:
4409 nz_elts += mult * TREE_STRING_LENGTH (value);
4410 elt_count += mult * TREE_STRING_LENGTH (value);
4411 break;
4412
4413 case COMPLEX_CST:
4414 if (!initializer_zerop (TREE_REALPART (value)))
4415 nz_elts += mult;
4416 if (!initializer_zerop (TREE_IMAGPART (value)))
4417 nz_elts += mult;
4418 elt_count += mult;
4419 break;
4420
4421 case VECTOR_CST:
4422 {
4423 tree v;
4424 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4425 {
4426 if (!initializer_zerop (TREE_VALUE (v)))
4427 nz_elts += mult;
4428 elt_count += mult;
4429 }
4430 }
4431 break;
4432
4433 default:
4434 nz_elts += mult;
4435 elt_count += mult;
4436 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4437 nc_elts += mult;
4438 break;
4439 }
4440 }
4441
4442 if (!*p_must_clear
4443 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4444 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4445 {
4446 tree init_sub_type;
4447 bool clear_this = true;
4448
4449 list = CONSTRUCTOR_ELTS (ctor);
4450 if (list)
4451 {
4452 /* We don't expect more than one element of the union to be
4453 initialized. Not sure what we should do otherwise... */
4454 gcc_assert (TREE_CHAIN (list) == NULL);
4455
4456 init_sub_type = TREE_TYPE (TREE_VALUE (list));
4457
4458 /* ??? We could look at each element of the union, and find the
4459 largest element. Which would avoid comparing the size of the
4460 initialized element against any tail padding in the union.
4461 Doesn't seem worth the effort... */
4462 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4463 TYPE_SIZE (init_sub_type)) == 1)
4464 {
4465 /* And now we have to find out if the element itself is fully
4466 constructed. E.g. for union { struct { int a, b; } s; } u
4467 = { .s = { .a = 1 } }. */
4468 if (elt_count == count_type_elements (init_sub_type))
4469 clear_this = false;
4470 }
4471 }
4472
4473 *p_must_clear = clear_this;
4474 }
4475
4476 *p_nz_elts += nz_elts;
4477 *p_nc_elts += nc_elts;
4478 *p_elt_count += elt_count;
4479 }
4480
4481 void
4482 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4483 HOST_WIDE_INT *p_nc_elts,
4484 HOST_WIDE_INT *p_elt_count,
4485 bool *p_must_clear)
4486 {
4487 *p_nz_elts = 0;
4488 *p_nc_elts = 0;
4489 *p_elt_count = 0;
4490 *p_must_clear = false;
4491 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4492 p_must_clear);
4493 }
4494
4495 /* Count the number of scalars in TYPE. Return -1 on overflow or
4496 if TYPE is variable-sized. */
4497
4498 HOST_WIDE_INT
4499 count_type_elements (tree type)
4500 {
4501 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4502 switch (TREE_CODE (type))
4503 {
4504 case ARRAY_TYPE:
4505 {
4506 tree telts = array_type_nelts (type);
4507 if (telts && host_integerp (telts, 1))
4508 {
4509 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4510 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4511 if (n == 0)
4512 return 0;
4513 else if (max / n > m)
4514 return n * m;
4515 }
4516 return -1;
4517 }
4518
4519 case RECORD_TYPE:
4520 {
4521 HOST_WIDE_INT n = 0, t;
4522 tree f;
4523
4524 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4525 if (TREE_CODE (f) == FIELD_DECL)
4526 {
4527 t = count_type_elements (TREE_TYPE (f));
4528 if (t < 0)
4529 return -1;
4530 n += t;
4531 }
4532
4533 return n;
4534 }
4535
4536 case UNION_TYPE:
4537 case QUAL_UNION_TYPE:
4538 {
4539 /* Ho hum. How in the world do we guess here? Clearly it isn't
4540 right to count the fields. Guess based on the number of words. */
4541 HOST_WIDE_INT n = int_size_in_bytes (type);
4542 if (n < 0)
4543 return -1;
4544 return n / UNITS_PER_WORD;
4545 }
4546
4547 case COMPLEX_TYPE:
4548 return 2;
4549
4550 case VECTOR_TYPE:
4551 return TYPE_VECTOR_SUBPARTS (type);
4552
4553 case INTEGER_TYPE:
4554 case REAL_TYPE:
4555 case ENUMERAL_TYPE:
4556 case BOOLEAN_TYPE:
4557 case CHAR_TYPE:
4558 case POINTER_TYPE:
4559 case OFFSET_TYPE:
4560 case REFERENCE_TYPE:
4561 return 1;
4562
4563 case VOID_TYPE:
4564 case METHOD_TYPE:
4565 case FUNCTION_TYPE:
4566 case LANG_TYPE:
4567 default:
4568 gcc_unreachable ();
4569 }
4570 }
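/* Illustrative example (editorial note): for
       struct { int a; double b[4]; }
   this returns 1 + 4 * 1 == 5 scalars, while for a variable-length
   array type it returns -1.  */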
4571
4572 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4573
4574 static int
4575 mostly_zeros_p (tree exp)
4576 {
4577 if (TREE_CODE (exp) == CONSTRUCTOR)
4579 {
4580 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4581 bool must_clear;
4582
4583 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4584 if (must_clear)
4585 return 1;
4586
4587 elts = count_type_elements (TREE_TYPE (exp));
4588
4589 return nz_elts < elts / 4;
4590 }
4591
4592 return initializer_zerop (exp);
4593 }
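/* Editorial sketch (hypothetical helper, not part of GCC): the threshold
   used above, isolated so the arithmetic is easy to see.  "Mostly zeros"
   means fewer than a quarter of the scalars in the type are nonzero.  */
#if 0
static int
sketch_mostly_zeros (HOST_WIDE_INT nz_elts, HOST_WIDE_INT type_elts)
{
  return nz_elts < type_elts / 4;
}
#endif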
4594 \f
4595 /* Helper function for store_constructor.
4596 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4597 TYPE is the type of the CONSTRUCTOR, not the element type.
4598 CLEARED is as for store_constructor.
4599 ALIAS_SET is the alias set to use for any stores.
4600
4601 This provides a recursive shortcut back to store_constructor when it isn't
4602 necessary to go through store_field. This is so that we can pass through
4603 the cleared field to let store_constructor know that we may not have to
4604 clear a substructure if the outer structure has already been cleared. */
4605
4606 static void
4607 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4608 HOST_WIDE_INT bitpos, enum machine_mode mode,
4609 tree exp, tree type, int cleared, int alias_set)
4610 {
4611 if (TREE_CODE (exp) == CONSTRUCTOR
4612 /* We can only call store_constructor recursively if the size and
4613 bit position are on a byte boundary. */
4614 && bitpos % BITS_PER_UNIT == 0
4615 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4616 /* If we have a nonzero bitpos for a register target, then we just
4617 let store_field do the bitfield handling. This is unlikely to
4618 generate unnecessary clear instructions anyway. */
4619 && (bitpos == 0 || MEM_P (target)))
4620 {
4621 if (MEM_P (target))
4622 target
4623 = adjust_address (target,
4624 GET_MODE (target) == BLKmode
4625 || 0 != (bitpos
4626 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4627 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4628
4630 /* Update the alias set, if required. */
4631 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4632 && MEM_ALIAS_SET (target) != 0)
4633 {
4634 target = copy_rtx (target);
4635 set_mem_alias_set (target, alias_set);
4636 }
4637
4638 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4639 }
4640 else
4641 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4642 }
4643
4644 /* Store the value of constructor EXP into the rtx TARGET.
4645 TARGET is either a REG or a MEM; we know it cannot conflict, since
4646 safe_from_p has been called.
4647 CLEARED is true if TARGET is known to have been zero'd.
4648 SIZE is the number of bytes of TARGET we are allowed to modify: this
4649 may not be the same as the size of EXP if we are assigning to a field
4650 which has been packed to exclude padding bits. */
4651
4652 static void
4653 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4654 {
4655 tree type = TREE_TYPE (exp);
4656 #ifdef WORD_REGISTER_OPERATIONS
4657 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4658 #endif
4659
4660 switch (TREE_CODE (type))
4661 {
4662 case RECORD_TYPE:
4663 case UNION_TYPE:
4664 case QUAL_UNION_TYPE:
4665 {
4666 tree elt;
4667
4668 /* If size is zero or the target is already cleared, do nothing. */
4669 if (size == 0 || cleared)
4670 cleared = 1;
4671 /* We either clear the aggregate or indicate the value is dead. */
4672 else if ((TREE_CODE (type) == UNION_TYPE
4673 || TREE_CODE (type) == QUAL_UNION_TYPE)
4674 && ! CONSTRUCTOR_ELTS (exp))
4675 /* If the constructor is empty, clear the union. */
4676 {
4677 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4678 cleared = 1;
4679 }
4680
4681 /* If we are building a static constructor into a register,
4682 set the initial value as zero so we can fold the value into
4683 a constant. But if more than one register is involved,
4684 this probably loses. */
4685 else if (REG_P (target) && TREE_STATIC (exp)
4686 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4687 {
4688 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4689 cleared = 1;
4690 }
4691
4692 /* If the constructor has fewer fields than the structure or
4693 if we are initializing the structure to mostly zeros, clear
4694 the whole structure first. Don't do this if TARGET is a
4695 register whose mode size isn't equal to SIZE since
4696 clear_storage can't handle this case. */
4697 else if (size > 0
4698 && ((list_length (CONSTRUCTOR_ELTS (exp))
4699 != fields_length (type))
4700 || mostly_zeros_p (exp))
4701 && (!REG_P (target)
4702 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4703 == size)))
4704 {
4705 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4706 cleared = 1;
4707 }
4708
4709 if (! cleared)
4710 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4711
4712 /* Store each element of the constructor into the
4713 corresponding field of TARGET. */
4714
4715 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4716 {
4717 tree field = TREE_PURPOSE (elt);
4718 tree value = TREE_VALUE (elt);
4719 enum machine_mode mode;
4720 HOST_WIDE_INT bitsize;
4721 HOST_WIDE_INT bitpos = 0;
4722 tree offset;
4723 rtx to_rtx = target;
4724
4725 /* Just ignore missing fields. We cleared the whole
4726 structure, above, if any fields are missing. */
4727 if (field == 0)
4728 continue;
4729
4730 if (cleared && initializer_zerop (value))
4731 continue;
4732
4733 if (host_integerp (DECL_SIZE (field), 1))
4734 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4735 else
4736 bitsize = -1;
4737
4738 mode = DECL_MODE (field);
4739 if (DECL_BIT_FIELD (field))
4740 mode = VOIDmode;
4741
4742 offset = DECL_FIELD_OFFSET (field);
4743 if (host_integerp (offset, 0)
4744 && host_integerp (bit_position (field), 0))
4745 {
4746 bitpos = int_bit_position (field);
4747 offset = 0;
4748 }
4749 else
4750 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4751
4752 if (offset)
4753 {
4754 rtx offset_rtx;
4755
4756 offset
4757 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4758 make_tree (TREE_TYPE (exp),
4759 target));
4760
4761 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4762 gcc_assert (MEM_P (to_rtx));
4763
4764 #ifdef POINTERS_EXTEND_UNSIGNED
4765 if (GET_MODE (offset_rtx) != Pmode)
4766 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4767 #else
4768 if (GET_MODE (offset_rtx) != ptr_mode)
4769 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4770 #endif
4771
4772 to_rtx = offset_address (to_rtx, offset_rtx,
4773 highest_pow2_factor (offset));
4774 }
4775
4776 #ifdef WORD_REGISTER_OPERATIONS
4777 /* If this initializes a field that is smaller than a
4778 word, at the start of a word, try to widen it to a full
4779 word. This special case allows us to output C++ member
4780 function initializations in a form that the optimizers
4781 can understand. */
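/* Illustrative example (editorial note, assuming a 32-bit big-endian
   word-register target): a 16-bit member initialized to 5 at bit
   position 0 is widened below to a full-word store of 5 << 16, so the
   optimizers see an ordinary word-mode constant move.  */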
4782 if (REG_P (target)
4783 && bitsize < BITS_PER_WORD
4784 && bitpos % BITS_PER_WORD == 0
4785 && GET_MODE_CLASS (mode) == MODE_INT
4786 && TREE_CODE (value) == INTEGER_CST
4787 && exp_size >= 0
4788 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4789 {
4790 tree type = TREE_TYPE (value);
4791
4792 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4793 {
4794 type = lang_hooks.types.type_for_size
4795 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4796 value = convert (type, value);
4797 }
4798
4799 if (BYTES_BIG_ENDIAN)
4800 value
4801 = fold_build2 (LSHIFT_EXPR, type, value,
4802 build_int_cst (NULL_TREE,
4803 BITS_PER_WORD - bitsize));
4804 bitsize = BITS_PER_WORD;
4805 mode = word_mode;
4806 }
4807 #endif
4808
4809 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4810 && DECL_NONADDRESSABLE_P (field))
4811 {
4812 to_rtx = copy_rtx (to_rtx);
4813 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4814 }
4815
4816 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4817 value, type, cleared,
4818 get_alias_set (TREE_TYPE (field)));
4819 }
4820 break;
4821 }
4822 case ARRAY_TYPE:
4823 {
4824 tree elt;
4825 int i;
4826 int need_to_clear;
4827 tree domain;
4828 tree elttype = TREE_TYPE (type);
4829 int const_bounds_p;
4830 HOST_WIDE_INT minelt = 0;
4831 HOST_WIDE_INT maxelt = 0;
4832
4833 domain = TYPE_DOMAIN (type);
4834 const_bounds_p = (TYPE_MIN_VALUE (domain)
4835 && TYPE_MAX_VALUE (domain)
4836 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4837 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4838
4839 /* If we have constant bounds for the range of the type, get them. */
4840 if (const_bounds_p)
4841 {
4842 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4843 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4844 }
4845
4846 /* If the constructor has fewer elements than the array, clear
4847 the whole array first. Similarly if this is a static
4848 constructor of a non-BLKmode object. */
4849 if (cleared)
4850 need_to_clear = 0;
4851 else if (REG_P (target) && TREE_STATIC (exp))
4852 need_to_clear = 1;
4853 else
4854 {
4855 HOST_WIDE_INT count = 0, zero_count = 0;
4856 need_to_clear = ! const_bounds_p;
4857
4858 /* This loop is a more accurate version of the loop in
4859 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4860 is also needed to check for missing elements. */
4861 for (elt = CONSTRUCTOR_ELTS (exp);
4862 elt != NULL_TREE && ! need_to_clear;
4863 elt = TREE_CHAIN (elt))
4864 {
4865 tree index = TREE_PURPOSE (elt);
4866 HOST_WIDE_INT this_node_count;
4867
4868 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4869 {
4870 tree lo_index = TREE_OPERAND (index, 0);
4871 tree hi_index = TREE_OPERAND (index, 1);
4872
4873 if (! host_integerp (lo_index, 1)
4874 || ! host_integerp (hi_index, 1))
4875 {
4876 need_to_clear = 1;
4877 break;
4878 }
4879
4880 this_node_count = (tree_low_cst (hi_index, 1)
4881 - tree_low_cst (lo_index, 1) + 1);
4882 }
4883 else
4884 this_node_count = 1;
4885
4886 count += this_node_count;
4887 if (mostly_zeros_p (TREE_VALUE (elt)))
4888 zero_count += this_node_count;
4889 }
4890
4891 /* Clear the entire array first if there are any missing
4892 elements, or if the incidence of zero elements is >=
4893 75%. */
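/* Illustrative example (editorial note): for
       int a[8] = { 1, 2 };
   count == 2 is smaller than maxelt - minelt + 1 == 8, so the whole
   array is cleared first and only the two explicit elements are
   stored.  */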
4894 if (! need_to_clear
4895 && (count < maxelt - minelt + 1
4896 || 4 * zero_count >= 3 * count))
4897 need_to_clear = 1;
4898 }
4899
4900 if (need_to_clear && size > 0)
4901 {
4902 if (REG_P (target))
4903 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4904 else
4905 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4906 cleared = 1;
4907 }
4908
4909 if (!cleared && REG_P (target))
4910 /* Inform later passes that the old value is dead. */
4911 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4912
4913 /* Store each element of the constructor into the
4914 corresponding element of TARGET, determined by counting the
4915 elements. */
4916 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4917 elt;
4918 elt = TREE_CHAIN (elt), i++)
4919 {
4920 enum machine_mode mode;
4921 HOST_WIDE_INT bitsize;
4922 HOST_WIDE_INT bitpos;
4923 int unsignedp;
4924 tree value = TREE_VALUE (elt);
4925 tree index = TREE_PURPOSE (elt);
4926 rtx xtarget = target;
4927
4928 if (cleared && initializer_zerop (value))
4929 continue;
4930
4931 unsignedp = TYPE_UNSIGNED (elttype);
4932 mode = TYPE_MODE (elttype);
4933 if (mode == BLKmode)
4934 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4935 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4936 : -1);
4937 else
4938 bitsize = GET_MODE_BITSIZE (mode);
4939
4940 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4941 {
4942 tree lo_index = TREE_OPERAND (index, 0);
4943 tree hi_index = TREE_OPERAND (index, 1);
4944 rtx index_r, pos_rtx;
4945 HOST_WIDE_INT lo, hi, count;
4946 tree position;
4947
4948 /* If the range is constant and "small", unroll the loop. */
4949 if (const_bounds_p
4950 && host_integerp (lo_index, 0)
4951 && host_integerp (hi_index, 0)
4952 && (lo = tree_low_cst (lo_index, 0),
4953 hi = tree_low_cst (hi_index, 0),
4954 count = hi - lo + 1,
4955 (!MEM_P (target)
4956 || count <= 2
4957 || (host_integerp (TYPE_SIZE (elttype), 1)
4958 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4959 <= 40 * 8)))))
4960 {
4961 lo -= minelt; hi -= minelt;
4962 for (; lo <= hi; lo++)
4963 {
4964 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4965
4966 if (MEM_P (target)
4967 && !MEM_KEEP_ALIAS_SET_P (target)
4968 && TREE_CODE (type) == ARRAY_TYPE
4969 && TYPE_NONALIASED_COMPONENT (type))
4970 {
4971 target = copy_rtx (target);
4972 MEM_KEEP_ALIAS_SET_P (target) = 1;
4973 }
4974
4975 store_constructor_field
4976 (target, bitsize, bitpos, mode, value, type, cleared,
4977 get_alias_set (elttype));
4978 }
4979 }
4980 else
4981 {
4982 rtx loop_start = gen_label_rtx ();
4983 rtx loop_end = gen_label_rtx ();
4984 tree exit_cond;
4985
4986 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4987 unsignedp = TYPE_UNSIGNED (domain);
4988
4989 index = build_decl (VAR_DECL, NULL_TREE, domain);
4990
4991 index_r
4992 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4993 &unsignedp, 0));
4994 SET_DECL_RTL (index, index_r);
4995 store_expr (lo_index, index_r, 0);
4996
4997 /* Build the head of the loop. */
4998 do_pending_stack_adjust ();
4999 emit_label (loop_start);
5000
5001 /* Assign value to element index. */
5002 position
5003 = convert (ssizetype,
5004 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5005 index, TYPE_MIN_VALUE (domain)));
5006 position = size_binop (MULT_EXPR, position,
5007 convert (ssizetype,
5008 TYPE_SIZE_UNIT (elttype)));
5009
5010 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5011 xtarget = offset_address (target, pos_rtx,
5012 highest_pow2_factor (position));
5013 xtarget = adjust_address (xtarget, mode, 0);
5014 if (TREE_CODE (value) == CONSTRUCTOR)
5015 store_constructor (value, xtarget, cleared,
5016 bitsize / BITS_PER_UNIT);
5017 else
5018 store_expr (value, xtarget, 0);
5019
5020 /* Generate a conditional jump to exit the loop. */
5021 exit_cond = build2 (LT_EXPR, integer_type_node,
5022 index, hi_index);
5023 jumpif (exit_cond, loop_end);
5024
5025 /* Update the loop counter, and jump to the head of
5026 the loop. */
5027 expand_assignment (index,
5028 build2 (PLUS_EXPR, TREE_TYPE (index),
5029 index, integer_one_node));
5030
5031 emit_jump (loop_start);
5032
5033 /* Build the end of the loop. */
5034 emit_label (loop_end);
5035 }
5036 }
5037 else if ((index != 0 && ! host_integerp (index, 0))
5038 || ! host_integerp (TYPE_SIZE (elttype), 1))
5039 {
5040 tree position;
5041
5042 if (index == 0)
5043 index = ssize_int (1);
5044
5045 if (minelt)
5046 index = fold_convert (ssizetype,
5047 fold_build2 (MINUS_EXPR,
5048 TREE_TYPE (index),
5049 index,
5050 TYPE_MIN_VALUE (domain)));
5051
5052 position = size_binop (MULT_EXPR, index,
5053 convert (ssizetype,
5054 TYPE_SIZE_UNIT (elttype)));
5055 xtarget = offset_address (target,
5056 expand_expr (position, 0, VOIDmode, 0),
5057 highest_pow2_factor (position));
5058 xtarget = adjust_address (xtarget, mode, 0);
5059 store_expr (value, xtarget, 0);
5060 }
5061 else
5062 {
5063 if (index != 0)
5064 bitpos = ((tree_low_cst (index, 0) - minelt)
5065 * tree_low_cst (TYPE_SIZE (elttype), 1));
5066 else
5067 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5068
5069 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5070 && TREE_CODE (type) == ARRAY_TYPE
5071 && TYPE_NONALIASED_COMPONENT (type))
5072 {
5073 target = copy_rtx (target);
5074 MEM_KEEP_ALIAS_SET_P (target) = 1;
5075 }
5076 store_constructor_field (target, bitsize, bitpos, mode, value,
5077 type, cleared, get_alias_set (elttype));
5078 }
5079 }
5080 break;
5081 }
5082
5083 case VECTOR_TYPE:
5084 {
5085 tree elt;
5086 int i;
5087 int need_to_clear;
5088 int icode = 0;
5089 tree elttype = TREE_TYPE (type);
5090 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5091 enum machine_mode eltmode = TYPE_MODE (elttype);
5092 HOST_WIDE_INT bitsize;
5093 HOST_WIDE_INT bitpos;
5094 rtvec vector = NULL;
5095 unsigned n_elts;
5096
5097 gcc_assert (eltmode != BLKmode);
5098
5099 n_elts = TYPE_VECTOR_SUBPARTS (type);
5100 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5101 {
5102 enum machine_mode mode = GET_MODE (target);
5103
5104 icode = (int) vec_init_optab->handlers[mode].insn_code;
5105 if (icode != CODE_FOR_nothing)
5106 {
5107 unsigned int i;
5108
5109 vector = rtvec_alloc (n_elts);
5110 for (i = 0; i < n_elts; i++)
5111 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5112 }
5113 }
5114
5115 /* If the constructor has fewer elements than the vector,
5116 clear the whole vector first. Similarly if this is a static
5117 constructor of a non-BLKmode object. */
5118 if (cleared)
5119 need_to_clear = 0;
5120 else if (REG_P (target) && TREE_STATIC (exp))
5121 need_to_clear = 1;
5122 else
5123 {
5124 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5125
5126 for (elt = CONSTRUCTOR_ELTS (exp);
5127 elt != NULL_TREE;
5128 elt = TREE_CHAIN (elt))
5129 {
5130 int n_elts_here = tree_low_cst
5131 (int_const_binop (TRUNC_DIV_EXPR,
5132 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
5133 TYPE_SIZE (elttype), 0), 1);
5134
5135 count += n_elts_here;
5136 if (mostly_zeros_p (TREE_VALUE (elt)))
5137 zero_count += n_elts_here;
5138 }
5139
5140 /* Clear the entire vector first if there are any missing elements,
5141 or if the incidence of zero elements is >= 75%. */
5142 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5143 }
5144
5145 if (need_to_clear && size > 0 && !vector)
5146 {
5147 if (REG_P (target))
5148 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5149 else
5150 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5151 cleared = 1;
5152 }
5153
5154 /* Inform later passes that the old value is dead. */
5155 if (!cleared && REG_P (target))
5156 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5157
5158 /* Store each element of the constructor into the corresponding
5159 element of TARGET, determined by counting the elements. */
5160 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5161 elt;
5162 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5163 {
5164 tree value = TREE_VALUE (elt);
5165 tree index = TREE_PURPOSE (elt);
5166 HOST_WIDE_INT eltpos;
5167
5168 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5169 if (cleared && initializer_zerop (value))
5170 continue;
5171
5172 if (index != 0)
5173 eltpos = tree_low_cst (index, 1);
5174 else
5175 eltpos = i;
5176
5177 if (vector)
5178 {
5179 /* Vector CONSTRUCTORs should only be built from smaller
5180 vectors in the case of BLKmode vectors. */
5181 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5182 RTVEC_ELT (vector, eltpos)
5183 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5184 }
5185 else
5186 {
5187 enum machine_mode value_mode =
5188 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5189 ? TYPE_MODE (TREE_TYPE (value))
5190 : eltmode;
5191 bitpos = eltpos * elt_size;
5192 store_constructor_field (target, bitsize, bitpos,
5193 value_mode, value, type,
5194 cleared, get_alias_set (elttype));
5195 }
5196 }
5197
5198 if (vector)
5199 emit_insn (GEN_FCN (icode)
5200 (target,
5201 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5202 break;
5203 }
5204
5205 default:
5206 gcc_unreachable ();
5207 }
5208 }
5209
5210 /* Store the value of EXP (an expression tree)
5211 into a subfield of TARGET which has mode MODE and occupies
5212 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5213 If MODE is VOIDmode, it means that we are storing into a bit-field.
5214
5215 Always return const0_rtx unless we have something particular to
5216 return.
5217
5218 TYPE is the type of the underlying object.
5219
5220 ALIAS_SET is the alias set for the destination. This value will
5221 (in general) be different from that for TARGET, since TARGET is a
5222 reference to the containing structure. */
5223
5224 static rtx
5225 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5226 enum machine_mode mode, tree exp, tree type, int alias_set)
5227 {
5228 HOST_WIDE_INT width_mask = 0;
5229
5230 if (TREE_CODE (exp) == ERROR_MARK)
5231 return const0_rtx;
5232
5233 /* If we have nothing to store, do nothing unless the expression has
5234 side-effects. */
5235 if (bitsize == 0)
5236 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5237 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5238 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5239
5240 /* If we are storing into an unaligned field of an aligned union that is
5241 in a register, we may have the mode of TARGET being an integer mode but
5242 MODE == BLKmode. In that case, get an aligned object whose size and
5243 alignment are the same as TARGET and store TARGET into it (we can avoid
5244 the store if the field being stored is the entire width of TARGET). Then
5245 call ourselves recursively to store the field into a BLKmode version of
5246 that object. Finally, load from the object into TARGET. This is not
5247 very efficient in general, but should only be slightly more expensive
5248 than the otherwise-required unaligned accesses. Perhaps this can be
5249 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5250 twice, once with emit_move_insn and once via store_field. */
5251
5252 if (mode == BLKmode
5253 && (REG_P (target) || GET_CODE (target) == SUBREG))
5254 {
5255 rtx object = assign_temp (type, 0, 1, 1);
5256 rtx blk_object = adjust_address (object, BLKmode, 0);
5257
5258 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5259 emit_move_insn (object, target);
5260
5261 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5262
5263 emit_move_insn (target, object);
5264
5265 /* We want to return the BLKmode version of the data. */
5266 return blk_object;
5267 }
5268
5269 if (GET_CODE (target) == CONCAT)
5270 {
5271 /* We're storing into a struct containing a single __complex. */
5272
5273 gcc_assert (!bitpos);
5274 return store_expr (exp, target, 0);
5275 }
5276
5277 /* If the structure is in a register or if the component
5278 is a bit field, we cannot use addressing to access it.
5279 Use bit-field techniques or SUBREG to store in it. */
5280
5281 if (mode == VOIDmode
5282 || (mode != BLKmode && ! direct_store[(int) mode]
5283 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5284 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5285 || REG_P (target)
5286 || GET_CODE (target) == SUBREG
5287 /* If the field isn't aligned enough to store as an ordinary memref,
5288 store it as a bit field. */
5289 || (mode != BLKmode
5290 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5291 || bitpos % GET_MODE_ALIGNMENT (mode))
5292 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5293 || (bitpos % BITS_PER_UNIT != 0)))
5294 /* If the RHS and field are a constant size and the size of the
5295 RHS isn't the same size as the bitfield, we must use bitfield
5296 operations. */
5297 || (bitsize >= 0
5298 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5299 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5300 {
5301 rtx temp;
5302
5303 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5304 implies a mask operation. If the precision is the same size as
5305 the field we're storing into, that mask is redundant. This is
5306 particularly common with bit field assignments generated by the
5307 C front end. */
5308 if (TREE_CODE (exp) == NOP_EXPR)
5309 {
5310 tree type = TREE_TYPE (exp);
5311 if (INTEGRAL_TYPE_P (type)
5312 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5313 && bitsize == TYPE_PRECISION (type))
5314 {
5315 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5316 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5317 exp = TREE_OPERAND (exp, 0);
5318 }
5319 }
5320
5321 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5322
5323 /* If BITSIZE is narrower than the size of the type of EXP
5324 we will be narrowing TEMP. Normally, what's wanted are the
5325 low-order bits. However, if EXP's type is a record and this is a
5326 big-endian machine, we want the upper BITSIZE bits. */
5327 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5328 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5329 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5330 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5331 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5332 - bitsize),
5333 NULL_RTX, 1);
5334
5335 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5336 MODE. */
5337 if (mode != VOIDmode && mode != BLKmode
5338 && mode != TYPE_MODE (TREE_TYPE (exp)))
5339 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5340
5341 /* If the modes of TARGET and TEMP are both BLKmode, both
5342 must be in memory and BITPOS must be aligned on a byte
5343 boundary. If so, we simply do a block copy. */
5344 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5345 {
5346 gcc_assert (MEM_P (target) && MEM_P (temp)
5347 && !(bitpos % BITS_PER_UNIT));
5348
5349 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5350 emit_block_move (target, temp,
5351 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5352 / BITS_PER_UNIT),
5353 BLOCK_OP_NORMAL);
5354
5355 return const0_rtx;
5356 }
5357
5358 /* Store the value in the bitfield. */
5359 store_bit_field (target, bitsize, bitpos, mode, temp);
5360
5361 return const0_rtx;
5362 }
5363 else
5364 {
5365 /* Now build a reference to just the desired component. */
5366 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5367
5368 if (to_rtx == target)
5369 to_rtx = copy_rtx (to_rtx);
5370
5371 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5372 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5373 set_mem_alias_set (to_rtx, alias_set);
5374
5375 return store_expr (exp, to_rtx, 0);
5376 }
5377 }
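
/* Illustrative note (a hedged sketch, not compiled as part of GCC): a
   C front-end bit-field assignment such as

	struct s { unsigned int f : 3; } x;
	x.f = v;

   typically reaches store_field with MODE == VOIDmode, BITSIZE == 3 and
   BITPOS giving the field's bit position, so the value is stored with
   store_bit_field above rather than through an ordinary memory
   reference.  */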
5378 \f
5379 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5380 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5381 codes and find the ultimate containing object, which we return.
5382
5383 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5384 bit position, and *PUNSIGNEDP to the signedness of the field.
5385 If the position of the field is variable, we store a tree
5386 giving the variable offset (in units) in *POFFSET.
5387 This offset is in addition to the bit position.
5388 If the position is not variable, we store 0 in *POFFSET.
5389
5390 If any of the extraction expressions is volatile,
5391 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5392
5393 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5394 is a mode that can be used to access the field. In that case, *PBITSIZE
5395 is redundant.
5396
5397 If the field describes a variable-sized object, *PMODE is set to
5398 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5399 this case, but the address of the object can be found.
5400
5401 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5402 look through nodes that serve as markers of a greater alignment than
5403 the one that can be deduced from the expression. These nodes make it
5404 possible for front-ends to prevent temporaries from being created by
5405 the middle-end on alignment considerations. For that purpose, the
5406 normal operating mode at the high level is to always pass FALSE so that
5407 the ultimate containing object is really returned; moreover, the
5408 associated predicate handled_component_p will always return TRUE
5409 on these nodes, thus indicating that they are essentially handled
5410 by get_inner_reference. TRUE should only be passed when the caller
5411 is scanning the expression in order to build another representation
5412 and specifically knows how to handle these nodes; as such, this is
5413 the normal operating mode in the RTL expanders. */
5414
5415 tree
5416 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5417 HOST_WIDE_INT *pbitpos, tree *poffset,
5418 enum machine_mode *pmode, int *punsignedp,
5419 int *pvolatilep, bool keep_aligning)
5420 {
5421 tree size_tree = 0;
5422 enum machine_mode mode = VOIDmode;
5423 tree offset = size_zero_node;
5424 tree bit_offset = bitsize_zero_node;
5425 tree tem;
5426
5427 /* First get the mode, signedness, and size. We do this from just the
5428 outermost expression. */
5429 if (TREE_CODE (exp) == COMPONENT_REF)
5430 {
5431 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5432 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5433 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5434
5435 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5436 }
5437 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5438 {
5439 size_tree = TREE_OPERAND (exp, 1);
5440 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5441 }
5442 else
5443 {
5444 mode = TYPE_MODE (TREE_TYPE (exp));
5445 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5446
5447 if (mode == BLKmode)
5448 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5449 else
5450 *pbitsize = GET_MODE_BITSIZE (mode);
5451 }
5452
5453 if (size_tree != 0)
5454 {
5455 if (! host_integerp (size_tree, 1))
5456 mode = BLKmode, *pbitsize = -1;
5457 else
5458 *pbitsize = tree_low_cst (size_tree, 1);
5459 }
5460
5461 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5462 and find the ultimate containing object. */
5463 while (1)
5464 {
5465 switch (TREE_CODE (exp))
5466 {
5467 case BIT_FIELD_REF:
5468 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5469 TREE_OPERAND (exp, 2));
5470 break;
5471
5472 case COMPONENT_REF:
5473 {
5474 tree field = TREE_OPERAND (exp, 1);
5475 tree this_offset = component_ref_field_offset (exp);
5476
5477 /* If this field hasn't been filled in yet, don't go past it.
5478 This should only happen when folding expressions made during
5479 type construction. */
5480 if (this_offset == 0)
5481 break;
5482
5483 offset = size_binop (PLUS_EXPR, offset, this_offset);
5484 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5485 DECL_FIELD_BIT_OFFSET (field));
5486
5487 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5488 }
5489 break;
5490
5491 case ARRAY_REF:
5492 case ARRAY_RANGE_REF:
5493 {
5494 tree index = TREE_OPERAND (exp, 1);
5495 tree low_bound = array_ref_low_bound (exp);
5496 tree unit_size = array_ref_element_size (exp);
5497
5498 /* We assume all arrays have sizes that are a multiple of a byte.
5499 First subtract the lower bound, if any, in the type of the
5500 index, then convert to sizetype and multiply by the size of
5501 the array element. */
5502 if (! integer_zerop (low_bound))
5503 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5504 index, low_bound);
5505
5506 offset = size_binop (PLUS_EXPR, offset,
5507 size_binop (MULT_EXPR,
5508 convert (sizetype, index),
5509 unit_size));
5510 }
5511 break;
5512
5513 case REALPART_EXPR:
5514 break;
5515
5516 case IMAGPART_EXPR:
5517 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5518 bitsize_int (*pbitsize));
5519 break;
5520
5521 case VIEW_CONVERT_EXPR:
5522 if (keep_aligning && STRICT_ALIGNMENT
5523 && (TYPE_ALIGN (TREE_TYPE (exp))
5524 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5525 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5526 < BIGGEST_ALIGNMENT)
5527 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5528 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5529 goto done;
5530 break;
5531
5532 default:
5533 goto done;
5534 }
5535
5536 /* If any reference in the chain is volatile, the effect is volatile. */
5537 if (TREE_THIS_VOLATILE (exp))
5538 *pvolatilep = 1;
5539
5540 exp = TREE_OPERAND (exp, 0);
5541 }
5542 done:
5543
5544 /* If OFFSET is constant, see if we can return the whole thing as a
5545 constant bit position. Otherwise, split it up. */
5546 if (host_integerp (offset, 0)
5547 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5548 bitsize_unit_node))
5549 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5550 && host_integerp (tem, 0))
5551 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5552 else
5553 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5554
5555 *pmode = mode;
5556 return exp;
5557 }
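
/* A hedged usage sketch (illustration only; the variable names are
   hypothetical): a caller wanting the position of a reference might write

	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	enum machine_mode mode;
	int unsignedp, volatilep = 0;

	tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
					 &mode, &unsignedp, &volatilep,
					 false);

   For a COMPONENT_REF such as s.f, where F is the second int field of S
   on a target with 32-bit int (assumed here), BASE is the reference to
   S, BITPOS is 32, BITSIZE is 32 and OFFSET is 0 since the position is
   constant.  */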
5558
5559 /* Return a tree of sizetype representing the size, in bytes, of the element
5560 of EXP, an ARRAY_REF. */
5561
5562 tree
5563 array_ref_element_size (tree exp)
5564 {
5565 tree aligned_size = TREE_OPERAND (exp, 3);
5566 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5567
5568 /* If a size was specified in the ARRAY_REF, it's the size measured
5569 in alignment units of the element type. So multiply by that value. */
5570 if (aligned_size)
5571 {
5572 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5573 sizetype from another type of the same width and signedness. */
5574 if (TREE_TYPE (aligned_size) != sizetype)
5575 aligned_size = fold_convert (sizetype, aligned_size);
5576 return size_binop (MULT_EXPR, aligned_size,
5577 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5578 }
5579
5580 /* Otherwise, take the size from that of the element type. Substitute
5581 any PLACEHOLDER_EXPR that we have. */
5582 else
5583 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5584 }
5585
5586 /* Return a tree representing the lower bound of the array mentioned in
5587 EXP, an ARRAY_REF. */
5588
5589 tree
5590 array_ref_low_bound (tree exp)
5591 {
5592 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5593
5594 /* If a lower bound is specified in EXP, use it. */
5595 if (TREE_OPERAND (exp, 2))
5596 return TREE_OPERAND (exp, 2);
5597
5598 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5599 substituting for a PLACEHOLDER_EXPR as needed. */
5600 if (domain_type && TYPE_MIN_VALUE (domain_type))
5601 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5602
5603 /* Otherwise, return a zero of the appropriate type. */
5604 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5605 }
5606
5607 /* Return a tree representing the upper bound of the array mentioned in
5608 EXP, an ARRAY_REF. */
5609
5610 tree
5611 array_ref_up_bound (tree exp)
5612 {
5613 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5614
5615 /* If there is a domain type and it has an upper bound, use it, substituting
5616 for a PLACEHOLDER_EXPR as needed. */
5617 if (domain_type && TYPE_MAX_VALUE (domain_type))
5618 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5619
5620 /* Otherwise fail. */
5621 return NULL_TREE;
5622 }
5623
5624 /* Return a tree representing the offset, in bytes, of the field referenced
5625 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5626
5627 tree
5628 component_ref_field_offset (tree exp)
5629 {
5630 tree aligned_offset = TREE_OPERAND (exp, 2);
5631 tree field = TREE_OPERAND (exp, 1);
5632
5633 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5634 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5635 value. */
5636 if (aligned_offset)
5637 {
5638 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5639 sizetype from another type of the same width and signedness. */
5640 if (TREE_TYPE (aligned_offset) != sizetype)
5641 aligned_offset = fold_convert (sizetype, aligned_offset);
5642 return size_binop (MULT_EXPR, aligned_offset,
5643 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5644 }
5645
5646 /* Otherwise, take the offset from that of the field. Substitute
5647 any PLACEHOLDER_EXPR that we have. */
5648 else
5649 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5650 }
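
/* Hedged example (assuming BITS_PER_UNIT is 8): if operand 2 of the
   COMPONENT_REF is 3 and DECL_OFFSET_ALIGN (FIELD) is 64, the byte
   offset returned is 3 * (64 / 8) = 24.  When operand 2 is absent,
   DECL_FIELD_OFFSET of FIELD is used directly, with any
   PLACEHOLDER_EXPR substituted.  */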
5651
5652 /* Return 1 if T is an expression that get_inner_reference handles. */
5653
5654 int
5655 handled_component_p (tree t)
5656 {
5657 switch (TREE_CODE (t))
5658 {
5659 case BIT_FIELD_REF:
5660 case COMPONENT_REF:
5661 case ARRAY_REF:
5662 case ARRAY_RANGE_REF:
5663 case VIEW_CONVERT_EXPR:
5664 case REALPART_EXPR:
5665 case IMAGPART_EXPR:
5666 return 1;
5667
5668 default:
5669 return 0;
5670 }
5671 }
5672 \f
5673 /* Given an rtx VALUE that may contain additions and multiplications, return
5674 an equivalent value that just refers to a register, memory, or constant.
5675 This is done by generating instructions to perform the arithmetic and
5676 returning a pseudo-register containing the value.
5677
5678 The returned value may be a REG, SUBREG, MEM or constant. */
5679
5680 rtx
5681 force_operand (rtx value, rtx target)
5682 {
5683 rtx op1, op2;
5684 /* Use subtarget as the target for operand 0 of a binary operation. */
5685 rtx subtarget = get_subtarget (target);
5686 enum rtx_code code = GET_CODE (value);
5687
5688 /* Check for subreg applied to an expression produced by loop optimizer. */
5689 if (code == SUBREG
5690 && !REG_P (SUBREG_REG (value))
5691 && !MEM_P (SUBREG_REG (value)))
5692 {
5693 value = simplify_gen_subreg (GET_MODE (value),
5694 force_reg (GET_MODE (SUBREG_REG (value)),
5695 force_operand (SUBREG_REG (value),
5696 NULL_RTX)),
5697 GET_MODE (SUBREG_REG (value)),
5698 SUBREG_BYTE (value));
5699 code = GET_CODE (value);
5700 }
5701
5702 /* Check for a PIC address load. */
5703 if ((code == PLUS || code == MINUS)
5704 && XEXP (value, 0) == pic_offset_table_rtx
5705 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5706 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5707 || GET_CODE (XEXP (value, 1)) == CONST))
5708 {
5709 if (!subtarget)
5710 subtarget = gen_reg_rtx (GET_MODE (value));
5711 emit_move_insn (subtarget, value);
5712 return subtarget;
5713 }
5714
5715 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5716 {
5717 if (!target)
5718 target = gen_reg_rtx (GET_MODE (value));
5719 convert_move (target, force_operand (XEXP (value, 0), NULL),
5720 code == ZERO_EXTEND);
5721 return target;
5722 }
5723
5724 if (ARITHMETIC_P (value))
5725 {
5726 op2 = XEXP (value, 1);
5727 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5728 subtarget = 0;
5729 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5730 {
5731 code = PLUS;
5732 op2 = negate_rtx (GET_MODE (value), op2);
5733 }
5734
5735 /* Check for an addition with OP2 a constant integer and our first
5736 operand a PLUS of a virtual register and something else. In that
5737 case, we want to emit the sum of the virtual register and the
5738 constant first and then add the other value. This allows virtual
5739 register instantiation to simply modify the constant rather than
5740 creating another one around this addition. */
5741 if (code == PLUS && GET_CODE (op2) == CONST_INT
5742 && GET_CODE (XEXP (value, 0)) == PLUS
5743 && REG_P (XEXP (XEXP (value, 0), 0))
5744 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5745 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5746 {
5747 rtx temp = expand_simple_binop (GET_MODE (value), code,
5748 XEXP (XEXP (value, 0), 0), op2,
5749 subtarget, 0, OPTAB_LIB_WIDEN);
5750 return expand_simple_binop (GET_MODE (value), code, temp,
5751 force_operand (XEXP (XEXP (value,
5752 0), 1), 0),
5753 target, 0, OPTAB_LIB_WIDEN);
5754 }
5755
5756 op1 = force_operand (XEXP (value, 0), subtarget);
5757 op2 = force_operand (op2, NULL_RTX);
5758 switch (code)
5759 {
5760 case MULT:
5761 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5762 case DIV:
5763 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5764 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5765 target, 1, OPTAB_LIB_WIDEN);
5766 else
5767 return expand_divmod (0,
5768 FLOAT_MODE_P (GET_MODE (value))
5769 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5770 GET_MODE (value), op1, op2, target, 0);
5771 break;
5772 case MOD:
5773 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5774 target, 0);
5775 break;
5776 case UDIV:
5777 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5778 target, 1);
5779 break;
5780 case UMOD:
5781 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5782 target, 1);
5783 break;
5784 case ASHIFTRT:
5785 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5786 target, 0, OPTAB_LIB_WIDEN);
5787 break;
5788 default:
5789 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5790 target, 1, OPTAB_LIB_WIDEN);
5791 }
5792 }
5793 if (UNARY_P (value))
5794 {
5795 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5796 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5797 }
5798
5799 #ifdef INSN_SCHEDULING
5800 /* On machines that have insn scheduling, we want all memory references to be
5801 explicit, so we need to deal with such paradoxical SUBREGs. */
5802 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5803 && (GET_MODE_SIZE (GET_MODE (value))
5804 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5805 value
5806 = simplify_gen_subreg (GET_MODE (value),
5807 force_reg (GET_MODE (SUBREG_REG (value)),
5808 force_operand (SUBREG_REG (value),
5809 NULL_RTX)),
5810 GET_MODE (SUBREG_REG (value)),
5811 SUBREG_BYTE (value));
5812 #endif
5813
5814 return value;
5815 }
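
/* Illustrative sketch (not compiled; the mode and register number are
   made up): given

	(plus:SI (reg:SI 60) (const_int 4))

   force_operand falls into the ARITHMETIC_P case above, emits the
   addition via expand_simple_binop, and returns a pseudo register (or
   TARGET) holding the sum, so callers obtain an operand usable where a
   general expression is not allowed.  */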
5816 \f
5817 /* Subroutine of expand_expr: return nonzero iff there is no way that
5818 EXP can reference X, which is being modified. TOP_P is nonzero if this
5819 call is going to be used to determine whether we need a temporary
5820 for EXP, as opposed to a recursive call to this function.
5821
5822 It is always safe for this routine to return zero since it merely
5823 searches for optimization opportunities. */
5824
5825 int
5826 safe_from_p (rtx x, tree exp, int top_p)
5827 {
5828 rtx exp_rtl = 0;
5829 int i, nops;
5830
5831 if (x == 0
5832 /* If EXP has varying size, we MUST use a target since we currently
5833 have no way of allocating temporaries of variable size
5834 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5835 So we assume here that something at a higher level has prevented a
5836 clash. This is somewhat bogus, but the best we can do. Only
5837 do this when X is BLKmode and when we are at the top level. */
5838 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5839 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5840 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5841 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5842 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5843 != INTEGER_CST)
5844 && GET_MODE (x) == BLKmode)
5845 /* If X is in the outgoing argument area, it is always safe. */
5846 || (MEM_P (x)
5847 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5848 || (GET_CODE (XEXP (x, 0)) == PLUS
5849 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5850 return 1;
5851
5852 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5853 find the underlying pseudo. */
5854 if (GET_CODE (x) == SUBREG)
5855 {
5856 x = SUBREG_REG (x);
5857 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5858 return 0;
5859 }
5860
5861 /* Now look at our tree code and possibly recurse. */
5862 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5863 {
5864 case tcc_declaration:
5865 exp_rtl = DECL_RTL_IF_SET (exp);
5866 break;
5867
5868 case tcc_constant:
5869 return 1;
5870
5871 case tcc_exceptional:
5872 if (TREE_CODE (exp) == TREE_LIST)
5873 {
5874 while (1)
5875 {
5876 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5877 return 0;
5878 exp = TREE_CHAIN (exp);
5879 if (!exp)
5880 return 1;
5881 if (TREE_CODE (exp) != TREE_LIST)
5882 return safe_from_p (x, exp, 0);
5883 }
5884 }
5885 else if (TREE_CODE (exp) == ERROR_MARK)
5886 return 1; /* An already-visited SAVE_EXPR? */
5887 else
5888 return 0;
5889
5890 case tcc_statement:
5891 /* The only case we look at here is the DECL_INITIAL inside a
5892 DECL_EXPR. */
5893 return (TREE_CODE (exp) != DECL_EXPR
5894 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5895 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5896 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5897
5898 case tcc_binary:
5899 case tcc_comparison:
5900 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5901 return 0;
5902 /* Fall through. */
5903
5904 case tcc_unary:
5905 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5906
5907 case tcc_expression:
5908 case tcc_reference:
5909 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5910 the expression. If it is set, we conflict iff we are that rtx or
5911 both are in memory. Otherwise, we check all operands of the
5912 expression recursively. */
5913
5914 switch (TREE_CODE (exp))
5915 {
5916 case ADDR_EXPR:
5917 /* If the operand is static or we are static, we can't conflict.
5918 Likewise if we don't conflict with the operand at all. */
5919 if (staticp (TREE_OPERAND (exp, 0))
5920 || TREE_STATIC (exp)
5921 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5922 return 1;
5923
5924 /* Otherwise, the only way this can conflict is if we are taking
5925 the address of a DECL whose address is part of X, which is
5926 very rare. */
5927 exp = TREE_OPERAND (exp, 0);
5928 if (DECL_P (exp))
5929 {
5930 if (!DECL_RTL_SET_P (exp)
5931 || !MEM_P (DECL_RTL (exp)))
5932 return 0;
5933 else
5934 exp_rtl = XEXP (DECL_RTL (exp), 0);
5935 }
5936 break;
5937
5938 case MISALIGNED_INDIRECT_REF:
5939 case ALIGN_INDIRECT_REF:
5940 case INDIRECT_REF:
5941 if (MEM_P (x)
5942 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5943 get_alias_set (exp)))
5944 return 0;
5945 break;
5946
5947 case CALL_EXPR:
5948 /* Assume that the call will clobber all hard registers and
5949 all of memory. */
5950 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5951 || MEM_P (x))
5952 return 0;
5953 break;
5954
5955 case WITH_CLEANUP_EXPR:
5956 case CLEANUP_POINT_EXPR:
5957 /* Lowered by gimplify.c. */
5958 gcc_unreachable ();
5959
5960 case SAVE_EXPR:
5961 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5962
5963 default:
5964 break;
5965 }
5966
5967 /* If we have an rtx, we do not need to scan our operands. */
5968 if (exp_rtl)
5969 break;
5970
5971 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5972 for (i = 0; i < nops; i++)
5973 if (TREE_OPERAND (exp, i) != 0
5974 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5975 return 0;
5976
5977 /* If this is a language-specific tree code, it may require
5978 special handling. */
5979 if ((unsigned int) TREE_CODE (exp)
5980 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5981 && !lang_hooks.safe_from_p (x, exp))
5982 return 0;
5983 break;
5984
5985 case tcc_type:
5986 /* Should never get a type here. */
5987 gcc_unreachable ();
5988 }
5989
5990 /* If we have an rtl, find any enclosed object. Then see if we conflict
5991 with it. */
5992 if (exp_rtl)
5993 {
5994 if (GET_CODE (exp_rtl) == SUBREG)
5995 {
5996 exp_rtl = SUBREG_REG (exp_rtl);
5997 if (REG_P (exp_rtl)
5998 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5999 return 0;
6000 }
6001
6002 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6003 are memory and they conflict. */
6004 return ! (rtx_equal_p (x, exp_rtl)
6005 || (MEM_P (x) && MEM_P (exp_rtl)
6006 && true_dependence (exp_rtl, VOIDmode, x,
6007 rtx_addr_varies_p)));
6008 }
6009
6010 /* If we reach here, it is safe. */
6011 return 1;
6012 }
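
/* Hedged example of the intent: when expanding X = F (X) where X lives
   in memory, safe_from_p (DECL_RTL of X, the CALL_EXPR, 1) returns 0
   because the CALL_EXPR case above assumes a call clobbers all of
   memory; the caller then knows it must expand into a temporary rather
   than directly into X.  */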
6013
6014 \f
6015 /* Return the highest power of two that EXP is known to be a multiple of.
6016 This is used in updating alignment of MEMs in array references. */
6017
6018 static unsigned HOST_WIDE_INT
6019 highest_pow2_factor (tree exp)
6020 {
6021 unsigned HOST_WIDE_INT c0, c1;
6022
6023 switch (TREE_CODE (exp))
6024 {
6025 case INTEGER_CST:
6026 /* We can find the lowest bit that's a one. If the low
6027 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6028 We need to handle this case since we can find it in a COND_EXPR,
6029 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6030 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6031 later ICE. */
6032 if (TREE_CONSTANT_OVERFLOW (exp))
6033 return BIGGEST_ALIGNMENT;
6034 else
6035 {
6036 /* Note: tree_low_cst is intentionally not used here, since
6037 we don't care about the upper bits. */
6038 c0 = TREE_INT_CST_LOW (exp);
6039 c0 &= -c0;
6040 return c0 ? c0 : BIGGEST_ALIGNMENT;
6041 }
6042 break;
6043
6044 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6045 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6046 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6047 return MIN (c0, c1);
6048
6049 case MULT_EXPR:
6050 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6051 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6052 return c0 * c1;
6053
6054 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6055 case CEIL_DIV_EXPR:
6056 if (integer_pow2p (TREE_OPERAND (exp, 1))
6057 && host_integerp (TREE_OPERAND (exp, 1), 1))
6058 {
6059 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6060 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6061 return MAX (1, c0 / c1);
6062 }
6063 break;
6064
6065 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6066 case SAVE_EXPR:
6067 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6068
6069 case COMPOUND_EXPR:
6070 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6071
6072 case COND_EXPR:
6073 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6074 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6075 return MIN (c0, c1);
6076
6077 default:
6078 break;
6079 }
6080
6081 return 1;
6082 }
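
/* Worked example (illustration only): for the INTEGER_CST 24, c0 & -c0
   isolates the lowest set bit of 0b11000, giving 8, so an address known
   to be a multiple of 24 is known to be 8-byte aligned.  For a
   MULT_EXPR the factors of the operands multiply, while for a PLUS_EXPR
   or MINUS_EXPR the result is the MIN of the operands' factors.  */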
6083
6084 /* Similar, except that the alignment requirements of TARGET are
6085 taken into account. Assume it is at least as aligned as its
6086 type, unless it is a COMPONENT_REF in which case the layout of
6087 the structure gives the alignment. */
6088
6089 static unsigned HOST_WIDE_INT
6090 highest_pow2_factor_for_target (tree target, tree exp)
6091 {
6092 unsigned HOST_WIDE_INT target_align, factor;
6093
6094 factor = highest_pow2_factor (exp);
6095 if (TREE_CODE (target) == COMPONENT_REF)
6096 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6097 else
6098 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6099 return MAX (factor, target_align);
6100 }
6101 \f
6102 /* Expands variable VAR. */
6103
6104 void
6105 expand_var (tree var)
6106 {
6107 if (DECL_EXTERNAL (var))
6108 return;
6109
6110 if (TREE_STATIC (var))
6111 /* If this is an inlined copy of a static local variable,
6112 look up the original decl. */
6113 var = DECL_ORIGIN (var);
6114
6115 if (TREE_STATIC (var)
6116 ? !TREE_ASM_WRITTEN (var)
6117 : !DECL_RTL_SET_P (var))
6118 {
6119 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6120 /* Should be ignored. */;
6121 else if (lang_hooks.expand_decl (var))
6122 /* OK. */;
6123 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6124 expand_decl (var);
6125 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6126 rest_of_decl_compilation (var, 0, 0);
6127 else
6128 /* No expansion needed. */
6129 gcc_assert (TREE_CODE (var) == TYPE_DECL
6130 || TREE_CODE (var) == CONST_DECL
6131 || TREE_CODE (var) == FUNCTION_DECL
6132 || TREE_CODE (var) == LABEL_DECL);
6133 }
6134 }
6135
6136 /* Subroutine of expand_expr. Expand the two operands of a binary
6137 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6138 The value may be stored in TARGET if TARGET is nonzero. The
6139 MODIFIER argument is as documented by expand_expr. */
6140
6141 static void
6142 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6143 enum expand_modifier modifier)
6144 {
6145 if (! safe_from_p (target, exp1, 1))
6146 target = 0;
6147 if (operand_equal_p (exp0, exp1, 0))
6148 {
6149 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6150 *op1 = copy_rtx (*op0);
6151 }
6152 else
6153 {
6154 /* If we need to preserve evaluation order, copy exp0 into its own
6155 temporary variable so that it can't be clobbered by exp1. */
6156 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6157 exp0 = save_expr (exp0);
6158 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6159 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6160 }
6161 }
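
/* Hedged illustration: with flag_evaluation_order set (e.g. by a front
   end that requires left-to-right evaluation) and operands like A and
   F () where the call may modify A, EXP0 is wrapped in a SAVE_EXPR so A
   is evaluated into its own temporary before the call, instead of being
   clobbered by the expansion of EXP1.  */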
6162
6163 \f
6164 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6165 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6166
6167 static rtx
6168 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6169 enum expand_modifier modifier)
6170 {
6171 rtx result, subtarget;
6172 tree inner, offset;
6173 HOST_WIDE_INT bitsize, bitpos;
6174 int volatilep, unsignedp;
6175 enum machine_mode mode1;
6176
6177 /* If we are taking the address of a constant and are at the top level,
6178 we have to use output_constant_def since we can't call force_const_mem
6179 at top level. */
6180 /* ??? This should be considered a front-end bug. We should not be
6181 generating ADDR_EXPR of something that isn't an LVALUE. The only
6182 exception here is STRING_CST. */
6183 if (TREE_CODE (exp) == CONSTRUCTOR
6184 || CONSTANT_CLASS_P (exp))
6185 return XEXP (output_constant_def (exp, 0), 0);
6186
6187 /* Everything must be something allowed by is_gimple_addressable. */
6188 switch (TREE_CODE (exp))
6189 {
6190 case INDIRECT_REF:
6191 /* This case will happen via recursion for &a->b. */
6192 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6193
6194 case CONST_DECL:
6195 /* Recurse and make the output_constant_def clause above handle this. */
6196 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6197 tmode, modifier);
6198
6199 case REALPART_EXPR:
6200 /* The real part of the complex number is always first, therefore
6201 the address is the same as the address of the parent object. */
6202 offset = 0;
6203 bitpos = 0;
6204 inner = TREE_OPERAND (exp, 0);
6205 break;
6206
6207 case IMAGPART_EXPR:
6208 /* The imaginary part of the complex number is always second.
6209 The expression is therefore always offset by the size of the
6210 scalar type. */
6211 offset = 0;
6212 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6213 inner = TREE_OPERAND (exp, 0);
6214 break;
6215
6216 default:
6217 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6218 expand_expr, as that can have various side effects; LABEL_DECLs, for
6219 example, may not have their DECL_RTL set yet. Assume language
6220 specific tree nodes can be expanded in some interesting way. */
6221 if (DECL_P (exp)
6222 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6223 {
6224 result = expand_expr (exp, target, tmode,
6225 modifier == EXPAND_INITIALIZER
6226 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6227
6228 /* If the DECL isn't in memory, then the DECL wasn't properly
6229 marked TREE_ADDRESSABLE, which will be either a front-end
6230 or a tree optimizer bug. */
6231 gcc_assert (MEM_P (result));
6232 result = XEXP (result, 0);
6233
6234 /* ??? Is this needed anymore? */
6235 if (DECL_P (exp) && ! TREE_USED (exp))
6236 {
6237 assemble_external (exp);
6238 TREE_USED (exp) = 1;
6239 }
6240
6241 if (modifier != EXPAND_INITIALIZER
6242 && modifier != EXPAND_CONST_ADDRESS)
6243 result = force_operand (result, target);
6244 return result;
6245 }
6246
6247 /* Pass FALSE as the last argument to get_inner_reference although
6248 we are expanding to RTL. The rationale is that we know how to
6249 handle "aligning nodes" here: we can just bypass them because
6250 they won't change the final object whose address will be returned
6251 (they actually exist only for that purpose). */
6252 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6253 &mode1, &unsignedp, &volatilep, false);
6254 break;
6255 }
6256
6257 /* We must have made progress. */
6258 gcc_assert (inner != exp);
6259
6260 subtarget = offset || bitpos ? NULL_RTX : target;
6261 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6262
6263 if (offset)
6264 {
6265 rtx tmp;
6266
6267 if (modifier != EXPAND_NORMAL)
6268 result = force_operand (result, NULL);
6269 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6270
6271 result = convert_memory_address (tmode, result);
6272 tmp = convert_memory_address (tmode, tmp);
6273
6274 if (modifier == EXPAND_SUM)
6275 result = gen_rtx_PLUS (tmode, result, tmp);
6276 else
6277 {
6278 subtarget = bitpos ? NULL_RTX : target;
6279 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6280 1, OPTAB_LIB_WIDEN);
6281 }
6282 }
6283
6284 if (bitpos)
6285 {
6286 /* Someone beforehand should have rejected taking the address
6287 of such an object. */
6288 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6289
6290 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6291 if (modifier < EXPAND_SUM)
6292 result = force_operand (result, target);
6293 }
6294
6295 return result;
6296 }
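
/* Illustrative example (assuming a 64-bit double and 8-bit units): for
   &__imag__ c where C has type _Complex double, the IMAGPART_EXPR case
   above sets BITPOS to 64, the recursive call yields the address of C,
   and plus_constant then adds 8 bytes to form the final address.  */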
6297
6298 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6299 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6300
6301 static rtx
6302 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6303 enum expand_modifier modifier)
6304 {
6305 enum machine_mode rmode;
6306 rtx result;
6307
6308 /* Target mode of VOIDmode says "whatever's natural". */
6309 if (tmode == VOIDmode)
6310 tmode = TYPE_MODE (TREE_TYPE (exp));
6311
6312 /* We can get called with some Weird Things if the user does silliness
6313 like "(short) &a". In that case, convert_memory_address won't do
6314 the right thing, so ignore the given target mode. */
6315 if (tmode != Pmode && tmode != ptr_mode)
6316 tmode = Pmode;
6317
6318 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6319 tmode, modifier);
6320
6321 /* Despite expand_expr's claim that TMODE may be ignored when not
6322 strictly convenient, things break if we don't honor it. Note
6323 that combined with the above, we only do this for pointer modes. */
6324 rmode = GET_MODE (result);
6325 if (rmode == VOIDmode)
6326 rmode = tmode;
6327 if (rmode != tmode)
6328 result = convert_memory_address (tmode, result);
6329
6330 return result;
6331 }
6332
6333
6334 /* expand_expr: generate code for computing expression EXP.
6335 An rtx for the computed value is returned. The value is never null.
6336 In the case of a void EXP, const0_rtx is returned.
6337
6338 The value may be stored in TARGET if TARGET is nonzero.
6339 TARGET is just a suggestion; callers must assume that
6340 the rtx returned may not be the same as TARGET.
6341
6342 If TARGET is CONST0_RTX, it means that the value will be ignored.
6343
6344 If TMODE is not VOIDmode, it suggests generating the
6345 result in mode TMODE. But this is done only when convenient.
6346 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6347 TMODE is just a suggestion; callers must assume that
6348 the rtx returned may not have mode TMODE.
6349
6350 Note that TARGET may have neither TMODE nor MODE. In that case, it
6351 probably will not be used.
6352
6353 If MODIFIER is EXPAND_SUM then when EXP is an addition
6354 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6355 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6356 products as above, or REG or MEM, or constant.
6357 Ordinarily in such cases we would output mul or add instructions
6358 and then return a pseudo reg containing the sum.
6359
6360 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6361 it also marks a label as absolutely required (it can't be dead).
6362 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6363 This is used for outputting expressions used in initializers.
6364
6365 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6366 with a constant address even if that address is not normally legitimate.
6367 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6368
6369 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6370 a call parameter. Such targets require special care as we haven't yet
6371 marked TARGET so that it's safe from being trashed by libcalls. We
6372 don't want to use TARGET for anything but the final result;
6373 Intermediate values must go elsewhere. Additionally, calls to
6374 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6375
6376 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6377 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6378 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6379 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6380 recursively. */
6381
6382 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6383 enum expand_modifier, rtx *);
6384
6385 rtx
6386 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6387 enum expand_modifier modifier, rtx *alt_rtl)
6388 {
6389 int rn = -1;
6390 rtx ret, last = NULL;
6391
6392 /* Handle ERROR_MARK before anybody tries to access its type. */
6393 if (TREE_CODE (exp) == ERROR_MARK
6394 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6395 {
6396 ret = CONST0_RTX (tmode);
6397 return ret ? ret : const0_rtx;
6398 }
6399
6400 if (flag_non_call_exceptions)
6401 {
6402 rn = lookup_stmt_eh_region (exp);
6403 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6404 if (rn >= 0)
6405 last = get_last_insn ();
6406 }
6407
6408 /* If this is an expression of some kind and it has an associated line
6409 number, then emit the line number before expanding the expression.
6410
6411 We need to save and restore the file and line information so that
6412 errors discovered during expansion are emitted with the right
6413 information. It would be better if the diagnostic routines
6414 used the file/line information embedded in the tree nodes rather
6415 than globals. */
6416 if (cfun && EXPR_HAS_LOCATION (exp))
6417 {
6418 location_t saved_location = input_location;
6419 input_location = EXPR_LOCATION (exp);
6420 emit_line_note (input_location);
6421
6422 /* Record where the insns produced belong. */
6423 record_block_change (TREE_BLOCK (exp));
6424
6425 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6426
6427 input_location = saved_location;
6428 }
6429 else
6430 {
6431 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6432 }
6433
6434 /* If using non-call exceptions, mark all insns that may trap.
6435 expand_call() will mark CALL_INSNs before we get to this code,
6436 but it doesn't handle libcalls, and these may trap. */
6437 if (rn >= 0)
6438 {
6439 rtx insn;
6440 for (insn = next_real_insn (last); insn;
6441 insn = next_real_insn (insn))
6442 {
6443 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6444 /* If we want exceptions for non-call insns, any
6445 may_trap_p instruction may throw. */
6446 && GET_CODE (PATTERN (insn)) != CLOBBER
6447 && GET_CODE (PATTERN (insn)) != USE
6448 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6449 {
6450 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6451 REG_NOTES (insn));
6452 }
6453 }
6454 }
6455
6456 return ret;
6457 }
6458
6459 static rtx
6460 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6461 enum expand_modifier modifier, rtx *alt_rtl)
6462 {
6463 rtx op0, op1, temp;
6464 tree type = TREE_TYPE (exp);
6465 int unsignedp;
6466 enum machine_mode mode;
6467 enum tree_code code = TREE_CODE (exp);
6468 optab this_optab;
6469 rtx subtarget, original_target;
6470 int ignore;
6471 tree context;
6472 bool reduce_bit_field = false;
6473 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6474 ? reduce_to_bit_field_precision ((expr), \
6475 target, \
6476 type) \
6477 : (expr))
6478
6479 mode = TYPE_MODE (type);
6480 unsignedp = TYPE_UNSIGNED (type);
6481 if (lang_hooks.reduce_bit_field_operations
6482 && TREE_CODE (type) == INTEGER_TYPE
6483 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6484 {
6485 /* An operation in what may be a bit-field type needs the
6486 result to be reduced to the precision of the bit-field type,
6487 which is narrower than that of the type's mode. */
6488 reduce_bit_field = true;
6489 if (modifier == EXPAND_STACK_PARM)
6490 target = 0;
6491 }
6492
6493 /* Use subtarget as the target for operand 0 of a binary operation. */
6494 subtarget = get_subtarget (target);
6495 original_target = target;
6496 ignore = (target == const0_rtx
6497 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6498 || code == CONVERT_EXPR || code == COND_EXPR
6499 || code == VIEW_CONVERT_EXPR)
6500 && TREE_CODE (type) == VOID_TYPE));
6501
6502 /* If we are going to ignore this result, we need only do something
6503 if there is a side-effect somewhere in the expression. If there
6504 is, short-circuit the most common cases here. Note that we must
6505 not call expand_expr with anything but const0_rtx in case this
6506 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6507
6508 if (ignore)
6509 {
6510 if (! TREE_SIDE_EFFECTS (exp))
6511 return const0_rtx;
6512
6513 /* Ensure we reference a volatile object even if value is ignored, but
6514 don't do this if all we are doing is taking its address. */
6515 if (TREE_THIS_VOLATILE (exp)
6516 && TREE_CODE (exp) != FUNCTION_DECL
6517 && mode != VOIDmode && mode != BLKmode
6518 && modifier != EXPAND_CONST_ADDRESS)
6519 {
6520 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6521 if (MEM_P (temp))
6522 temp = copy_to_reg (temp);
6523 return const0_rtx;
6524 }
6525
6526 if (TREE_CODE_CLASS (code) == tcc_unary
6527 || code == COMPONENT_REF || code == INDIRECT_REF)
6528 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6529 modifier);
6530
6531 else if (TREE_CODE_CLASS (code) == tcc_binary
6532 || TREE_CODE_CLASS (code) == tcc_comparison
6533 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6534 {
6535 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6536 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6537 return const0_rtx;
6538 }
6539 else if (code == BIT_FIELD_REF)
6540 {
6541 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6542 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6543 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6544 return const0_rtx;
6545 }
6546
6547 target = 0;
6548 }
6549
6550 /* If we will do cse, generate all results into pseudo registers
6551 since 1) that allows cse to find more things
6552 and 2) otherwise cse could produce an insn the machine
6553 cannot support. An exception is a CONSTRUCTOR into a multi-word
6554 MEM: that's much more likely to be most efficient into the MEM.
6555 Another is a CALL_EXPR which must return in memory. */
6556
6557 if (! cse_not_expected && mode != BLKmode && target
6558 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6559 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6560 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6561 target = 0;
6562
6563 switch (code)
6564 {
6565 case LABEL_DECL:
6566 {
6567 tree function = decl_function_context (exp);
6568
6569 temp = label_rtx (exp);
6570 temp = gen_rtx_LABEL_REF (Pmode, temp);
6571
6572 if (function != current_function_decl
6573 && function != 0)
6574 LABEL_REF_NONLOCAL_P (temp) = 1;
6575
6576 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6577 return temp;
6578 }
6579
6580 case SSA_NAME:
6581 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6582 NULL);
6583
6584 case PARM_DECL:
6585 case VAR_DECL:
6586 /* If a static var's type was incomplete when the decl was written,
6587 but the type is complete now, lay out the decl now. */
6588 if (DECL_SIZE (exp) == 0
6589 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6590 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6591 layout_decl (exp, 0);
6592
6593 /* ... fall through ... */
6594
6595 case FUNCTION_DECL:
6596 case RESULT_DECL:
6597 gcc_assert (DECL_RTL (exp));
6598
6599 /* Ensure the variable is marked as used even if it doesn't go through
6600 a parser. If it hasn't been used yet, write out an external
6601 definition. */
6602 if (! TREE_USED (exp))
6603 {
6604 assemble_external (exp);
6605 TREE_USED (exp) = 1;
6606 }
6607
6608 /* Show we haven't gotten RTL for this yet. */
6609 temp = 0;
6610
6611 /* Variables inherited from containing functions should have
6612 been lowered by this point. */
6613 context = decl_function_context (exp);
6614 gcc_assert (!context
6615 || context == current_function_decl
6616 || TREE_STATIC (exp)
6617 /* ??? C++ creates functions that are not TREE_STATIC. */
6618 || TREE_CODE (exp) == FUNCTION_DECL);
6619
6620 /* This is the case of an array whose size is to be determined
6621 from its initializer, while the initializer is still being parsed.
6622 See expand_decl. */
6623
6624 if (MEM_P (DECL_RTL (exp))
6625 && REG_P (XEXP (DECL_RTL (exp), 0)))
6626 temp = validize_mem (DECL_RTL (exp));
6627
6628 /* If DECL_RTL is memory, we are in the normal case and either
6629 the address is not valid or it is not a register and -fforce-addr
6630 is specified, get the address into a register. */
6631
6632 else if (MEM_P (DECL_RTL (exp))
6633 && modifier != EXPAND_CONST_ADDRESS
6634 && modifier != EXPAND_SUM
6635 && modifier != EXPAND_INITIALIZER
6636 && (! memory_address_p (DECL_MODE (exp),
6637 XEXP (DECL_RTL (exp), 0))
6638 || (flag_force_addr
6639 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6640 {
6641 if (alt_rtl)
6642 *alt_rtl = DECL_RTL (exp);
6643 temp = replace_equiv_address (DECL_RTL (exp),
6644 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6645 }
6646
6647 /* If we got something, return it. But first, set the alignment
6648 if the address is a register. */
6649 if (temp != 0)
6650 {
6651 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6652 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6653
6654 return temp;
6655 }
6656
6657 /* If the mode of DECL_RTL does not match that of the decl, it
6658 must be a promoted value. We return a SUBREG of the wanted mode,
6659 but mark it so that we know that it was already extended. */
6660
6661 if (REG_P (DECL_RTL (exp))
6662 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6663 {
6664 enum machine_mode pmode;
6665
6666 /* Get the signedness used for this variable. Ensure we get the
6667 same mode we got when the variable was declared. */
6668 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6669 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6670 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6671
6672 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6673 SUBREG_PROMOTED_VAR_P (temp) = 1;
6674 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6675 return temp;
6676 }
6677
6678 return DECL_RTL (exp);
6679
6680 case INTEGER_CST:
6681 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6682 TREE_INT_CST_HIGH (exp), mode);
6683
6684 /* ??? If overflow is set, fold will have done an incomplete job,
6685 which can result in (plus xx (const_int 0)), which can get
6686 simplified by validate_replace_rtx during virtual register
6687 instantiation, which can result in unrecognizable insns.
6688 Avoid this by forcing all overflows into registers. */
6689 if (TREE_CONSTANT_OVERFLOW (exp)
6690 && modifier != EXPAND_INITIALIZER)
6691 temp = force_reg (mode, temp);
6692
6693 return temp;
6694
6695 case VECTOR_CST:
6696 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6697 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6698 return const_vector_from_tree (exp);
6699 else
6700 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6701 TREE_VECTOR_CST_ELTS (exp)),
6702 ignore ? const0_rtx : target, tmode, modifier);
6703
6704 case CONST_DECL:
6705 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6706
6707 case REAL_CST:
6708 /* If optimized, generate immediate CONST_DOUBLE
6709 which will be turned into memory by reload if necessary.
6710
6711 We used to force a register so that loop.c could see it. But
6712 this does not allow gen_* patterns to perform optimizations with
6713 the constants. It also produces two insns in cases like "x = 1.0;".
6714 On most machines, floating-point constants are not permitted in
6715 many insns, so we'd end up copying it to a register in any case.
6716
6717 Now, we do the copying in expand_binop, if appropriate. */
6718 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6719 TYPE_MODE (TREE_TYPE (exp)));
6720
6721 case COMPLEX_CST:
6722 /* Handle evaluating a complex constant in a CONCAT target. */
6723 if (original_target && GET_CODE (original_target) == CONCAT)
6724 {
6725 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6726 rtx rtarg, itarg;
6727
6728 rtarg = XEXP (original_target, 0);
6729 itarg = XEXP (original_target, 1);
6730
6731 /* Move the real and imaginary parts separately. */
6732 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6733 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6734
6735 if (op0 != rtarg)
6736 emit_move_insn (rtarg, op0);
6737 if (op1 != itarg)
6738 emit_move_insn (itarg, op1);
6739
6740 return original_target;
6741 }
6742
6743 /* ... fall through ... */
6744
6745 case STRING_CST:
6746 temp = output_constant_def (exp, 1);
6747
6748 /* temp contains a constant address.
6749 On RISC machines where a constant address isn't valid,
6750 make some insns to get that address into a register. */
6751 if (modifier != EXPAND_CONST_ADDRESS
6752 && modifier != EXPAND_INITIALIZER
6753 && modifier != EXPAND_SUM
6754 && (! memory_address_p (mode, XEXP (temp, 0))
6755 || flag_force_addr))
6756 return replace_equiv_address (temp,
6757 copy_rtx (XEXP (temp, 0)));
6758 return temp;
6759
6760 case SAVE_EXPR:
6761 {
6762 tree val = TREE_OPERAND (exp, 0);
6763 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6764
6765 if (!SAVE_EXPR_RESOLVED_P (exp))
6766 {
6767 /* We can indeed still hit this case, typically via builtin
6768 expanders calling save_expr immediately before expanding
6769 something. Assume this means that we only have to deal
6770 with non-BLKmode values. */
6771 gcc_assert (GET_MODE (ret) != BLKmode);
6772
6773 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6774 DECL_ARTIFICIAL (val) = 1;
6775 DECL_IGNORED_P (val) = 1;
6776 TREE_OPERAND (exp, 0) = val;
6777 SAVE_EXPR_RESOLVED_P (exp) = 1;
6778
6779 if (!CONSTANT_P (ret))
6780 ret = copy_to_reg (ret);
6781 SET_DECL_RTL (val, ret);
6782 }
6783
6784 return ret;
6785 }
6786
6787 case GOTO_EXPR:
6788 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6789 expand_goto (TREE_OPERAND (exp, 0));
6790 else
6791 expand_computed_goto (TREE_OPERAND (exp, 0));
6792 return const0_rtx;
6793
6794 case CONSTRUCTOR:
6795 /* If we don't need the result, just ensure we evaluate any
6796 subexpressions. */
6797 if (ignore)
6798 {
6799 tree elt;
6800
6801 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6802 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6803
6804 return const0_rtx;
6805 }
6806
6807 /* All elts simple constants => refer to a constant in memory. But
6808 if this is a non-BLKmode mode, let it store a field at a time
6809 since that should make a CONST_INT or CONST_DOUBLE when we
6810 fold. Likewise, if we have a target we can use, it is best to
6811 store directly into the target unless the type is large enough
6812 that memcpy will be used. If we are making an initializer and
6813 all operands are constant, put it in memory as well.
6814
6815 FIXME: Avoid trying to fill vector constructors piecemeal.
6816 Output them with output_constant_def below unless we're sure
6817 they're zeros. This should go away when vector initializers
6818 are treated like VECTOR_CST instead of arrays.
6819 */
6820 else if ((TREE_STATIC (exp)
6821 && ((mode == BLKmode
6822 && ! (target != 0 && safe_from_p (target, exp, 1)))
6823 || TREE_ADDRESSABLE (exp)
6824 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6825 && (! MOVE_BY_PIECES_P
6826 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6827 TYPE_ALIGN (type)))
6828 && ! mostly_zeros_p (exp))))
6829 || ((modifier == EXPAND_INITIALIZER
6830 || modifier == EXPAND_CONST_ADDRESS)
6831 && TREE_CONSTANT (exp)))
6832 {
6833 rtx constructor = output_constant_def (exp, 1);
6834
6835 if (modifier != EXPAND_CONST_ADDRESS
6836 && modifier != EXPAND_INITIALIZER
6837 && modifier != EXPAND_SUM)
6838 constructor = validize_mem (constructor);
6839
6840 return constructor;
6841 }
6842 else
6843 {
6844 /* Handle calls that pass values in multiple non-contiguous
6845 locations. The Irix 6 ABI has examples of this. */
6846 if (target == 0 || ! safe_from_p (target, exp, 1)
6847 || GET_CODE (target) == PARALLEL
6848 || modifier == EXPAND_STACK_PARM)
6849 target
6850 = assign_temp (build_qualified_type (type,
6851 (TYPE_QUALS (type)
6852 | (TREE_READONLY (exp)
6853 * TYPE_QUAL_CONST))),
6854 0, TREE_ADDRESSABLE (exp), 1);
6855
6856 store_constructor (exp, target, 0, int_expr_size (exp));
6857 return target;
6858 }
6859
6860 case MISALIGNED_INDIRECT_REF:
6861 case ALIGN_INDIRECT_REF:
6862 case INDIRECT_REF:
6863 {
6864 tree exp1 = TREE_OPERAND (exp, 0);
6865
6866 if (modifier != EXPAND_WRITE)
6867 {
6868 tree t;
6869
6870 t = fold_read_from_constant_string (exp);
6871 if (t)
6872 return expand_expr (t, target, tmode, modifier);
6873 }
6874
6875 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6876 op0 = memory_address (mode, op0);
6877
6878 if (code == ALIGN_INDIRECT_REF)
6879 {
6880 int align = TYPE_ALIGN_UNIT (type);
6881 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6882 op0 = memory_address (mode, op0);
6883 }
6884
6885 temp = gen_rtx_MEM (mode, op0);
6886
6887 set_mem_attributes (temp, exp, 0);
6888
6889 /* Resolve the misalignment now, so that we don't have to remember
6890 to resolve it later. Of course, this only works for reads. */
6891 /* ??? When we get around to supporting writes, we'll have to handle
6892 this in store_expr directly. The vectorizer isn't generating
6893 those yet, however. */
6894 if (code == MISALIGNED_INDIRECT_REF)
6895 {
6896 int icode;
6897 rtx reg, insn;
6898
6899 gcc_assert (modifier == EXPAND_NORMAL);
6900
6901 /* The vectorizer should have already checked the mode. */
6902 icode = movmisalign_optab->handlers[mode].insn_code;
6903 gcc_assert (icode != CODE_FOR_nothing);
6904
6905 /* We've already validated the memory, and we're creating a
6906 new pseudo destination. The predicates really can't fail. */
6907 reg = gen_reg_rtx (mode);
6908
6909 /* Nor can the insn generator. */
6910 insn = GEN_FCN (icode) (reg, temp);
6911 emit_insn (insn);
6912
6913 return reg;
6914 }
6915
6916 return temp;
6917 }
6918
6919 case TARGET_MEM_REF:
6920 {
6921 struct mem_address addr;
6922
6923 get_address_description (exp, &addr);
6924 op0 = addr_for_mem_ref (&addr, true);
6925 op0 = memory_address (mode, op0);
6926 temp = gen_rtx_MEM (mode, op0);
6927 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
6928 }
6929 return temp;
6930
6931 case ARRAY_REF:
6932
6933 {
6934 tree array = TREE_OPERAND (exp, 0);
6935 tree index = TREE_OPERAND (exp, 1);
6936
6937 /* Fold an expression like: "foo"[2].
6938 This is not done in fold so it won't happen inside &.
6939 Don't fold if this is for wide characters since it's too
6940 difficult to do correctly and this is a very rare case. */
6941
6942 if (modifier != EXPAND_CONST_ADDRESS
6943 && modifier != EXPAND_INITIALIZER
6944 && modifier != EXPAND_MEMORY)
6945 {
6946 tree t = fold_read_from_constant_string (exp);
6947
6948 if (t)
6949 return expand_expr (t, target, tmode, modifier);
6950 }
6951
6952 /* If this is a constant index into a constant array,
6953 just get the value from the array. Handle both the cases when
6954 we have an explicit constructor and when our operand is a variable
6955 that was declared const. */
6956
6957 if (modifier != EXPAND_CONST_ADDRESS
6958 && modifier != EXPAND_INITIALIZER
6959 && modifier != EXPAND_MEMORY
6960 && TREE_CODE (array) == CONSTRUCTOR
6961 && ! TREE_SIDE_EFFECTS (array)
6962 && TREE_CODE (index) == INTEGER_CST)
6963 {
6964 tree elem;
6965
6966 for (elem = CONSTRUCTOR_ELTS (array);
6967 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6968 elem = TREE_CHAIN (elem))
6969 ;
6970
6971 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6972 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6973 modifier);
6974 }
6975
6976 else if (optimize >= 1
6977 && modifier != EXPAND_CONST_ADDRESS
6978 && modifier != EXPAND_INITIALIZER
6979 && modifier != EXPAND_MEMORY
6980 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6981 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6982 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6983 && targetm.binds_local_p (array))
6984 {
6985 if (TREE_CODE (index) == INTEGER_CST)
6986 {
6987 tree init = DECL_INITIAL (array);
6988
6989 if (TREE_CODE (init) == CONSTRUCTOR)
6990 {
6991 tree elem;
6992
6993 for (elem = CONSTRUCTOR_ELTS (init);
6994 (elem
6995 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6996 elem = TREE_CHAIN (elem))
6997 ;
6998
6999 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7000 return expand_expr (fold (TREE_VALUE (elem)), target,
7001 tmode, modifier);
7002 }
7003 else if (TREE_CODE (init) == STRING_CST
7004 && 0 > compare_tree_int (index,
7005 TREE_STRING_LENGTH (init)))
7006 {
7007 tree type = TREE_TYPE (TREE_TYPE (init));
7008 enum machine_mode mode = TYPE_MODE (type);
7009
7010 if (GET_MODE_CLASS (mode) == MODE_INT
7011 && GET_MODE_SIZE (mode) == 1)
7012 return gen_int_mode (TREE_STRING_POINTER (init)
7013 [TREE_INT_CST_LOW (index)], mode);
7014 }
7015 }
7016 }
7017 }
7018 goto normal_inner_ref;
7019
7020 case COMPONENT_REF:
7021 /* If the operand is a CONSTRUCTOR, we can just extract the
7022 appropriate field if it is present. */
7023 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7024 {
7025 tree elt;
7026
7027 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7028 elt = TREE_CHAIN (elt))
7029 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7030 /* We can normally use the value of the field in the
7031 CONSTRUCTOR. However, if this is a bitfield in
7032 an integral mode that we can fit in a HOST_WIDE_INT,
7033 we must mask only the number of bits in the bitfield,
7034 since this is done implicitly by the constructor. If
7035 the bitfield does not meet either of those conditions,
7036 we can't do this optimization. */
7037 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7038 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7039 == MODE_INT)
7040 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7041 <= HOST_BITS_PER_WIDE_INT))))
7042 {
7043 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7044 && modifier == EXPAND_STACK_PARM)
7045 target = 0;
7046 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7047 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7048 {
7049 HOST_WIDE_INT bitsize
7050 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7051 enum machine_mode imode
7052 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7053
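/* An unsigned bitfield is reduced to BITSIZE bits by masking;
   a signed one is shifted up to the top of IMODE and
   arithmetic-shifted back down so that it is sign-extended
   from bit BITSIZE - 1.  */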
7054 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7055 {
7056 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7057 op0 = expand_and (imode, op0, op1, target);
7058 }
7059 else
7060 {
7061 tree count
7062 = build_int_cst (NULL_TREE,
7063 GET_MODE_BITSIZE (imode) - bitsize);
7064
7065 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7066 target, 0);
7067 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7068 target, 0);
7069 }
7070 }
7071
7072 return op0;
7073 }
7074 }
7075 goto normal_inner_ref;
7076
7077 case BIT_FIELD_REF:
7078 case ARRAY_RANGE_REF:
7079 normal_inner_ref:
7080 {
7081 enum machine_mode mode1;
7082 HOST_WIDE_INT bitsize, bitpos;
7083 tree offset;
7084 int volatilep = 0;
7085 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7086 &mode1, &unsignedp, &volatilep, true);
7087 rtx orig_op0;
7088
7089 /* If we got back the original object, something is wrong. Perhaps
7090 we are evaluating an expression too early. In any event, don't
7091 infinitely recurse. */
7092 gcc_assert (tem != exp);
7093
7094 /* If TEM's type is a union of variable size, pass TARGET to the inner
7095 computation, since it will need a temporary and TARGET is known
7096 to have to do. This occurs in unchecked conversion in Ada. */
7097
7098 orig_op0 = op0
7099 = expand_expr (tem,
7100 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7101 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7102 != INTEGER_CST)
7103 && modifier != EXPAND_STACK_PARM
7104 ? target : NULL_RTX),
7105 VOIDmode,
7106 (modifier == EXPAND_INITIALIZER
7107 || modifier == EXPAND_CONST_ADDRESS
7108 || modifier == EXPAND_STACK_PARM)
7109 ? modifier : EXPAND_NORMAL);
7110
7111 /* If this is a constant, put it into a register if it is a
7112 legitimate constant and OFFSET is 0 and memory if it isn't. */
7113 if (CONSTANT_P (op0))
7114 {
7115 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7116 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7117 && offset == 0)
7118 op0 = force_reg (mode, op0);
7119 else
7120 op0 = validize_mem (force_const_mem (mode, op0));
7121 }
7122
7123 /* Otherwise, if this object is not in memory and we either have an
7124 offset or a BLKmode result, put it there. This case can't occur in
7125 C, but can in Ada if we have unchecked conversion of an expression
7126 from a scalar type to an array or record type or for an
7127 ARRAY_RANGE_REF whose type is BLKmode. */
7128 else if (!MEM_P (op0)
7129 && (offset != 0
7130 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7131 {
7132 tree nt = build_qualified_type (TREE_TYPE (tem),
7133 (TYPE_QUALS (TREE_TYPE (tem))
7134 | TYPE_QUAL_CONST));
7135 rtx memloc = assign_temp (nt, 1, 1, 1);
7136
7137 emit_move_insn (memloc, op0);
7138 op0 = memloc;
7139 }
7140
7141 if (offset != 0)
7142 {
7143 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7144 EXPAND_SUM);
7145
7146 gcc_assert (MEM_P (op0));
7147
7148 #ifdef POINTERS_EXTEND_UNSIGNED
7149 if (GET_MODE (offset_rtx) != Pmode)
7150 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7151 #else
7152 if (GET_MODE (offset_rtx) != ptr_mode)
7153 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7154 #endif
7155
7156 if (GET_MODE (op0) == BLKmode
7157 /* A constant address in OP0 can have VOIDmode; we must
7158 not try to call force_reg in that case. */
7159 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7160 && bitsize != 0
7161 && (bitpos % bitsize) == 0
7162 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7163 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7164 {
7165 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7166 bitpos = 0;
7167 }
7168
7169 op0 = offset_address (op0, offset_rtx,
7170 highest_pow2_factor (offset));
7171 }
7172
7173 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7174 record its alignment as BIGGEST_ALIGNMENT. */
7175 if (MEM_P (op0) && bitpos == 0 && offset != 0
7176 && is_aligning_offset (offset, tem))
7177 set_mem_align (op0, BIGGEST_ALIGNMENT);
7178
7179 /* Don't forget about volatility even if this is a bitfield. */
7180 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7181 {
7182 if (op0 == orig_op0)
7183 op0 = copy_rtx (op0);
7184
7185 MEM_VOLATILE_P (op0) = 1;
7186 }
7187
7188 /* The following code doesn't handle CONCAT.
7189 Assume only bitpos == 0 can be used for CONCAT, due to
7190 one-element arrays having the same mode as their element. */
7191 if (GET_CODE (op0) == CONCAT)
7192 {
7193 gcc_assert (bitpos == 0
7194 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7195 return op0;
7196 }
7197
7198 /* In cases where an aligned union has an unaligned object
7199 as a field, we might be extracting a BLKmode value from
7200 an integer-mode (e.g., SImode) object. Handle this case
7201 by doing the extract into an object as wide as the field
7202 (which we know to be the width of a basic mode), then
7203 storing into memory, and changing the mode to BLKmode. */
7204 if (mode1 == VOIDmode
7205 || REG_P (op0) || GET_CODE (op0) == SUBREG
7206 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7207 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7208 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7209 && modifier != EXPAND_CONST_ADDRESS
7210 && modifier != EXPAND_INITIALIZER)
7211 /* If the field isn't aligned enough to fetch as a memref,
7212 fetch it as a bit field. */
7213 || (mode1 != BLKmode
7214 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7215 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7216 || (MEM_P (op0)
7217 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7218 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7219 && ((modifier == EXPAND_CONST_ADDRESS
7220 || modifier == EXPAND_INITIALIZER)
7221 ? STRICT_ALIGNMENT
7222 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7223 || (bitpos % BITS_PER_UNIT != 0)))
7224 /* If the type and the field are a constant size and the
7225 size of the type isn't the same size as the bitfield,
7226 we must use bitfield operations. */
7227 || (bitsize >= 0
7228 && TYPE_SIZE (TREE_TYPE (exp))
7229 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7230 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7231 bitsize)))
7232 {
7233 enum machine_mode ext_mode = mode;
7234
7235 if (ext_mode == BLKmode
7236 && ! (target != 0 && MEM_P (op0)
7237 && MEM_P (target)
7238 && bitpos % BITS_PER_UNIT == 0))
7239 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7240
7241 if (ext_mode == BLKmode)
7242 {
7243 if (target == 0)
7244 target = assign_temp (type, 0, 1, 1);
7245
7246 if (bitsize == 0)
7247 return target;
7248
7249 /* In this case, BITPOS must start at a byte boundary and
7250 TARGET, if specified, must be a MEM. */
7251 gcc_assert (MEM_P (op0)
7252 && (!target || MEM_P (target))
7253 && !(bitpos % BITS_PER_UNIT));
7254
7255 emit_block_move (target,
7256 adjust_address (op0, VOIDmode,
7257 bitpos / BITS_PER_UNIT),
7258 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7259 / BITS_PER_UNIT),
7260 (modifier == EXPAND_STACK_PARM
7261 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7262
7263 return target;
7264 }
7265
7266 op0 = validize_mem (op0);
7267
7268 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7269 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7270
7271 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7272 (modifier == EXPAND_STACK_PARM
7273 ? NULL_RTX : target),
7274 ext_mode, ext_mode);
7275
7276 /* If the result is a record type and BITSIZE is narrower than
7277 the mode of OP0, an integral mode, and this is a big endian
7278 machine, we must put the field into the high-order bits. */
7279 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7280 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7281 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7282 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7283 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7284 - bitsize),
7285 op0, 1);
7286
7287 /* If the result type is BLKmode, store the data into a temporary
7288 of the appropriate type, but with the mode corresponding to the
7289 mode for the data we have (op0's mode). It's tempting to make
7290 this a constant type, since we know it's only being stored once,
7291 but that can cause problems if we are taking the address of this
7292 COMPONENT_REF because the MEM of any reference via that address
7293 will have flags corresponding to the type, which will not
7294 necessarily be constant. */
7295 if (mode == BLKmode)
7296 {
7297 rtx new
7298 = assign_stack_temp_for_type
7299 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7300
7301 emit_move_insn (new, op0);
7302 op0 = copy_rtx (new);
7303 PUT_MODE (op0, BLKmode);
7304 set_mem_attributes (op0, exp, 1);
7305 }
7306
7307 return op0;
7308 }
7309
7310 /* If the result is BLKmode, use that to access the object
7311 now as well. */
7312 if (mode == BLKmode)
7313 mode1 = BLKmode;
7314
7315 /* Get a reference to just this component. */
7316 if (modifier == EXPAND_CONST_ADDRESS
7317 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7318 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7319 else
7320 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7321
7322 if (op0 == orig_op0)
7323 op0 = copy_rtx (op0);
7324
7325 set_mem_attributes (op0, exp, 0);
7326 if (REG_P (XEXP (op0, 0)))
7327 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7328
7329 MEM_VOLATILE_P (op0) |= volatilep;
7330 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7331 || modifier == EXPAND_CONST_ADDRESS
7332 || modifier == EXPAND_INITIALIZER)
7333 return op0;
7334 else if (target == 0)
7335 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7336
7337 convert_move (target, op0, unsignedp);
7338 return target;
7339 }
7340
7341 case OBJ_TYPE_REF:
7342 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7343
7344 case CALL_EXPR:
7345 /* Check for a built-in function. */
7346 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7347 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7348 == FUNCTION_DECL)
7349 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7350 {
7351 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7352 == BUILT_IN_FRONTEND)
7353 return lang_hooks.expand_expr (exp, original_target,
7354 tmode, modifier,
7355 alt_rtl);
7356 else
7357 return expand_builtin (exp, target, subtarget, tmode, ignore);
7358 }
7359
7360 return expand_call (exp, target, ignore);
7361
7362 case NON_LVALUE_EXPR:
7363 case NOP_EXPR:
7364 case CONVERT_EXPR:
7365 if (TREE_OPERAND (exp, 0) == error_mark_node)
7366 return const0_rtx;
7367
7368 if (TREE_CODE (type) == UNION_TYPE)
7369 {
7370 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7371
7372 /* If both input and output are BLKmode, this conversion isn't doing
7373 anything except possibly changing the memory attributes. */
7374 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7375 {
7376 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7377 modifier);
7378
7379 result = copy_rtx (result);
7380 set_mem_attributes (result, exp, 0);
7381 return result;
7382 }
7383
7384 if (target == 0)
7385 {
7386 if (TYPE_MODE (type) != BLKmode)
7387 target = gen_reg_rtx (TYPE_MODE (type));
7388 else
7389 target = assign_temp (type, 0, 1, 1);
7390 }
7391
7392 if (MEM_P (target))
7393 /* Store data into beginning of memory target. */
7394 store_expr (TREE_OPERAND (exp, 0),
7395 adjust_address (target, TYPE_MODE (valtype), 0),
7396 modifier == EXPAND_STACK_PARM);
7397
7398 else
7399 {
7400 gcc_assert (REG_P (target));
7401
7402 /* Store this field into a union of the proper type. */
7403 store_field (target,
7404 MIN ((int_size_in_bytes (TREE_TYPE
7405 (TREE_OPERAND (exp, 0)))
7406 * BITS_PER_UNIT),
7407 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7408 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7409 type, 0);
7410 }
7411
7412 /* Return the entire union. */
7413 return target;
7414 }
7415
7416 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7417 {
7418 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7419 modifier);
7420
7421 /* If the signedness of the conversion differs and OP0 is
7422 a promoted SUBREG, clear that indication since we now
7423 have to do the proper extension. */
7424 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7425 && GET_CODE (op0) == SUBREG)
7426 SUBREG_PROMOTED_VAR_P (op0) = 0;
7427
7428 return REDUCE_BIT_FIELD (op0);
7429 }
7430
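/* Otherwise expand the operand in its own mode and convert the
   result: constants via simplify_gen_subreg or convert_modes,
   initializers via an explicit ZERO_EXTEND/SIGN_EXTEND rtx, and
   everything else with convert_to_mode or convert_move.  */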
7431 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7432 if (GET_MODE (op0) == mode)
7433 ;
7434
7435 /* If OP0 is a constant, just convert it into the proper mode. */
7436 else if (CONSTANT_P (op0))
7437 {
7438 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7439 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7440
7441 if (modifier == EXPAND_INITIALIZER)
7442 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7443 subreg_lowpart_offset (mode,
7444 inner_mode));
7445 else
7446 op0 = convert_modes (mode, inner_mode, op0,
7447 TYPE_UNSIGNED (inner_type));
7448 }
7449
7450 else if (modifier == EXPAND_INITIALIZER)
7451 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7452
7453 else if (target == 0)
7454 op0 = convert_to_mode (mode, op0,
7455 TYPE_UNSIGNED (TREE_TYPE
7456 (TREE_OPERAND (exp, 0))));
7457 else
7458 {
7459 convert_move (target, op0,
7460 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7461 op0 = target;
7462 }
7463
7464 return REDUCE_BIT_FIELD (op0);
7465
7466 case VIEW_CONVERT_EXPR:
7467 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7468
7469 /* If the input and output modes are both the same, we are done.
7470 Otherwise, if neither mode is BLKmode and both are integral and within
7471 a word, we can use gen_lowpart. If neither is true, make sure the
7472 operand is in memory and convert the MEM to the new mode. */
7473 if (TYPE_MODE (type) == GET_MODE (op0))
7474 ;
7475 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7476 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7477 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7478 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7479 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7480 op0 = gen_lowpart (TYPE_MODE (type), op0);
7481 else if (!MEM_P (op0))
7482 {
7483 /* If the operand is not a MEM, force it into memory. Since we
7484 are going to be changing the mode of the MEM, don't call
7485 force_const_mem for constants because we don't allow pool
7486 constants to change mode. */
7487 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7488
7489 gcc_assert (!TREE_ADDRESSABLE (exp));
7490
7491 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7492 target
7493 = assign_stack_temp_for_type
7494 (TYPE_MODE (inner_type),
7495 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7496
7497 emit_move_insn (target, op0);
7498 op0 = target;
7499 }
7500
7501 /* At this point, OP0 is in the correct mode. If the output type is such
7502 that the operand is known to be aligned, indicate that it is.
7503 Otherwise, we need only be concerned about alignment for non-BLKmode
7504 results. */
7505 if (MEM_P (op0))
7506 {
7507 op0 = copy_rtx (op0);
7508
7509 if (TYPE_ALIGN_OK (type))
7510 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7511 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7512 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7513 {
7514 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7515 HOST_WIDE_INT temp_size
7516 = MAX (int_size_in_bytes (inner_type),
7517 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7518 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7519 temp_size, 0, type);
7520 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7521
7522 gcc_assert (!TREE_ADDRESSABLE (exp));
7523
7524 if (GET_MODE (op0) == BLKmode)
7525 emit_block_move (new_with_op0_mode, op0,
7526 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7527 (modifier == EXPAND_STACK_PARM
7528 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7529 else
7530 emit_move_insn (new_with_op0_mode, op0);
7531
7532 op0 = new;
7533 }
7534
7535 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7536 }
7537
7538 return op0;
7539
7540 case PLUS_EXPR:
7541 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7542 something else, make sure we add the register to the constant and
7543 then to the other thing. This case can occur during strength
7544 reduction and doing it this way will produce better code if the
7545 frame pointer or argument pointer is eliminated.
7546
7547 fold-const.c will ensure that the constant is always in the inner
7548 PLUS_EXPR, so the only case we need to do anything about is if
7549 sp, ap, or fp is our second argument, in which case we must swap
7550 the innermost first argument and our second argument. */
7551
7552 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7553 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7554 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7555 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7556 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7557 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7558 {
7559 tree t = TREE_OPERAND (exp, 1);
7560
7561 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7562 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7563 }
7564
7565 /* If the result is to be ptr_mode and we are adding an integer to
7566 something, we might be forming a constant. So try to use
7567 plus_constant. If it produces a sum and we can't accept it,
7568 use force_operand. This allows P = &ARR[const] to generate
7569 efficient code on machines where a SYMBOL_REF is not a valid
7570 address.
7571
7572 If this is an EXPAND_SUM call, always return the sum. */
7573 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7574 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7575 {
7576 if (modifier == EXPAND_STACK_PARM)
7577 target = 0;
7578 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7579 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7580 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7581 {
7582 rtx constant_part;
7583
7584 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7585 EXPAND_SUM);
7586 /* Use immed_double_const to ensure that the constant is
7587 truncated according to the mode of OP1, then sign extended
7588 to a HOST_WIDE_INT. Using the constant directly can result
7589 in non-canonical RTL in a 64x32 cross compile. */
7590 constant_part
7591 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7592 (HOST_WIDE_INT) 0,
7593 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7594 op1 = plus_constant (op1, INTVAL (constant_part));
7595 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7596 op1 = force_operand (op1, target);
7597 return REDUCE_BIT_FIELD (op1);
7598 }
7599
7600 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7601 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7602 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7603 {
7604 rtx constant_part;
7605
7606 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7607 (modifier == EXPAND_INITIALIZER
7608 ? EXPAND_INITIALIZER : EXPAND_SUM));
7609 if (! CONSTANT_P (op0))
7610 {
7611 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7612 VOIDmode, modifier);
7613 /* Return a PLUS if modifier says it's OK. */
7614 if (modifier == EXPAND_SUM
7615 || modifier == EXPAND_INITIALIZER)
7616 return simplify_gen_binary (PLUS, mode, op0, op1);
7617 goto binop2;
7618 }
7619 /* Use immed_double_const to ensure that the constant is
7620 truncated according to the mode of OP1, then sign extended
7621 to a HOST_WIDE_INT. Using the constant directly can result
7622 in non-canonical RTL in a 64x32 cross compile. */
7623 constant_part
7624 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7625 (HOST_WIDE_INT) 0,
7626 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7627 op0 = plus_constant (op0, INTVAL (constant_part));
7628 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7629 op0 = force_operand (op0, target);
7630 return REDUCE_BIT_FIELD (op0);
7631 }
7632 }
7633
7634 /* No sense saving up arithmetic to be done
7635 if it's all in the wrong mode to form part of an address.
7636 And force_operand won't know whether to sign-extend or
7637 zero-extend. */
7638 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7639 || mode != ptr_mode)
7640 {
7641 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7642 subtarget, &op0, &op1, 0);
7643 if (op0 == const0_rtx)
7644 return op1;
7645 if (op1 == const0_rtx)
7646 return op0;
7647 goto binop2;
7648 }
7649
7650 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7651 subtarget, &op0, &op1, modifier);
7652 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7653
7654 case MINUS_EXPR:
7655 /* For initializers, we are allowed to return a MINUS of two
7656 symbolic constants. Here we handle all cases when both operands
7657 are constant. */
7658 /* Handle difference of two symbolic constants,
7659 for the sake of an initializer. */
7660 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7661 && really_constant_p (TREE_OPERAND (exp, 0))
7662 && really_constant_p (TREE_OPERAND (exp, 1)))
7663 {
7664 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7665 NULL_RTX, &op0, &op1, modifier);
7666
7667 /* If the last operand is a CONST_INT, use plus_constant of
7668 the negated constant. Else make the MINUS. */
7669 if (GET_CODE (op1) == CONST_INT)
7670 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7671 else
7672 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7673 }
7674
7675 /* No sense saving up arithmetic to be done
7676 if it's all in the wrong mode to form part of an address.
7677 And force_operand won't know whether to sign-extend or
7678 zero-extend. */
7679 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7680 || mode != ptr_mode)
7681 goto binop;
7682
7683 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7684 subtarget, &op0, &op1, modifier);
7685
7686 /* Convert A - const to A + (-const). */
7687 if (GET_CODE (op1) == CONST_INT)
7688 {
7689 op1 = negate_rtx (mode, op1);
7690 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7691 }
7692
7693 goto binop2;
7694
7695 case MULT_EXPR:
7696 /* If first operand is constant, swap them.
7697 Thus the following special case checks need only
7698 check the second operand. */
7699 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7700 {
7701 tree t1 = TREE_OPERAND (exp, 0);
7702 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7703 TREE_OPERAND (exp, 1) = t1;
7704 }
7705
7706 /* Attempt to return something suitable for generating an
7707 indexed address, for machines that support that. */
7708
7709 if (modifier == EXPAND_SUM && mode == ptr_mode
7710 && host_integerp (TREE_OPERAND (exp, 1), 0))
7711 {
7712 tree exp1 = TREE_OPERAND (exp, 1);
7713
7714 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7715 EXPAND_SUM);
7716
7717 if (!REG_P (op0))
7718 op0 = force_operand (op0, NULL_RTX);
7719 if (!REG_P (op0))
7720 op0 = copy_to_mode_reg (mode, op0);
7721
7722 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7723 gen_int_mode (tree_low_cst (exp1, 0),
7724 TYPE_MODE (TREE_TYPE (exp1)))));
7725 }
7726
7727 if (modifier == EXPAND_STACK_PARM)
7728 target = 0;
7729
7730 /* Check for multiplying things that have been extended
7731 from a narrower type. If this machine supports multiplying
7732 in that narrower type with a result in the desired type,
7733 do it that way, and avoid the explicit type-conversion. */
7734 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7735 && TREE_CODE (type) == INTEGER_TYPE
7736 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7737 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7738 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7739 && int_fits_type_p (TREE_OPERAND (exp, 1),
7740 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7741 /* Don't use a widening multiply if a shift will do. */
7742 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7743 > HOST_BITS_PER_WIDE_INT)
7744 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7745 ||
7746 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7747 && (TYPE_PRECISION (TREE_TYPE
7748 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7749 == TYPE_PRECISION (TREE_TYPE
7750 (TREE_OPERAND
7751 (TREE_OPERAND (exp, 0), 0))))
7752 /* If both operands are extended, they must either both
7753 be zero-extended or both be sign-extended. */
7754 && (TYPE_UNSIGNED (TREE_TYPE
7755 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7756 == TYPE_UNSIGNED (TREE_TYPE
7757 (TREE_OPERAND
7758 (TREE_OPERAND (exp, 0), 0)))))))
7759 {
7760 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7761 enum machine_mode innermode = TYPE_MODE (op0type);
7762 bool zextend_p = TYPE_UNSIGNED (op0type);
7763 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7764 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7765
7766 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7767 {
7768 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7769 {
7770 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7771 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7772 TREE_OPERAND (exp, 1),
7773 NULL_RTX, &op0, &op1, 0);
7774 else
7775 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7776 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7777 NULL_RTX, &op0, &op1, 0);
7778 goto binop3;
7779 }
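/* If only the widening multiply of the opposite signedness is
   available and the operands are word_mode, use it and then
   correct the high half of the double-word product for the
   signedness difference.  */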
7780 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7781 && innermode == word_mode)
7782 {
7783 rtx htem, hipart;
7784 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7785 NULL_RTX, VOIDmode, 0);
7786 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7787 op1 = convert_modes (innermode, mode,
7788 expand_expr (TREE_OPERAND (exp, 1),
7789 NULL_RTX, VOIDmode, 0),
7790 unsignedp);
7791 else
7792 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7793 NULL_RTX, VOIDmode, 0);
7794 temp = expand_binop (mode, other_optab, op0, op1, target,
7795 unsignedp, OPTAB_LIB_WIDEN);
7796 hipart = gen_highpart (innermode, temp);
7797 htem = expand_mult_highpart_adjust (innermode, hipart,
7798 op0, op1, hipart,
7799 zextend_p);
7800 if (htem != hipart)
7801 emit_move_insn (hipart, htem);
7802 return REDUCE_BIT_FIELD (temp);
7803 }
7804 }
7805 }
7806 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7807 subtarget, &op0, &op1, 0);
7808 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7809
7810 case TRUNC_DIV_EXPR:
7811 case FLOOR_DIV_EXPR:
7812 case CEIL_DIV_EXPR:
7813 case ROUND_DIV_EXPR:
7814 case EXACT_DIV_EXPR:
7815 if (modifier == EXPAND_STACK_PARM)
7816 target = 0;
7817 /* Possible optimization: compute the dividend with EXPAND_SUM
7818 then if the divisor is constant can optimize the case
7819 where some terms of the dividend have coeffs divisible by it. */
7820 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7821 subtarget, &op0, &op1, 0);
7822 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7823
7824 case RDIV_EXPR:
7825 goto binop;
7826
7827 case TRUNC_MOD_EXPR:
7828 case FLOOR_MOD_EXPR:
7829 case CEIL_MOD_EXPR:
7830 case ROUND_MOD_EXPR:
7831 if (modifier == EXPAND_STACK_PARM)
7832 target = 0;
7833 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7834 subtarget, &op0, &op1, 0);
7835 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7836
7837 case FIX_ROUND_EXPR:
7838 case FIX_FLOOR_EXPR:
7839 case FIX_CEIL_EXPR:
7840 gcc_unreachable (); /* Not used for C. */
7841
7842 case FIX_TRUNC_EXPR:
7843 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7844 if (target == 0 || modifier == EXPAND_STACK_PARM)
7845 target = gen_reg_rtx (mode);
7846 expand_fix (target, op0, unsignedp);
7847 return target;
7848
7849 case FLOAT_EXPR:
7850 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7851 if (target == 0 || modifier == EXPAND_STACK_PARM)
7852 target = gen_reg_rtx (mode);
7853 /* expand_float can't figure out what to do if FROM has VOIDmode.
7854 So give it the correct mode. With -O, cse will optimize this. */
7855 if (GET_MODE (op0) == VOIDmode)
7856 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7857 op0);
7858 expand_float (target, op0,
7859 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7860 return target;
7861
7862 case NEGATE_EXPR:
7863 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7864 if (modifier == EXPAND_STACK_PARM)
7865 target = 0;
7866 temp = expand_unop (mode,
7867 optab_for_tree_code (NEGATE_EXPR, type),
7868 op0, target, 0);
7869 gcc_assert (temp);
7870 return REDUCE_BIT_FIELD (temp);
7871
7872 case ABS_EXPR:
7873 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7874 if (modifier == EXPAND_STACK_PARM)
7875 target = 0;
7876
7877 /* ABS_EXPR is not valid for complex arguments. */
7878 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7879 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7880
7881 /* Unsigned abs is simply the operand. Testing here means we don't
7882 risk generating incorrect code below. */
7883 if (TYPE_UNSIGNED (type))
7884 return op0;
7885
7886 return expand_abs (mode, op0, target, unsignedp,
7887 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7888
7889 case MAX_EXPR:
7890 case MIN_EXPR:
7891 target = original_target;
7892 if (target == 0
7893 || modifier == EXPAND_STACK_PARM
7894 || (MEM_P (target) && MEM_VOLATILE_P (target))
7895 || GET_MODE (target) != mode
7896 || (REG_P (target)
7897 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7898 target = gen_reg_rtx (mode);
7899 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7900 target, &op0, &op1, 0);
7901
7902 /* First try to do it with a special MIN or MAX instruction.
7903 If that does not win, use a conditional jump to select the proper
7904 value. */
7905 this_optab = optab_for_tree_code (code, type);
7906 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7907 OPTAB_WIDEN);
7908 if (temp != 0)
7909 return temp;
7910
7911 /* At this point, a MEM target is no longer useful; we will get better
7912 code without it. */
7913
7914 if (! REG_P (target))
7915 target = gen_reg_rtx (mode);
7916
7917 /* If op1 was placed in target, swap op0 and op1. */
7918 if (target != op0 && target == op1)
7919 {
7920 temp = op0;
7921 op0 = op1;
7922 op1 = temp;
7923 }
7924
7925 /* We generate better code and avoid problems with op1 mentioning
7926 target by forcing op1 into a pseudo if it isn't a constant. */
7927 if (! CONSTANT_P (op1))
7928 op1 = force_reg (mode, op1);
7929
7930 #ifdef HAVE_conditional_move
7931 /* Use a conditional move if possible. */
7932 if (can_conditionally_move_p (mode))
7933 {
7934 enum rtx_code comparison_code;
7935 rtx insn;
7936
7937 if (code == MAX_EXPR)
7938 comparison_code = unsignedp ? GEU : GE;
7939 else
7940 comparison_code = unsignedp ? LEU : LE;
7941
7942 /* ??? Same problem as in expmed.c: emit_conditional_move
7943 forces a stack adjustment via compare_from_rtx, and we
7944 lose the stack adjustment if the sequence we are about
7945 to create is discarded. */
7946 do_pending_stack_adjust ();
7947
7948 start_sequence ();
7949
7950 /* Try to emit the conditional move. */
7951 insn = emit_conditional_move (target, comparison_code,
7952 op0, op1, mode,
7953 op0, op1, mode,
7954 unsignedp);
7955
7956 /* If we could do the conditional move, emit the sequence,
7957 and return. */
7958 if (insn)
7959 {
7960 rtx seq = get_insns ();
7961 end_sequence ();
7962 emit_insn (seq);
7963 return target;
7964 }
7965
7966 /* Otherwise discard the sequence and fall back to code with
7967 branches. */
7968 end_sequence ();
7969 }
7970 #endif
7971 if (target != op0)
7972 emit_move_insn (target, op0);
7973
7974 temp = gen_label_rtx ();
7975
7976 /* If this mode is an integer too wide to compare properly,
7977 compare word by word. Rely on cse to optimize constant cases. */
7978 if (GET_MODE_CLASS (mode) == MODE_INT
7979 && ! can_compare_p (GE, mode, ccp_jump))
7980 {
7981 if (code == MAX_EXPR)
7982 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7983 NULL_RTX, temp);
7984 else
7985 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7986 NULL_RTX, temp);
7987 }
7988 else
7989 {
7990 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7991 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
7992 }
7993 emit_move_insn (target, op1);
7994 emit_label (temp);
7995 return target;
7996
7997 case BIT_NOT_EXPR:
7998 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7999 if (modifier == EXPAND_STACK_PARM)
8000 target = 0;
8001 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8002 gcc_assert (temp);
8003 return temp;
8004
8005 /* ??? Can optimize bitwise operations with one arg constant.
8006 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8007 and (a bitwise1 b) bitwise2 b (etc)
8008 but that is probably not worthwhile. */
8009
8010 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8011 boolean values when we want in all cases to compute both of them. In
8012 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8013 as actual zero-or-1 values and then bitwise anding. In cases where
8014 there cannot be any side effects, better code would be made by
8015 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8016 how to recognize those cases. */
8017
8018 case TRUTH_AND_EXPR:
8019 code = BIT_AND_EXPR;
8020 case BIT_AND_EXPR:
8021 goto binop;
8022
8023 case TRUTH_OR_EXPR:
8024 code = BIT_IOR_EXPR;
8025 case BIT_IOR_EXPR:
8026 goto binop;
8027
8028 case TRUTH_XOR_EXPR:
8029 code = BIT_XOR_EXPR;
8030 case BIT_XOR_EXPR:
8031 goto binop;
8032
8033 case LSHIFT_EXPR:
8034 case RSHIFT_EXPR:
8035 case LROTATE_EXPR:
8036 case RROTATE_EXPR:
8037 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8038 subtarget = 0;
8039 if (modifier == EXPAND_STACK_PARM)
8040 target = 0;
8041 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8042 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8043 unsignedp);
8044
8045 /* Could determine the answer when only additive constants differ. Also,
8046 the addition of one can be handled by changing the condition. */
8047 case LT_EXPR:
8048 case LE_EXPR:
8049 case GT_EXPR:
8050 case GE_EXPR:
8051 case EQ_EXPR:
8052 case NE_EXPR:
8053 case UNORDERED_EXPR:
8054 case ORDERED_EXPR:
8055 case UNLT_EXPR:
8056 case UNLE_EXPR:
8057 case UNGT_EXPR:
8058 case UNGE_EXPR:
8059 case UNEQ_EXPR:
8060 case LTGT_EXPR:
8061 temp = do_store_flag (exp,
8062 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8063 tmode != VOIDmode ? tmode : mode, 0);
8064 if (temp != 0)
8065 return temp;
8066
8067 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8068 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8069 && original_target
8070 && REG_P (original_target)
8071 && (GET_MODE (original_target)
8072 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8073 {
8074 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8075 VOIDmode, 0);
8076
8077 /* If temp is constant, we can just compute the result. */
8078 if (GET_CODE (temp) == CONST_INT)
8079 {
8080 if (INTVAL (temp) != 0)
8081 emit_move_insn (target, const1_rtx);
8082 else
8083 emit_move_insn (target, const0_rtx);
8084
8085 return target;
8086 }
8087
8088 if (temp != original_target)
8089 {
8090 enum machine_mode mode1 = GET_MODE (temp);
8091 if (mode1 == VOIDmode)
8092 mode1 = tmode != VOIDmode ? tmode : mode;
8093
8094 temp = copy_to_mode_reg (mode1, temp);
8095 }
8096
8097 op1 = gen_label_rtx ();
8098 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8099 GET_MODE (temp), unsignedp, op1);
8100 emit_move_insn (temp, const1_rtx);
8101 emit_label (op1);
8102 return temp;
8103 }
8104
8105 /* If no set-flag instruction, must generate a conditional store
8106 into a temporary variable. Drop through and handle this
8107 like && and ||. */
8108
8109 if (! ignore
8110 && (target == 0
8111 || modifier == EXPAND_STACK_PARM
8112 || ! safe_from_p (target, exp, 1)
8113 /* Make sure we don't have a hard reg (such as function's return
8114 value) live across basic blocks, if not optimizing. */
8115 || (!optimize && REG_P (target)
8116 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8117 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8118
8119 if (target)
8120 emit_move_insn (target, const0_rtx);
8121
8122 op1 = gen_label_rtx ();
8123 jumpifnot (exp, op1);
8124
8125 if (target)
8126 emit_move_insn (target, const1_rtx);
8127
8128 emit_label (op1);
8129 return ignore ? const0_rtx : target;
8130
8131 case TRUTH_NOT_EXPR:
8132 if (modifier == EXPAND_STACK_PARM)
8133 target = 0;
8134 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8135 /* The parser is careful to generate TRUTH_NOT_EXPR
8136 only with operands that are always zero or one. */
8137 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8138 target, 1, OPTAB_LIB_WIDEN);
8139 gcc_assert (temp);
8140 return temp;
8141
8142 case STATEMENT_LIST:
8143 {
8144 tree_stmt_iterator iter;
8145
8146 gcc_assert (ignore);
8147
8148 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8149 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8150 }
8151 return const0_rtx;
8152
8153 case COND_EXPR:
8154 /* A COND_EXPR whose type is VOID_TYPE represents a
8155 conditional jump and is handled in
8156 expand_gimple_cond_expr. */
8157 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8158
8159 /* Note that COND_EXPRs whose type is a structure or union
8160 are required to be constructed to contain assignments of
8161 a temporary variable, so that we can evaluate them here
8162 for side effect only. If type is void, we must do likewise. */
8163
8164 gcc_assert (!TREE_ADDRESSABLE (type)
8165 && !ignore
8166 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8167 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8168
8169 /* If we are not to produce a result, we have no target. Otherwise,
8170 if a target was specified use it; it will not be used as an
8171 intermediate target unless it is safe. If no target, use a
8172 temporary. */
8173
8174 if (modifier != EXPAND_STACK_PARM
8175 && original_target
8176 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8177 && GET_MODE (original_target) == mode
8178 #ifdef HAVE_conditional_move
8179 && (! can_conditionally_move_p (mode)
8180 || REG_P (original_target))
8181 #endif
8182 && !MEM_P (original_target))
8183 temp = original_target;
8184 else
8185 temp = assign_temp (type, 0, 0, 1);
8186
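/* Expand as a diamond: jump to OP0 if the condition is false,
   store the THEN arm into TEMP and jump to OP1, store the ELSE
   arm at OP0, and rejoin at OP1.  */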
8187 do_pending_stack_adjust ();
8188 NO_DEFER_POP;
8189 op0 = gen_label_rtx ();
8190 op1 = gen_label_rtx ();
8191 jumpifnot (TREE_OPERAND (exp, 0), op0);
8192 store_expr (TREE_OPERAND (exp, 1), temp,
8193 modifier == EXPAND_STACK_PARM);
8194
8195 emit_jump_insn (gen_jump (op1));
8196 emit_barrier ();
8197 emit_label (op0);
8198 store_expr (TREE_OPERAND (exp, 2), temp,
8199 modifier == EXPAND_STACK_PARM);
8200
8201 emit_label (op1);
8202 OK_DEFER_POP;
8203 return temp;
8204
8205 case VEC_COND_EXPR:
8206 target = expand_vec_cond_expr (exp, target);
8207 return target;
8208
8209 case MODIFY_EXPR:
8210 {
8211 tree lhs = TREE_OPERAND (exp, 0);
8212 tree rhs = TREE_OPERAND (exp, 1);
8213
8214 gcc_assert (ignore);
8215
8216 /* Check for |= or &= of a bitfield of size one into another bitfield
8217 of size 1. In this case, (unless we need the result of the
8218 assignment) we can do this more efficiently with a
8219 test followed by an assignment, if necessary.
8220
8221 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8222 things change so we do, this code should be enhanced to
8223 support it. */
8224 if (TREE_CODE (lhs) == COMPONENT_REF
8225 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8226 || TREE_CODE (rhs) == BIT_AND_EXPR)
8227 && TREE_OPERAND (rhs, 0) == lhs
8228 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8229 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8230 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8231 {
8232 rtx label = gen_label_rtx ();
8233
8234 do_jump (TREE_OPERAND (rhs, 1),
8235 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8236 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8237 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8238 (TREE_CODE (rhs) == BIT_IOR_EXPR
8239 ? integer_one_node
8240 : integer_zero_node)));
8241 do_pending_stack_adjust ();
8242 emit_label (label);
8243 return const0_rtx;
8244 }
8245
8246 expand_assignment (lhs, rhs);
8247
8248 return const0_rtx;
8249 }
8250
8251 case RETURN_EXPR:
8252 if (!TREE_OPERAND (exp, 0))
8253 expand_null_return ();
8254 else
8255 expand_return (TREE_OPERAND (exp, 0));
8256 return const0_rtx;
8257
8258 case ADDR_EXPR:
8259 return expand_expr_addr_expr (exp, target, tmode, modifier);
8260
8261 case COMPLEX_EXPR:
8262 /* Get the rtx code of the operands. */
8263 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8264 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8265
8266 if (!target)
8267 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8268
8269 /* Move the real (op0) and imaginary (op1) parts to their location. */
8270 write_complex_part (target, op0, false);
8271 write_complex_part (target, op1, true);
8272
8273 return target;
8274
8275 case REALPART_EXPR:
8276 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8277 return read_complex_part (op0, false);
8278
8279 case IMAGPART_EXPR:
8280 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8281 return read_complex_part (op0, true);
8282
8283 case RESX_EXPR:
8284 expand_resx_expr (exp);
8285 return const0_rtx;
8286
8287 case TRY_CATCH_EXPR:
8288 case CATCH_EXPR:
8289 case EH_FILTER_EXPR:
8290 case TRY_FINALLY_EXPR:
8291 /* Lowered by tree-eh.c. */
8292 gcc_unreachable ();
8293
8294 case WITH_CLEANUP_EXPR:
8295 case CLEANUP_POINT_EXPR:
8296 case TARGET_EXPR:
8297 case CASE_LABEL_EXPR:
8298 case VA_ARG_EXPR:
8299 case BIND_EXPR:
8300 case INIT_EXPR:
8301 case CONJ_EXPR:
8302 case COMPOUND_EXPR:
8303 case PREINCREMENT_EXPR:
8304 case PREDECREMENT_EXPR:
8305 case POSTINCREMENT_EXPR:
8306 case POSTDECREMENT_EXPR:
8307 case LOOP_EXPR:
8308 case EXIT_EXPR:
8309 case TRUTH_ANDIF_EXPR:
8310 case TRUTH_ORIF_EXPR:
8311 /* Lowered by gimplify.c. */
8312 gcc_unreachable ();
8313
8314 case EXC_PTR_EXPR:
8315 return get_exception_pointer (cfun);
8316
8317 case FILTER_EXPR:
8318 return get_exception_filter (cfun);
8319
8320 case FDESC_EXPR:
8321 /* Function descriptors are not valid except for as
8322 initialization constants, and should not be expanded. */
8323 gcc_unreachable ();
8324
8325 case SWITCH_EXPR:
8326 expand_case (exp);
8327 return const0_rtx;
8328
8329 case LABEL_EXPR:
8330 expand_label (TREE_OPERAND (exp, 0));
8331 return const0_rtx;
8332
8333 case ASM_EXPR:
8334 expand_asm_expr (exp);
8335 return const0_rtx;
8336
8337 case WITH_SIZE_EXPR:
8338 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8339 have pulled out the size to use in whatever context it needed. */
8340 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8341 modifier, alt_rtl);
8342
8343 case REALIGN_LOAD_EXPR:
8344 {
8345 tree oprnd0 = TREE_OPERAND (exp, 0);
8346 tree oprnd1 = TREE_OPERAND (exp, 1);
8347 tree oprnd2 = TREE_OPERAND (exp, 2);
8348 rtx op2;
8349
8350 this_optab = optab_for_tree_code (code, type);
8351 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8352 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8353 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8354 target, unsignedp);
8355 gcc_assert (temp);
8356 return temp;
8357 }
8358
8359 case REDUC_MAX_EXPR:
8360 case REDUC_MIN_EXPR:
8361 case REDUC_PLUS_EXPR:
8362 {
8363 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8364 this_optab = optab_for_tree_code (code, type);
8365 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8366 gcc_assert (temp);
8367 return temp;
8368 }
8369
8370 case VEC_LSHIFT_EXPR:
8371 case VEC_RSHIFT_EXPR:
8372 {
8373 target = expand_vec_shift_expr (exp, target);
8374 return target;
8375 }
8376
8377 default:
8378 return lang_hooks.expand_expr (exp, original_target, tmode,
8379 modifier, alt_rtl);
8380 }
8381
8382 /* Here to do an ordinary binary operator. */
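/* BINOP expands both operands; BINOP2 is entered with OP0 and OP1
   already expanded; BINOP3 additionally assumes THIS_OPTAB has
   already been chosen.  */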
8383 binop:
8384 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8385 subtarget, &op0, &op1, 0);
8386 binop2:
8387 this_optab = optab_for_tree_code (code, type);
8388 binop3:
8389 if (modifier == EXPAND_STACK_PARM)
8390 target = 0;
8391 temp = expand_binop (mode, this_optab, op0, op1, target,
8392 unsignedp, OPTAB_LIB_WIDEN);
8393 gcc_assert (temp);
8394 return REDUCE_BIT_FIELD (temp);
8395 }
8396 #undef REDUCE_BIT_FIELD
8397 \f
8398 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8399 signedness of TYPE), possibly returning the result in TARGET. */
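/* For an unsigned TYPE the value is masked down to PREC bits; for
   a signed TYPE it is shifted left and then arithmetic-shifted
   right by GET_MODE_BITSIZE - PREC bits, sign-extending it from
   bit PREC - 1.  */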
8400 static rtx
8401 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8402 {
8403 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8404 if (target && GET_MODE (target) != GET_MODE (exp))
8405 target = 0;
8406 if (TYPE_UNSIGNED (type))
8407 {
8408 rtx mask;
8409 if (prec < HOST_BITS_PER_WIDE_INT)
8410 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8411 GET_MODE (exp));
8412 else
8413 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8414 ((unsigned HOST_WIDE_INT) 1
8415 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8416 GET_MODE (exp));
8417 return expand_and (GET_MODE (exp), exp, mask, target);
8418 }
8419 else
8420 {
8421 tree count = build_int_cst (NULL_TREE,
8422 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8423 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8424 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8425 }
8426 }
8427 \f
8428 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8429 when applied to the address of EXP produces an address known to be
8430 aligned more than BIGGEST_ALIGNMENT. */
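/* The pattern recognized below is OFFSET == (-&EXP) & (N - 1),
   where N is a power of 2 exceeding BIGGEST_ALIGNMENT / BITS_PER_UNIT;
   adding such an offset to the address of EXP rounds it up to an
   N-byte boundary.  */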
8431
8432 static int
8433 is_aligning_offset (tree offset, tree exp)
8434 {
8435 /* Strip off any conversions. */
8436 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8437 || TREE_CODE (offset) == NOP_EXPR
8438 || TREE_CODE (offset) == CONVERT_EXPR)
8439 offset = TREE_OPERAND (offset, 0);
8440
8441 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8442 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8443 if (TREE_CODE (offset) != BIT_AND_EXPR
8444 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8445 || compare_tree_int (TREE_OPERAND (offset, 1),
8446 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8447 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8448 return 0;
8449
8450 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8451 It must be NEGATE_EXPR. Then strip any more conversions. */
8452 offset = TREE_OPERAND (offset, 0);
8453 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8454 || TREE_CODE (offset) == NOP_EXPR
8455 || TREE_CODE (offset) == CONVERT_EXPR)
8456 offset = TREE_OPERAND (offset, 0);
8457
8458 if (TREE_CODE (offset) != NEGATE_EXPR)
8459 return 0;
8460
8461 offset = TREE_OPERAND (offset, 0);
8462 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8463 || TREE_CODE (offset) == NOP_EXPR
8464 || TREE_CODE (offset) == CONVERT_EXPR)
8465 offset = TREE_OPERAND (offset, 0);
8466
8467 /* This must now be the address of EXP. */
8468 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8469 }
8470 \f
8471 /* Return the tree node if an ARG corresponds to a string constant or zero
8472 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8473 in bytes within the string that ARG is accessing. The type of the
8474 offset will be `sizetype'. */
8475
8476 tree
8477 string_constant (tree arg, tree *ptr_offset)
8478 {
8479 tree array, offset;
8480 STRIP_NOPS (arg);
8481
8482 if (TREE_CODE (arg) == ADDR_EXPR)
8483 {
8484 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8485 {
8486 *ptr_offset = size_zero_node;
8487 return TREE_OPERAND (arg, 0);
8488 }
8489 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8490 {
8491 array = TREE_OPERAND (arg, 0);
8492 offset = size_zero_node;
8493 }
8494 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8495 {
8496 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8497 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8498 if (TREE_CODE (array) != STRING_CST
8499 && TREE_CODE (array) != VAR_DECL)
8500 return 0;
8501 }
8502 else
8503 return 0;
8504 }
8505 else if (TREE_CODE (arg) == PLUS_EXPR)
8506 {
8507 tree arg0 = TREE_OPERAND (arg, 0);
8508 tree arg1 = TREE_OPERAND (arg, 1);
8509
8510 STRIP_NOPS (arg0);
8511 STRIP_NOPS (arg1);
8512
8513 if (TREE_CODE (arg0) == ADDR_EXPR
8514 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8515 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8516 {
8517 array = TREE_OPERAND (arg0, 0);
8518 offset = arg1;
8519 }
8520 else if (TREE_CODE (arg1) == ADDR_EXPR
8521 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8522 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8523 {
8524 array = TREE_OPERAND (arg1, 0);
8525 offset = arg0;
8526 }
8527 else
8528 return 0;
8529 }
8530 else
8531 return 0;
8532
8533 if (TREE_CODE (array) == STRING_CST)
8534 {
8535 *ptr_offset = convert (sizetype, offset);
8536 return array;
8537 }
8538 else if (TREE_CODE (array) == VAR_DECL)
8539 {
8540 int length;
8541
8542 /* Variables initialized to string literals can be handled too. */
8543 if (DECL_INITIAL (array) == NULL_TREE
8544 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8545 return 0;
8546
8547 /* They must be read-only, free of side effects, and bind locally. */
8548 if (! TREE_READONLY (array)
8549 || TREE_SIDE_EFFECTS (array)
8550 || ! targetm.binds_local_p (array))
8551 return 0;
8552
8553 /* Avoid const char foo[4] = "abcde"; */
8554 if (DECL_SIZE_UNIT (array) == NULL_TREE
8555 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8556 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8557 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8558 return 0;
8559
8560 /* If the variable is bigger than the string literal, OFFSET must be constant
8561 and inside the bounds of the string literal. */
8562 offset = convert (sizetype, offset);
8563 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8564 && (! host_integerp (offset, 1)
8565 || compare_tree_int (offset, length) >= 0))
8566 return 0;
8567
8568 *ptr_offset = offset;
8569 return DECL_INITIAL (array);
8570 }
8571
8572 return 0;
8573 }
8574 \f
8575 /* Generate code to calculate EXP using a store-flag instruction
8576 and return an rtx for the result. EXP is either a comparison
8577 or a TRUTH_NOT_EXPR whose operand is a comparison.
8578
8579 If TARGET is nonzero, store the result there if convenient.
8580
8581 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8582 cheap.
8583
8584 Return zero if there is no suitable set-flag instruction
8585 available on this machine.
8586
8587 Once expand_expr has been called on the arguments of the comparison,
8588 we are committed to doing the store flag, since it is not safe to
8589 re-evaluate the expression. We emit the store-flag insn by calling
8590 emit_store_flag, but only expand the arguments if we have a reason
8591 to believe that emit_store_flag will be successful. If we think that
8592 it will, but it isn't, we have to simulate the store-flag with a
8593 set/jump/set sequence. */
8594
8595 static rtx
8596 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8597 {
8598 enum rtx_code code;
8599 tree arg0, arg1, type;
8600 tree tem;
8601 enum machine_mode operand_mode;
8602 int invert = 0;
8603 int unsignedp;
8604 rtx op0, op1;
8605 enum insn_code icode;
8606 rtx subtarget = target;
8607 rtx result, label;
8608
8609 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8610 result at the end. We can't simply invert the test since it would
8611 have already been inverted if it were valid. This case occurs for
8612 some floating-point comparisons. */
8613
8614 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8615 invert = 1, exp = TREE_OPERAND (exp, 0);
8616
8617 arg0 = TREE_OPERAND (exp, 0);
8618 arg1 = TREE_OPERAND (exp, 1);
8619
8620 /* Don't crash if the comparison was erroneous. */
8621 if (arg0 == error_mark_node || arg1 == error_mark_node)
8622 return const0_rtx;
8623
8624 type = TREE_TYPE (arg0);
8625 operand_mode = TYPE_MODE (type);
8626 unsignedp = TYPE_UNSIGNED (type);
8627
8628 /* We won't bother with BLKmode store-flag operations because it would mean
8629 passing a lot of information to emit_store_flag. */
8630 if (operand_mode == BLKmode)
8631 return 0;
8632
8633 /* We won't bother with store-flag operations involving function pointers
8634 when function pointers must be canonicalized before comparisons. */
8635 #ifdef HAVE_canonicalize_funcptr_for_compare
8636 if (HAVE_canonicalize_funcptr_for_compare
8637 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8638 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8639 == FUNCTION_TYPE))
8640 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8641 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8642 == FUNCTION_TYPE))))
8643 return 0;
8644 #endif
8645
8646 STRIP_NOPS (arg0);
8647 STRIP_NOPS (arg1);
8648
8649 /* Get the rtx comparison code to use. We know that EXP is a comparison
8650 operation of some type. Some comparisons against 1 and -1 can be
8651 converted to comparisons with zero. Do so here so that the tests
8652 below will be aware that we have a comparison with zero. These
8653 tests will not catch constants in the first operand, but constants
8654 are rarely passed as the first operand. */
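  /* For illustration: "x < 1" is rewritten below as "x <= 0", and, for
     signed operands, "x > -1" becomes "x >= 0"; the switch statement
     below is the authoritative mapping.  */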
8655
8656 switch (TREE_CODE (exp))
8657 {
8658 case EQ_EXPR:
8659 code = EQ;
8660 break;
8661 case NE_EXPR:
8662 code = NE;
8663 break;
8664 case LT_EXPR:
8665 if (integer_onep (arg1))
8666 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8667 else
8668 code = unsignedp ? LTU : LT;
8669 break;
8670 case LE_EXPR:
8671 if (! unsignedp && integer_all_onesp (arg1))
8672 arg1 = integer_zero_node, code = LT;
8673 else
8674 code = unsignedp ? LEU : LE;
8675 break;
8676 case GT_EXPR:
8677 if (! unsignedp && integer_all_onesp (arg1))
8678 arg1 = integer_zero_node, code = GE;
8679 else
8680 code = unsignedp ? GTU : GT;
8681 break;
8682 case GE_EXPR:
8683 if (integer_onep (arg1))
8684 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8685 else
8686 code = unsignedp ? GEU : GE;
8687 break;
8688
8689 case UNORDERED_EXPR:
8690 code = UNORDERED;
8691 break;
8692 case ORDERED_EXPR:
8693 code = ORDERED;
8694 break;
8695 case UNLT_EXPR:
8696 code = UNLT;
8697 break;
8698 case UNLE_EXPR:
8699 code = UNLE;
8700 break;
8701 case UNGT_EXPR:
8702 code = UNGT;
8703 break;
8704 case UNGE_EXPR:
8705 code = UNGE;
8706 break;
8707 case UNEQ_EXPR:
8708 code = UNEQ;
8709 break;
8710 case LTGT_EXPR:
8711 code = LTGT;
8712 break;
8713
8714 default:
8715 gcc_unreachable ();
8716 }
8717
8718 /* Put a constant second. */
8719 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8720 {
8721 tem = arg0; arg0 = arg1; arg1 = tem;
8722 code = swap_condition (code);
8723 }
8724
8725 /* If this is an equality or inequality test of a single bit, we can
8726 do this by shifting the bit being tested to the low-order bit and
8727 masking the result with the constant 1. If the condition was EQ,
8728 we xor it with 1. This does not require an scc insn and is faster
8729 than an scc insn even if we have it.
8730
8731 The code to make this transformation was moved into fold_single_bit_test,
8732 so we just call into the folder and expand its result. */
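  /* For instance, "(x & 8) != 0" can be expanded as roughly
     "(x >> 3) & 1", and "(x & 8) == 0" as "((x >> 3) & 1) ^ 1";
     this is only an illustration of what fold_single_bit_test
     produces.  */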
8733
8734 if ((code == NE || code == EQ)
8735 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8736 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8737 {
8738 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8739 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8740 arg0, arg1, type),
8741 target, VOIDmode, EXPAND_NORMAL);
8742 }
8743
8744 /* Now see if we are likely to be able to do this. Return if not. */
8745 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8746 return 0;
8747
8748 icode = setcc_gen_code[(int) code];
8749 if (icode == CODE_FOR_nothing
8750 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8751 {
8752 /* We can only do this if it is one of the special cases that
8753 can be handled without an scc insn. */
8754 if ((code == LT && integer_zerop (arg1))
8755 || (! only_cheap && code == GE && integer_zerop (arg1)))
8756 ;
8757 else if (! only_cheap && (code == NE || code == EQ)
8758 && TREE_CODE (type) != REAL_TYPE
8759 && ((abs_optab->handlers[(int) operand_mode].insn_code
8760 != CODE_FOR_nothing)
8761 || (ffs_optab->handlers[(int) operand_mode].insn_code
8762 != CODE_FOR_nothing)))
8763 ;
8764 else
8765 return 0;
8766 }
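  /* Roughly speaking, these are cases emit_store_flag can synthesize
     anyway: "x < 0" reduces to a sign-bit extraction, and "x == 0" /
     "x != 0" can be built from abs or ffs when those are available
     (hence the optab checks above).  */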
8767
8768 if (! get_subtarget (target)
8769 || GET_MODE (subtarget) != operand_mode)
8770 subtarget = 0;
8771
8772 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8773
8774 if (target == 0)
8775 target = gen_reg_rtx (mode);
8776
8777 result = emit_store_flag (target, code, op0, op1,
8778 operand_mode, unsignedp, 1);
8779
8780 if (result)
8781 {
8782 if (invert)
8783 result = expand_binop (mode, xor_optab, result, const1_rtx,
8784 result, 0, OPTAB_LIB_WIDEN);
8785 return result;
8786 }
8787
8788 /* If this failed, we have to do this with set/compare/jump/set code. */
8789 if (!REG_P (target)
8790 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8791 target = gen_reg_rtx (GET_MODE (target));
8792
8793 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8794 result = compare_from_rtx (op0, op1, code, unsignedp,
8795 operand_mode, NULL_RTX);
8796 if (GET_CODE (result) == CONST_INT)
8797 return (((result == const0_rtx && ! invert)
8798 || (result != const0_rtx && invert))
8799 ? const0_rtx : const1_rtx);
8800
8801 /* The code of RESULT may not match CODE if compare_from_rtx
8802 decided to swap its operands and reverse the original code.
8803
8804 We know that compare_from_rtx returns either a CONST_INT or
8805 a new comparison code, so it is safe to just extract the
8806 code from RESULT. */
8807 code = GET_CODE (result);
8808
8809 label = gen_label_rtx ();
8810 gcc_assert (bcc_gen_fctn[(int) code]);
8811
8812 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8813 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8814 emit_label (label);
8815
8816 return target;
8817 }
8818 \f
8819
8820 /* Stubs in case we haven't got a casesi insn. */
8821 #ifndef HAVE_casesi
8822 # define HAVE_casesi 0
8823 # define gen_casesi(a, b, c, d, e) (0)
8824 # define CODE_FOR_casesi CODE_FOR_nothing
8825 #endif
8826
8827 /* If the machine does not have a case insn that compares the bounds,
8828 this means extra overhead for dispatch tables, which raises the
8829 threshold for using them. */
8830 #ifndef CASE_VALUES_THRESHOLD
8831 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8832 #endif /* CASE_VALUES_THRESHOLD */
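/* Roughly, with the defaults above a switch statement needs at least
   four (with casesi) or five (without) distinct case values before a
   dispatch table is considered worthwhile.  */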
8833
8834 unsigned int
8835 case_values_threshold (void)
8836 {
8837 return CASE_VALUES_THRESHOLD;
8838 }
8839
8840 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8841 0 otherwise (i.e. if there is no casesi instruction). */
8842 int
8843 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8844 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8845 {
8846 enum machine_mode index_mode = SImode;
8847 int index_bits = GET_MODE_BITSIZE (index_mode);
8848 rtx op1, op2, index;
8849 enum machine_mode op_mode;
8850
8851 if (! HAVE_casesi)
8852 return 0;
8853
8854 /* Convert the index to SImode. */
8855 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8856 {
8857 enum machine_mode omode = TYPE_MODE (index_type);
8858 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8859
8860 /* We must handle the endpoints in the original mode. */
8861 index_expr = build2 (MINUS_EXPR, index_type,
8862 index_expr, minval);
8863 minval = integer_zero_node;
8864 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8865 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8866 omode, 1, default_label);
8867 /* Now we can safely truncate. */
8868 index = convert_to_mode (index_mode, index, 0);
8869 }
8870 else
8871 {
8872 if (TYPE_MODE (index_type) != index_mode)
8873 {
8874 index_expr = convert (lang_hooks.types.type_for_size
8875 (index_bits, 0), index_expr);
8876 index_type = TREE_TYPE (index_expr);
8877 }
8878
8879 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8880 }
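  /* Illustration: if the switch index has DImode but the casesi pattern
     wants SImode, the code in the first branch above range-checks the
     DImode value first (jumping to DEFAULT_LABEL when it is out of
     range) and only then truncates it to SImode, so no significant
     bits are lost.  */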
8881
8882 do_pending_stack_adjust ();
8883
8884 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8885 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8886 (index, op_mode))
8887 index = copy_to_mode_reg (op_mode, index);
8888
8889 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8890
8891 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8892 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8893 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8894 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8895 (op1, op_mode))
8896 op1 = copy_to_mode_reg (op_mode, op1);
8897
8898 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8899
8900 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8901 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8902 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8903 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8904 (op2, op_mode))
8905 op2 = copy_to_mode_reg (op_mode, op2);
8906
8907 emit_jump_insn (gen_casesi (index, op1, op2,
8908 table_label, default_label));
8909 return 1;
8910 }
8911
8912 /* Attempt to generate a tablejump instruction; same concept as try_casesi above. */
8913 #ifndef HAVE_tablejump
8914 #define HAVE_tablejump 0
8915 #define gen_tablejump(x, y) (0)
8916 #endif
8917
8918 /* Subroutine of the next function.
8919
8920 INDEX is the value being switched on, with the lowest value
8921 in the table already subtracted.
8922 MODE is its expected mode (needed if INDEX is constant).
8923 RANGE is the length of the jump table.
8924 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8925
8926 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8927 index value is out of range. */
8928
8929 static void
8930 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8931 rtx default_label)
8932 {
8933 rtx temp, vector;
8934
8935 if (INTVAL (range) > cfun->max_jumptable_ents)
8936 cfun->max_jumptable_ents = INTVAL (range);
8937
8938 /* Do an unsigned comparison (in the proper mode) between the index
8939 expression and the value which represents the length of the range.
8940 Since we just finished subtracting the lower bound of the range
8941 from the index expression, this comparison allows us to simultaneously
8942 check that the original index expression value is both greater than
8943 or equal to the minimum value of the range and less than or equal to
8944 the maximum value of the range. */
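  /* For example, for "switch (x)" with case labels 3 ... 10, INDEX is
     x - 3 and RANGE is 7; if x is 2, the subtraction wrapped around to
     a huge unsigned value, so the single GTU test below also catches
     values below the minimum.  */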
8945
8946 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8947 default_label);
8948
8949 /* If index is in range, it must fit in Pmode.
8950 Convert to Pmode so we can index with it. */
8951 if (mode != Pmode)
8952 index = convert_to_mode (Pmode, index, 1);
8953
8954 /* Don't let a MEM slip through, because then the INDEX that comes
8955 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8956 and break_out_memory_refs will mangle it. */
8957 #ifdef PIC_CASE_VECTOR_ADDRESS
8958 if (flag_pic && !REG_P (index))
8959 index = copy_to_mode_reg (Pmode, index);
8960 #endif
8961
8962 /* If flag_force_addr were to affect this address
8963 it could interfere with the tricky assumptions made
8964 about addresses that contain label-refs,
8965 which may be valid only very near the tablejump itself. */
8966 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8967 GET_MODE_SIZE, because this indicates how large insns are. The other
8968 uses should all be Pmode, because they are addresses. This code
8969 could fail if addresses and insns are not the same size. */
8970 index = gen_rtx_PLUS (Pmode,
8971 gen_rtx_MULT (Pmode, index,
8972 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8973 gen_rtx_LABEL_REF (Pmode, table_label));
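  /* At this point INDEX is roughly
     (plus (mult index (const_int entry_size)) (label_ref table_label)),
     where entry_size is GET_MODE_SIZE (CASE_VECTOR_MODE), i.e. the
     address of the selected dispatch-table entry.  */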
8974 #ifdef PIC_CASE_VECTOR_ADDRESS
8975 if (flag_pic)
8976 index = PIC_CASE_VECTOR_ADDRESS (index);
8977 else
8978 #endif
8979 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8980 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8981 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8982 convert_move (temp, vector, 0);
8983
8984 emit_jump_insn (gen_tablejump (temp, table_label));
8985
8986 /* If we are generating PIC code or if the table is PC-relative, the
8987 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8988 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8989 emit_barrier ();
8990 }
8991
8992 int
8993 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8994 rtx table_label, rtx default_label)
8995 {
8996 rtx index;
8997
8998 if (! HAVE_tablejump)
8999 return 0;
9000
9001 index_expr = fold_build2 (MINUS_EXPR, index_type,
9002 convert (index_type, index_expr),
9003 convert (index_type, minval));
9004 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9005 do_pending_stack_adjust ();
9006
9007 do_tablejump (index, TYPE_MODE (index_type),
9008 convert_modes (TYPE_MODE (index_type),
9009 TYPE_MODE (TREE_TYPE (range)),
9010 expand_expr (range, NULL_RTX,
9011 VOIDmode, 0),
9012 TYPE_UNSIGNED (TREE_TYPE (range))),
9013 table_label, default_label);
9014 return 1;
9015 }
9016
9017 /* Nonzero if the mode is a valid vector mode for this architecture.
9018 This returns nonzero even if there is no hardware support for the
9019 vector mode, provided we can emulate it with narrower modes. */
9020
9021 int
9022 vector_mode_valid_p (enum machine_mode mode)
9023 {
9024 enum mode_class class = GET_MODE_CLASS (mode);
9025 enum machine_mode innermode;
9026
9027 /* Reject anything that is not a vector mode. */
9028 if (class != MODE_VECTOR_INT
9029 && class != MODE_VECTOR_FLOAT)
9030 return 0;
9031
9032 /* Hardware support. Woo hoo! */
9033 if (targetm.vector_mode_supported_p (mode))
9034 return 1;
9035
9036 innermode = GET_MODE_INNER (mode);
9037
9038 /* We should probably return 1 if requesting V4DI and we have no DI
9039 but do have V2DI; however, that situation is very unlikely. */
9040
9041 /* If we have support for the inner mode, we can safely emulate it.
9042 We may not have V2DI, but we can emulate it with a pair of DIs. */
9043 return targetm.scalar_mode_supported_p (innermode);
9044 }
9045
9046 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
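/* For example, a V4SImode VECTOR_CST listing only the elements {1, 2}
   would become (const_vector:V4SI [1 2 0 0]); elements the tree does
   not supply are zero-filled below.  (Illustration only.)  */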
9047 static rtx
9048 const_vector_from_tree (tree exp)
9049 {
9050 rtvec v;
9051 int units, i;
9052 tree link, elt;
9053 enum machine_mode inner, mode;
9054
9055 mode = TYPE_MODE (TREE_TYPE (exp));
9056
9057 if (initializer_zerop (exp))
9058 return CONST0_RTX (mode);
9059
9060 units = GET_MODE_NUNITS (mode);
9061 inner = GET_MODE_INNER (mode);
9062
9063 v = rtvec_alloc (units);
9064
9065 link = TREE_VECTOR_CST_ELTS (exp);
9066 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9067 {
9068 elt = TREE_VALUE (link);
9069
9070 if (TREE_CODE (elt) == REAL_CST)
9071 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9072 inner);
9073 else
9074 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9075 TREE_INT_CST_HIGH (elt),
9076 inner);
9077 }
9078
9079 /* Initialize remaining elements to 0. */
9080 for (; i < units; ++i)
9081 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9082
9083 return gen_rtx_CONST_VECTOR (mode, v);
9084 }
9085 #include "gt-expr.h"