1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
58
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
61
62 #ifdef PUSH_ROUNDING
63
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
69
70 #endif
71
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
79
80
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
88
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
92 {
93   rtx to;			/* Destination MEM of the copy, or 0 when pushing.  */
94   rtx to_addr;		/* Address of the destination.  */
95   int autinc_to;		/* Nonzero if TO_ADDR is an auto-increment address.  */
96   int explicit_inc_to;	/* -1/+1 if we decrement/increment TO_ADDR explicitly.  */
97   rtx from;			/* Source MEM of the copy.  */
98   rtx from_addr;		/* Address of the source.  */
99   int autinc_from;		/* Nonzero if FROM_ADDR is an auto-increment address.  */
100   int explicit_inc_from;	/* -1/+1 if we decrement/increment FROM_ADDR explicitly.  */
101   unsigned HOST_WIDE_INT len;	/* Bytes remaining to be copied.  */
102   HOST_WIDE_INT offset;	/* Current byte offset into the blocks.  */
103   int reverse;		/* Nonzero to copy from high addresses downward.  */
104 };
105
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
108
109 struct store_by_pieces
110 {
111   rtx to;			/* Destination MEM being stored to.  */
112   rtx to_addr;		/* Address of the destination.  */
113   int autinc_to;		/* Nonzero if TO_ADDR is an auto-increment address.  */
114   int explicit_inc_to;	/* -1/+1 if we decrement/increment TO_ADDR explicitly.  */
115   unsigned HOST_WIDE_INT len;	/* Bytes remaining to be stored.  */
116   HOST_WIDE_INT offset;	/* Current byte offset into the block.  */
117   rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode); /* Returns the constant to store at a given byte offset, in the given mode.  */
118   void *constfundata;	/* Opaque data passed through to CONSTFUN.  */
119   int reverse;		/* Nonzero to store from high addresses downward.  */
120 };
121
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int);
124 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
125 struct move_by_pieces *);
126 static bool block_move_libcall_safe_for_call_parm (void);
127 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
128 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
129 static tree emit_block_move_libcall_fn (int);
130 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
131 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
132 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
133 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
134 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
135 struct store_by_pieces *);
136 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
137 static rtx clear_storage_via_libcall (rtx, rtx);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree, int);
147 static rtx var_rtx (tree);
148
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
151
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 #ifdef PUSH_ROUNDING
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 #endif
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
162
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
166
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169
170 /* Record for each mode whether we can float-extend from memory. */
171
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
179 #endif
180
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
186 #endif
187
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
193 #endif
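/* A minimal sketch of how a port might tune this heuristic: a target
   header can define MOVE_BY_PIECES_P itself, overriding the default
   above.  The threshold and alignment test below are invented for
   illustration, not taken from any real target.  */
#if 0
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  ((SIZE) <= 32 && (ALIGN) >= BITS_PER_WORD)
#endif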
194
195 /* This array records the insn_code of insns to perform block moves. */
196 enum insn_code movmem_optab[NUM_MACHINE_MODES];
197
198 /* This array records the insn_code of insns to perform block clears. */
199 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
200
201 /* These arrays record the insn_code of two different kinds of insns
202 to perform block compares. */
203 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
204 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
205
206 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
207
208 #ifndef SLOW_UNALIGNED_ACCESS
209 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
210 #endif
211 \f
212 /* This is run once per compilation to set up which modes can be used
213 directly in memory and to initialize the block move optab. */
214
215 void
216 init_expr_once (void)
217 {
218 rtx insn, pat;
219 enum machine_mode mode;
220 int num_clobbers;
221 rtx mem, mem1;
222 rtx reg;
223
224 /* Try indexing by frame ptr and try by stack ptr.
225 It is known that on the Convex the stack ptr isn't a valid index.
226 With luck, one or the other is valid on any machine. */
227 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
228 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
229
230 /* A scratch register we can modify in-place below to avoid
231 useless RTL allocations. */
232 reg = gen_rtx_REG (VOIDmode, -1);
233
234 insn = rtx_alloc (INSN);
235 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
236 PATTERN (insn) = pat;
237
238 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
239 mode = (enum machine_mode) ((int) mode + 1))
240 {
241 int regno;
242
243 direct_load[(int) mode] = direct_store[(int) mode] = 0;
244 PUT_MODE (mem, mode);
245 PUT_MODE (mem1, mode);
246 PUT_MODE (reg, mode);
247
248 /* See if there is some register that can be used in this mode and
249 directly loaded or stored from memory. */
250
251 if (mode != VOIDmode && mode != BLKmode)
252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
253 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
254 regno++)
255 {
256 if (! HARD_REGNO_MODE_OK (regno, mode))
257 continue;
258
259 REGNO (reg) = regno;
260
261 SET_SRC (pat) = mem;
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
265
266 SET_SRC (pat) = mem1;
267 SET_DEST (pat) = reg;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_load[(int) mode] = 1;
270
271 SET_SRC (pat) = reg;
272 SET_DEST (pat) = mem;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
275
276 SET_SRC (pat) = reg;
277 SET_DEST (pat) = mem1;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_store[(int) mode] = 1;
280 }
281 }
282
283 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
284
285 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
286 mode = GET_MODE_WIDER_MODE (mode))
287 {
288 enum machine_mode srcmode;
289 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
290 srcmode = GET_MODE_WIDER_MODE (srcmode))
291 {
292 enum insn_code ic;
293
294 ic = can_extend_p (mode, srcmode, 0);
295 if (ic == CODE_FOR_nothing)
296 continue;
297
298 PUT_MODE (mem, srcmode);
299
300 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
301 float_extend_from_mem[mode][srcmode] = true;
302 }
303 }
304 }
305
306 /* This is run at the start of compiling a function. */
307
308 void
309 init_expr (void)
310 {
311 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
312 }
313 \f
314 /* Copy data from FROM to TO, where the machine modes are not the same.
315 Both modes may be integer, or both may be floating.
316 UNSIGNEDP should be nonzero if FROM is an unsigned type.
317 This causes zero-extension instead of sign-extension. */
318
319 void
320 convert_move (rtx to, rtx from, int unsignedp)
321 {
322 enum machine_mode to_mode = GET_MODE (to);
323 enum machine_mode from_mode = GET_MODE (from);
324 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
325 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
326 enum insn_code code;
327 rtx libcall;
328
329 /* rtx code for making an equivalent value. */
330 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
331 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
332
333
334 if (to_real != from_real)
335 abort ();
336
337 /* If the source and destination are already the same, then there's
338 nothing to do. */
339 if (to == from)
340 return;
341
342 /* If FROM is a SUBREG that indicates that we have already done at least
343 the required extension, strip it. We don't handle such SUBREGs as
344 TO here. */
345
346 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
347 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
348 >= GET_MODE_SIZE (to_mode))
349 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
350 from = gen_lowpart (to_mode, from), from_mode = to_mode;
351
352 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
353 abort ();
354
355 if (to_mode == from_mode
356 || (from_mode == VOIDmode && CONSTANT_P (from)))
357 {
358 emit_move_insn (to, from);
359 return;
360 }
361
362 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
363 {
364 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
365 abort ();
366
367 if (VECTOR_MODE_P (to_mode))
368 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
369 else
370 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
371
372 emit_move_insn (to, from);
373 return;
374 }
375
376 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
377 {
378 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
379 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
380 return;
381 }
382
383 if (to_real)
384 {
385 rtx value, insns;
386 convert_optab tab;
387
388 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
389 tab = sext_optab;
390 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
391 tab = trunc_optab;
392 else
393 abort ();
394
395 /* Try converting directly if the insn is supported. */
396
397 code = tab->handlers[to_mode][from_mode].insn_code;
398 if (code != CODE_FOR_nothing)
399 {
400 emit_unop_insn (code, to, from,
401 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
402 return;
403 }
404
405 /* Otherwise use a libcall. */
406 libcall = tab->handlers[to_mode][from_mode].libfunc;
407
408 if (!libcall)
409 /* This conversion is not implemented yet. */
410 abort ();
411
412 start_sequence ();
413 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
414 1, from, from_mode);
415 insns = get_insns ();
416 end_sequence ();
417 emit_libcall_block (insns, to, value,
418 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
419 from)
420 : gen_rtx_FLOAT_EXTEND (to_mode, from));
421 return;
422 }
423
424 /* Handle pointer conversion. */ /* SPEE 900220. */
425 /* Targets are expected to provide conversion insns between PxImode and
426 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
427 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
428 {
429 enum machine_mode full_mode
430 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
431
432 if (trunc_optab->handlers[to_mode][full_mode].insn_code
433 == CODE_FOR_nothing)
434 abort ();
435
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
439 to, from, UNKNOWN);
440 return;
441 }
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
443 {
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
446
447 if (sext_optab->handlers[full_mode][from_mode].insn_code
448 == CODE_FOR_nothing)
449 abort ();
450
451 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
452 to, from, UNKNOWN);
453 if (to_mode == full_mode)
454 return;
455
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 }
459
460 /* Now both modes are integers. */
461
462 /* Handle expanding beyond a word. */
463 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
464 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
465 {
466 rtx insns;
467 rtx lowpart;
468 rtx fill_value;
469 rtx lowfrom;
470 int i;
471 enum machine_mode lowpart_mode;
472 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
473
474 /* Try converting directly if the insn is supported. */
475 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
476 != CODE_FOR_nothing)
477 {
478 /* If FROM is a SUBREG, put it into a register. Do this
479 so that we always generate the same set of insns for
480 better cse'ing; if an intermediate assignment occurred,
481 we won't be doing the operation directly on the SUBREG. */
482 if (optimize > 0 && GET_CODE (from) == SUBREG)
483 from = force_reg (from_mode, from);
484 emit_unop_insn (code, to, from, equiv_code);
485 return;
486 }
487 /* Next, try converting via full word. */
488 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
489 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
490 != CODE_FOR_nothing))
491 {
492 if (REG_P (to))
493 {
494 if (reg_overlap_mentioned_p (to, from))
495 from = force_reg (from_mode, from);
496 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
497 }
498 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
499 emit_unop_insn (code, to,
500 gen_lowpart (word_mode, to), equiv_code);
501 return;
502 }
503
504 /* No special multiword conversion insn; do it by hand. */
505 start_sequence ();
506
507 /* Since we will turn this into a no conflict block, we must ensure
508 that the source does not overlap the target. */
509
510 if (reg_overlap_mentioned_p (to, from))
511 from = force_reg (from_mode, from);
512
513 /* Get a copy of FROM widened to a word, if necessary. */
514 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
515 lowpart_mode = word_mode;
516 else
517 lowpart_mode = from_mode;
518
519 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
520
521 lowpart = gen_lowpart (lowpart_mode, to);
522 emit_move_insn (lowpart, lowfrom);
523
524 /* Compute the value to put in each remaining word. */
525 if (unsignedp)
526 fill_value = const0_rtx;
527 else
528 {
529 #ifdef HAVE_slt
530 if (HAVE_slt
531 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
532 && STORE_FLAG_VALUE == -1)
533 {
534 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
535 lowpart_mode, 0);
536 fill_value = gen_reg_rtx (word_mode);
537 emit_insn (gen_slt (fill_value));
538 }
539 else
540 #endif
541 {
542 fill_value
543 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
544 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
545 NULL_RTX, 0);
546 fill_value = convert_to_mode (word_mode, fill_value, 1);
547 }
548 }
549
550 /* Fill the remaining words. */
551 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 {
553 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 rtx subword = operand_subword (to, index, 1, to_mode);
555
556 if (subword == 0)
557 abort ();
558
559 if (fill_value != subword)
560 emit_move_insn (subword, fill_value);
561 }
562
563 insns = get_insns ();
564 end_sequence ();
565
566 emit_no_conflict_block (insns, to, from, NULL_RTX,
567 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
568 return;
569 }
570
571 /* Truncating multi-word to a word or less. */
572 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
573 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
574 {
575 if (!((MEM_P (from)
576 && ! MEM_VOLATILE_P (from)
577 && direct_load[(int) to_mode]
578 && ! mode_dependent_address_p (XEXP (from, 0)))
579 || REG_P (from)
580 || GET_CODE (from) == SUBREG))
581 from = force_reg (from_mode, from);
582 convert_move (to, gen_lowpart (word_mode, from), 0);
583 return;
584 }
585
586 /* Now follow all the conversions between integers
587 no more than a word long. */
588
589 /* For truncation, usually we can just refer to FROM in a narrower mode. */
590 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
591 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
592 GET_MODE_BITSIZE (from_mode)))
593 {
594 if (!((MEM_P (from)
595 && ! MEM_VOLATILE_P (from)
596 && direct_load[(int) to_mode]
597 && ! mode_dependent_address_p (XEXP (from, 0)))
598 || REG_P (from)
599 || GET_CODE (from) == SUBREG))
600 from = force_reg (from_mode, from);
601 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
602 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
603 from = copy_to_reg (from);
604 emit_move_insn (to, gen_lowpart (to_mode, from));
605 return;
606 }
607
608 /* Handle extension. */
609 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
610 {
611 /* Convert directly if that works. */
612 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
613 != CODE_FOR_nothing)
614 {
615 if (flag_force_mem)
616 from = force_not_mem (from);
617
618 emit_unop_insn (code, to, from, equiv_code);
619 return;
620 }
621 else
622 {
623 enum machine_mode intermediate;
624 rtx tmp;
625 tree shift_amount;
626
627 /* Search for a mode to convert via. */
628 for (intermediate = from_mode; intermediate != VOIDmode;
629 intermediate = GET_MODE_WIDER_MODE (intermediate))
630 if (((can_extend_p (to_mode, intermediate, unsignedp)
631 != CODE_FOR_nothing)
632 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
633 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
634 GET_MODE_BITSIZE (intermediate))))
635 && (can_extend_p (intermediate, from_mode, unsignedp)
636 != CODE_FOR_nothing))
637 {
638 convert_move (to, convert_to_mode (intermediate, from,
639 unsignedp), unsignedp);
640 return;
641 }
642
643 /* No suitable intermediate mode.
644 Generate what we need with shifts. */
645 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
646 - GET_MODE_BITSIZE (from_mode), 0);
647 from = gen_lowpart (to_mode, force_reg (from_mode, from));
648 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
649 to, unsignedp);
650 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
651 to, unsignedp);
652 if (tmp != to)
653 emit_move_insn (to, tmp);
654 return;
655 }
656 }
657
658 /* Support special truncate insns for certain modes. */
659 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
660 {
661 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
662 to, from, UNKNOWN);
663 return;
664 }
665
666 /* Handle truncation of volatile memrefs, and so on;
667 the things that couldn't be truncated directly,
668 and for which there was no special instruction.
669
670 ??? Code above formerly short-circuited this, for most integer
671 mode pairs, with a force_reg in from_mode followed by a recursive
672 call to this routine. Appears always to have been wrong. */
673 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
674 {
675 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
676 emit_move_insn (to, temp);
677 return;
678 }
679
680 /* Mode combination is not recognized. */
681 abort ();
682 }
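/* A minimal usage sketch for convert_move: widen a 32-bit value into a
   64-bit register.  SRC and DST are hypothetical pseudos created by the
   caller; the third argument selects sign vs. zero extension.  */
#if 0
rtx src = gen_reg_rtx (SImode);
rtx dst = gen_reg_rtx (DImode);
/* unsignedp == 0, so this sign-extends SRC into DST.  */
convert_move (dst, src, 0);
#endif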
683
684 /* Return an rtx for a value that would result
685 from converting X to mode MODE.
686 Both X and MODE may be floating, or both integer.
687 UNSIGNEDP is nonzero if X is an unsigned value.
688 This can be done by referring to a part of X in place
689 or by copying to a new temporary with conversion. */
690
691 rtx
692 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
693 {
694 return convert_modes (mode, VOIDmode, x, unsignedp);
695 }
696
697 /* Return an rtx for a value that would result
698 from converting X from mode OLDMODE to mode MODE.
699 Both modes may be floating, or both integer.
700 UNSIGNEDP is nonzero if X is an unsigned value.
701
702 This can be done by referring to a part of X in place
703 or by copying to a new temporary with conversion.
704
705 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
706
707 rtx
708 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
709 {
710 rtx temp;
711
712 /* If FROM is a SUBREG that indicates that we have already done at least
713 the required extension, strip it. */
714
715 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
716 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
717 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
718 x = gen_lowpart (mode, x);
719
720 if (GET_MODE (x) != VOIDmode)
721 oldmode = GET_MODE (x);
722
723 if (mode == oldmode)
724 return x;
725
726 /* There is one case that we must handle specially: If we are converting
727 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
728 we are to interpret the constant as unsigned, gen_lowpart will do
729      the wrong thing if the constant appears negative. What we want to do is
730 make the high-order word of the constant zero, not all ones. */
731
732 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
733 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
734 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
735 {
736 HOST_WIDE_INT val = INTVAL (x);
737
738 if (oldmode != VOIDmode
739 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
740 {
741 int width = GET_MODE_BITSIZE (oldmode);
742
743 /* We need to zero extend VAL. */
744 val &= ((HOST_WIDE_INT) 1 << width) - 1;
745 }
746
747 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
748 }
749
750 /* We can do this with a gen_lowpart if both desired and current modes
751 are integer, and this is either a constant integer, a register, or a
752 non-volatile MEM. Except for the constant case where MODE is no
753 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754
755 if ((GET_CODE (x) == CONST_INT
756 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
757 || (GET_MODE_CLASS (mode) == MODE_INT
758 && GET_MODE_CLASS (oldmode) == MODE_INT
759 && (GET_CODE (x) == CONST_DOUBLE
760 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
761 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
762 && direct_load[(int) mode])
763 || (REG_P (x)
764 && (! HARD_REGISTER_P (x)
765 || HARD_REGNO_MODE_OK (REGNO (x), mode))
766 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
767 GET_MODE_BITSIZE (GET_MODE (x)))))))))
768 {
769 /* ?? If we don't know OLDMODE, we have to assume here that
770 X does not need sign- or zero-extension. This may not be
771 the case, but it's the best we can do. */
772 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
773 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
774 {
775 HOST_WIDE_INT val = INTVAL (x);
776 int width = GET_MODE_BITSIZE (oldmode);
777
778 /* We must sign or zero-extend in this case. Start by
779 zero-extending, then sign extend if we need to. */
780 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 if (! unsignedp
782 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
783 val |= (HOST_WIDE_INT) (-1) << width;
784
785 return gen_int_mode (val, mode);
786 }
787
788 return gen_lowpart (mode, x);
789 }
790
791   /* Converting a VOIDmode integer constant into a vector mode is always
792      equivalent to a subreg operation.  */
793 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
794 {
795 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
796 abort ();
797 return simplify_gen_subreg (mode, x, oldmode, 0);
798 }
799
800 temp = gen_reg_rtx (mode);
801 convert_move (temp, x, unsignedp);
802 return temp;
803 }
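/* Usage sketch contrasting the two entry points above.  X is a
   hypothetical rtx already in SImode; the constant case must name its
   old mode explicitly because a CONST_INT carries VOIDmode.  */
#if 0
/* Zero-extend X to DImode; convert_to_mode reads the old mode from X.  */
rtx wide = convert_to_mode (DImode, x, 1);
/* For a constant, supply OLDMODE so -1 is treated as 32-bit all-ones.  */
rtx wide_const = convert_modes (DImode, SImode, GEN_INT (-1), 1);
#endif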
804 \f
805 /* STORE_MAX_PIECES is the number of bytes at a time that we can
806 store efficiently. Due to internal GCC limitations, this is
807 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
808 for an immediate constant. */
809
810 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
811
812 /* Determine whether the LEN bytes can be moved by using several move
813 instructions. Return nonzero if a call to move_by_pieces should
814 succeed. */
815
816 int
817 can_move_by_pieces (unsigned HOST_WIDE_INT len,
818 unsigned int align ATTRIBUTE_UNUSED)
819 {
820 return MOVE_BY_PIECES_P (len, align);
821 }
822
823 /* Generate several move instructions to copy LEN bytes from block FROM to
824 block TO. (These are MEM rtx's with BLKmode).
825
826 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
827 used to push FROM to the stack.
828
829    ALIGN is the maximum stack alignment we can assume.
830
831    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
832    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
833    stpcpy.  */
834
835 rtx
836 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
837 unsigned int align, int endp)
838 {
839 struct move_by_pieces data;
840 rtx to_addr, from_addr = XEXP (from, 0);
841 unsigned int max_size = MOVE_MAX_PIECES + 1;
842 enum machine_mode mode = VOIDmode, tmode;
843 enum insn_code icode;
844
845 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
846
847 data.offset = 0;
848 data.from_addr = from_addr;
849 if (to)
850 {
851 to_addr = XEXP (to, 0);
852 data.to = to;
853 data.autinc_to
854 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
855 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
856 data.reverse
857 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
858 }
859 else
860 {
861 to_addr = NULL_RTX;
862 data.to = NULL_RTX;
863 data.autinc_to = 1;
864 #ifdef STACK_GROWS_DOWNWARD
865 data.reverse = 1;
866 #else
867 data.reverse = 0;
868 #endif
869 }
870 data.to_addr = to_addr;
871 data.from = from;
872 data.autinc_from
873 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
874 || GET_CODE (from_addr) == POST_INC
875 || GET_CODE (from_addr) == POST_DEC);
876
877 data.explicit_inc_from = 0;
878 data.explicit_inc_to = 0;
879 if (data.reverse) data.offset = len;
880 data.len = len;
881
882 /* If copying requires more than two move insns,
883 copy addresses to registers (to make displacements shorter)
884 and use post-increment if available. */
885 if (!(data.autinc_from && data.autinc_to)
886 && move_by_pieces_ninsns (len, align) > 2)
887 {
888 /* Find the mode of the largest move... */
889 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
890 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
891 if (GET_MODE_SIZE (tmode) < max_size)
892 mode = tmode;
893
894 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
895 {
896 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
897 data.autinc_from = 1;
898 data.explicit_inc_from = -1;
899 }
900 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 {
902 data.from_addr = copy_addr_to_reg (from_addr);
903 data.autinc_from = 1;
904 data.explicit_inc_from = 1;
905 }
906 if (!data.autinc_from && CONSTANT_P (from_addr))
907 data.from_addr = copy_addr_to_reg (from_addr);
908 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 {
910 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
911 data.autinc_to = 1;
912 data.explicit_inc_to = -1;
913 }
914 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
915 {
916 data.to_addr = copy_addr_to_reg (to_addr);
917 data.autinc_to = 1;
918 data.explicit_inc_to = 1;
919 }
920 if (!data.autinc_to && CONSTANT_P (to_addr))
921 data.to_addr = copy_addr_to_reg (to_addr);
922 }
923
924 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
925 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
926 align = MOVE_MAX * BITS_PER_UNIT;
927
928 /* First move what we can in the largest integer mode, then go to
929 successively smaller modes. */
930
931 while (max_size > 1)
932 {
933 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
934 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
935 if (GET_MODE_SIZE (tmode) < max_size)
936 mode = tmode;
937
938 if (mode == VOIDmode)
939 break;
940
941 icode = mov_optab->handlers[(int) mode].insn_code;
942 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
943 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
944
945 max_size = GET_MODE_SIZE (mode);
946 }
947
948 /* The code above should have handled everything. */
949 if (data.len > 0)
950 abort ();
951
952 if (endp)
953 {
954 rtx to1;
955
956 if (data.reverse)
957 abort ();
958 if (data.autinc_to)
959 {
960 if (endp == 2)
961 {
962 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
963 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
964 else
965 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
966 -1));
967 }
968 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
969 data.offset);
970 }
971 else
972 {
973 if (endp == 2)
974 --data.offset;
975 to1 = adjust_address (data.to, QImode, data.offset);
976 }
977 return to1;
978 }
979 else
980 return data.to;
981 }
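/* A minimal usage sketch: copy 16 bytes between two hypothetical
   BLKmode MEMs, DST_MEM and SRC_MEM, after checking the heuristic.
   ENDP == 1 asks for the address just past the copy, a la mempcpy.  */
#if 0
if (can_move_by_pieces (16, MEM_ALIGN (src_mem)))
  {
    rtx end = move_by_pieces (dst_mem, src_mem, 16, MEM_ALIGN (src_mem), 1);
  }
#endif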
982
983 /* Return number of insns required to move L bytes by pieces.
984 ALIGN (in bits) is maximum alignment we can assume. */
985
986 static unsigned HOST_WIDE_INT
987 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
988 {
989 unsigned HOST_WIDE_INT n_insns = 0;
990 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
991
992 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
993 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
994 align = MOVE_MAX * BITS_PER_UNIT;
995
996 while (max_size > 1)
997 {
998 enum machine_mode mode = VOIDmode, tmode;
999 enum insn_code icode;
1000
1001 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1002 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1003 if (GET_MODE_SIZE (tmode) < max_size)
1004 mode = tmode;
1005
1006 if (mode == VOIDmode)
1007 break;
1008
1009 icode = mov_optab->handlers[(int) mode].insn_code;
1010 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1011 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1012
1013 max_size = GET_MODE_SIZE (mode);
1014 }
1015
1016 if (l)
1017 abort ();
1018 return n_insns;
1019 }
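/* Worked example for the count above (illustrative figures only): on a
   hypothetical 32-bit target with MOVE_MAX == 4 and fully aligned
   operands, a 10-byte copy is counted as two SImode moves plus one
   HImode move, so move_by_pieces_ninsns returns 3.  */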
1020
1021 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1022 with move instructions for mode MODE. GENFUN is the gen_... function
1023 to make a move insn for that mode. DATA has all the other info. */
1024
1025 static void
1026 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1027 struct move_by_pieces *data)
1028 {
1029 unsigned int size = GET_MODE_SIZE (mode);
1030 rtx to1 = NULL_RTX, from1;
1031
1032 while (data->len >= size)
1033 {
1034 if (data->reverse)
1035 data->offset -= size;
1036
1037 if (data->to)
1038 {
1039 if (data->autinc_to)
1040 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1041 data->offset);
1042 else
1043 to1 = adjust_address (data->to, mode, data->offset);
1044 }
1045
1046 if (data->autinc_from)
1047 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1048 data->offset);
1049 else
1050 from1 = adjust_address (data->from, mode, data->offset);
1051
1052 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1053 emit_insn (gen_add2_insn (data->to_addr,
1054 GEN_INT (-(HOST_WIDE_INT)size)));
1055 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1056 emit_insn (gen_add2_insn (data->from_addr,
1057 GEN_INT (-(HOST_WIDE_INT)size)));
1058
1059 if (data->to)
1060 emit_insn ((*genfun) (to1, from1));
1061 else
1062 {
1063 #ifdef PUSH_ROUNDING
1064 emit_single_push_insn (mode, from1, NULL);
1065 #else
1066 abort ();
1067 #endif
1068 }
1069
1070 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1071 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1072 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1073 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1074
1075 if (! data->reverse)
1076 data->offset += size;
1077
1078 data->len -= size;
1079 }
1080 }
1081 \f
1082 /* Emit code to move a block Y to a block X. This may be done with
1083 string-move instructions, with multiple scalar move instructions,
1084 or with a library call.
1085
1086 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1087 SIZE is an rtx that says how long they are.
1088 ALIGN is the maximum alignment we can assume they have.
1089 METHOD describes what kind of copy this is, and what mechanisms may be used.
1090
1091 Return the address of the new block, if memcpy is called and returns it,
1092 0 otherwise. */
1093
1094 rtx
1095 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1096 {
1097 bool may_use_call;
1098 rtx retval = 0;
1099 unsigned int align;
1100
1101 switch (method)
1102 {
1103 case BLOCK_OP_NORMAL:
1104 may_use_call = true;
1105 break;
1106
1107 case BLOCK_OP_CALL_PARM:
1108 may_use_call = block_move_libcall_safe_for_call_parm ();
1109
1110 /* Make inhibit_defer_pop nonzero around the library call
1111 to force it to pop the arguments right away. */
1112 NO_DEFER_POP;
1113 break;
1114
1115 case BLOCK_OP_NO_LIBCALL:
1116 may_use_call = false;
1117 break;
1118
1119 default:
1120 abort ();
1121 }
1122
1123 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1124
1125 if (!MEM_P (x))
1126 abort ();
1127 if (!MEM_P (y))
1128 abort ();
1129 if (size == 0)
1130 abort ();
1131
1132 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1133 block copy is more efficient for other large modes, e.g. DCmode. */
1134 x = adjust_address (x, BLKmode, 0);
1135 y = adjust_address (y, BLKmode, 0);
1136
1137 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1138 can be incorrect is coming from __builtin_memcpy. */
1139 if (GET_CODE (size) == CONST_INT)
1140 {
1141 if (INTVAL (size) == 0)
1142 return 0;
1143
1144 x = shallow_copy_rtx (x);
1145 y = shallow_copy_rtx (y);
1146 set_mem_size (x, size);
1147 set_mem_size (y, size);
1148 }
1149
1150 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1151 move_by_pieces (x, y, INTVAL (size), align, 0);
1152 else if (emit_block_move_via_movmem (x, y, size, align))
1153 ;
1154 else if (may_use_call)
1155 retval = emit_block_move_via_libcall (x, y, size);
1156 else
1157 emit_block_move_via_loop (x, y, size, align);
1158
1159 if (method == BLOCK_OP_CALL_PARM)
1160 OK_DEFER_POP;
1161
1162 return retval;
1163 }
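/* A minimal usage sketch for emit_block_move: X and Y are hypothetical
   BLKmode MEMs and NBYTES a byte count known at expansion time.
   BLOCK_OP_NORMAL permits the memcpy libcall fallback, so the returned
   rtx is the libcall's value when that path is taken, or 0 otherwise.  */
#if 0
rtx ret = emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);
#endif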
1164
1165 /* A subroutine of emit_block_move. Returns true if calling the
1166 block move libcall will not clobber any parameters which may have
1167 already been placed on the stack. */
1168
1169 static bool
1170 block_move_libcall_safe_for_call_parm (void)
1171 {
1172 /* If arguments are pushed on the stack, then they're safe. */
1173 if (PUSH_ARGS)
1174 return true;
1175
1176 /* If registers go on the stack anyway, any argument is sure to clobber
1177 an outgoing argument. */
1178 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1179 {
1180 tree fn = emit_block_move_libcall_fn (false);
1181 (void) fn;
1182 if (REG_PARM_STACK_SPACE (fn) != 0)
1183 return false;
1184 }
1185 #endif
1186
1187 /* If any argument goes in memory, then it might clobber an outgoing
1188 argument. */
1189 {
1190 CUMULATIVE_ARGS args_so_far;
1191 tree fn, arg;
1192
1193 fn = emit_block_move_libcall_fn (false);
1194 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1195
1196 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1197 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1198 {
1199 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1200 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1201 if (!tmp || !REG_P (tmp))
1202 return false;
1203 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1204 NULL_TREE, 1))
1205 return false;
1206 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1207 }
1208 }
1209 return true;
1210 }
1211
1212 /* A subroutine of emit_block_move. Expand a movmem pattern;
1213 return true if successful. */
1214
1215 static bool
1216 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1217 {
1218 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1219 int save_volatile_ok = volatile_ok;
1220 enum machine_mode mode;
1221
1222 /* Since this is a move insn, we don't care about volatility. */
1223 volatile_ok = 1;
1224
1225 /* Try the most limited insn first, because there's no point
1226 including more than one in the machine description unless
1227 the more limited one has some advantage. */
1228
1229 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1230 mode = GET_MODE_WIDER_MODE (mode))
1231 {
1232 enum insn_code code = movmem_optab[(int) mode];
1233 insn_operand_predicate_fn pred;
1234
1235 if (code != CODE_FOR_nothing
1236 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1237 here because if SIZE is less than the mode mask, as it is
1238 returned by the macro, it will definitely be less than the
1239 actual mode mask. */
1240 && ((GET_CODE (size) == CONST_INT
1241 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1242 <= (GET_MODE_MASK (mode) >> 1)))
1243 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1244 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1245 || (*pred) (x, BLKmode))
1246 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1247 || (*pred) (y, BLKmode))
1248 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1249 || (*pred) (opalign, VOIDmode)))
1250 {
1251 rtx op2;
1252 rtx last = get_last_insn ();
1253 rtx pat;
1254
1255 op2 = convert_to_mode (mode, size, 1);
1256 pred = insn_data[(int) code].operand[2].predicate;
1257 if (pred != 0 && ! (*pred) (op2, mode))
1258 op2 = copy_to_mode_reg (mode, op2);
1259
1260 /* ??? When called via emit_block_move_for_call, it'd be
1261 nice if there were some way to inform the backend, so
1262 that it doesn't fail the expansion because it thinks
1263 emitting the libcall would be more efficient. */
1264
1265 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1266 if (pat)
1267 {
1268 emit_insn (pat);
1269 volatile_ok = save_volatile_ok;
1270 return true;
1271 }
1272 else
1273 delete_insns_since (last);
1274 }
1275 }
1276
1277 volatile_ok = save_volatile_ok;
1278 return false;
1279 }
1280
1281 /* A subroutine of emit_block_move. Expand a call to memcpy.
1282 Return the return value from memcpy, 0 otherwise. */
1283
1284 static rtx
1285 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1286 {
1287 rtx dst_addr, src_addr;
1288 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1289 enum machine_mode size_mode;
1290 rtx retval;
1291
1292 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1293 pseudos. We can then place those new pseudos into a VAR_DECL and
1294 use them later. */
1295
1296 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1297 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1298
1299 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1300 src_addr = convert_memory_address (ptr_mode, src_addr);
1301
1302 dst_tree = make_tree (ptr_type_node, dst_addr);
1303 src_tree = make_tree (ptr_type_node, src_addr);
1304
1305 size_mode = TYPE_MODE (sizetype);
1306
1307 size = convert_to_mode (size_mode, size, 1);
1308 size = copy_to_mode_reg (size_mode, size);
1309
1310 /* It is incorrect to use the libcall calling conventions to call
1311 memcpy in this context. This could be a user call to memcpy and
1312 the user may wish to examine the return value from memcpy. For
1313 targets where libcalls and normal calls have different conventions
1314 for returning pointers, we could end up generating incorrect code. */
1315
1316 size_tree = make_tree (sizetype, size);
1317
1318 fn = emit_block_move_libcall_fn (true);
1319 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1320 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1321 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1322
1323 /* Now we have to build up the CALL_EXPR itself. */
1324 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1325 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1326 call_expr, arg_list, NULL_TREE);
1327
1328 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1329
1330 /* If we are initializing a readonly value, show the above call clobbered
1331 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1332 the delay slot scheduler might overlook conflicts and take nasty
1333 decisions. */
1334 if (RTX_UNCHANGING_P (dst))
1335 add_function_usage_to
1336 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1337 gen_rtx_CLOBBER (VOIDmode, dst),
1338 NULL_RTX));
1339
1340 return retval;
1341 }
1342
1343 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1344 for the function we use for block copies. The first time FOR_CALL
1345 is true, we call assemble_external. */
1346
1347 static GTY(()) tree block_move_fn;
1348
1349 void
1350 init_block_move_fn (const char *asmspec)
1351 {
1352 if (!block_move_fn)
1353 {
1354 tree args, fn;
1355
1356 fn = get_identifier ("memcpy");
1357 args = build_function_type_list (ptr_type_node, ptr_type_node,
1358 const_ptr_type_node, sizetype,
1359 NULL_TREE);
1360
1361 fn = build_decl (FUNCTION_DECL, fn, args);
1362 DECL_EXTERNAL (fn) = 1;
1363 TREE_PUBLIC (fn) = 1;
1364 DECL_ARTIFICIAL (fn) = 1;
1365 TREE_NOTHROW (fn) = 1;
1366
1367 block_move_fn = fn;
1368 }
1369
1370 if (asmspec)
1371 {
1372 SET_DECL_RTL (block_move_fn, NULL_RTX);
1373 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1374 }
1375 }
1376
1377 static tree
1378 emit_block_move_libcall_fn (int for_call)
1379 {
1380 static bool emitted_extern;
1381
1382 if (!block_move_fn)
1383 init_block_move_fn (NULL);
1384
1385 if (for_call && !emitted_extern)
1386 {
1387 emitted_extern = true;
1388 make_decl_rtl (block_move_fn, NULL);
1389 assemble_external (block_move_fn);
1390 }
1391
1392 return block_move_fn;
1393 }
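/* Hypothetical use of init_block_move_fn: a port that ships its own
   copy routine could redirect the block-move libcall to it by passing
   an assembler name (the name below is invented for illustration).  */
#if 0
init_block_move_fn ("__custom_memcpy");
#endif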
1394
1395 /* A subroutine of emit_block_move. Copy the data via an explicit
1396 loop. This is used only when libcalls are forbidden. */
1397 /* ??? It'd be nice to copy in hunks larger than QImode. */
1398
1399 static void
1400 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1401 unsigned int align ATTRIBUTE_UNUSED)
1402 {
1403 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1404 enum machine_mode iter_mode;
1405
1406 iter_mode = GET_MODE (size);
1407 if (iter_mode == VOIDmode)
1408 iter_mode = word_mode;
1409
1410 top_label = gen_label_rtx ();
1411 cmp_label = gen_label_rtx ();
1412 iter = gen_reg_rtx (iter_mode);
1413
1414 emit_move_insn (iter, const0_rtx);
1415
1416 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1417 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1418 do_pending_stack_adjust ();
1419
1420 emit_jump (cmp_label);
1421 emit_label (top_label);
1422
1423 tmp = convert_modes (Pmode, iter_mode, iter, true);
1424 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1425 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1426 x = change_address (x, QImode, x_addr);
1427 y = change_address (y, QImode, y_addr);
1428
1429 emit_move_insn (x, y);
1430
1431 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1432 true, OPTAB_LIB_WIDEN);
1433 if (tmp != iter)
1434 emit_move_insn (iter, tmp);
1435
1436 emit_label (cmp_label);
1437
1438 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1439 true, top_label);
1440 }
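/* The RTL emitted above behaves like this byte-copy loop, shown here
   only as a sketch of the control flow (ITER is unsigned, matching the
   unsigned LT comparison used for the exit test):  */
#if 0
for (iter = 0; iter < size; iter++)
  ((char *) x)[iter] = ((char *) y)[iter];
#endif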
1441 \f
1442 /* Copy all or part of a value X into registers starting at REGNO.
1443 The number of registers to be filled is NREGS. */
1444
1445 void
1446 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1447 {
1448 int i;
1449 #ifdef HAVE_load_multiple
1450 rtx pat;
1451 rtx last;
1452 #endif
1453
1454 if (nregs == 0)
1455 return;
1456
1457 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1458 x = validize_mem (force_const_mem (mode, x));
1459
1460 /* See if the machine can do this with a load multiple insn. */
1461 #ifdef HAVE_load_multiple
1462 if (HAVE_load_multiple)
1463 {
1464 last = get_last_insn ();
1465 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1466 GEN_INT (nregs));
1467 if (pat)
1468 {
1469 emit_insn (pat);
1470 return;
1471 }
1472 else
1473 delete_insns_since (last);
1474 }
1475 #endif
1476
1477 for (i = 0; i < nregs; i++)
1478 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1479 operand_subword_force (x, i, mode));
1480 }
1481
1482 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1483 The number of registers to be filled is NREGS. */
1484
1485 void
1486 move_block_from_reg (int regno, rtx x, int nregs)
1487 {
1488 int i;
1489
1490 if (nregs == 0)
1491 return;
1492
1493 /* See if the machine can do this with a store multiple insn. */
1494 #ifdef HAVE_store_multiple
1495 if (HAVE_store_multiple)
1496 {
1497 rtx last = get_last_insn ();
1498 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1499 GEN_INT (nregs));
1500 if (pat)
1501 {
1502 emit_insn (pat);
1503 return;
1504 }
1505 else
1506 delete_insns_since (last);
1507 }
1508 #endif
1509
1510 for (i = 0; i < nregs; i++)
1511 {
1512 rtx tem = operand_subword (x, i, 1, BLKmode);
1513
1514 if (tem == 0)
1515 abort ();
1516
1517 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1518 }
1519 }
1520
1521 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1522 ORIG, where ORIG is a non-consecutive group of registers represented by
1523 a PARALLEL. The clone is identical to the original except in that the
1524 original set of registers is replaced by a new set of pseudo registers.
1525 The new set has the same modes as the original set. */
1526
1527 rtx
1528 gen_group_rtx (rtx orig)
1529 {
1530 int i, length;
1531 rtx *tmps;
1532
1533 if (GET_CODE (orig) != PARALLEL)
1534 abort ();
1535
1536 length = XVECLEN (orig, 0);
1537 tmps = alloca (sizeof (rtx) * length);
1538
1539 /* Skip a NULL entry in first slot. */
1540 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1541
1542 if (i)
1543 tmps[0] = 0;
1544
1545 for (; i < length; i++)
1546 {
1547 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1548 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1549
1550 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1551 }
1552
1553 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1554 }
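/* A sketch of the PARALLEL shape these group routines operate on: each
   element pairs a register with its byte offset in the block.  The code
   below builds such a group by hand for a value carried in two DImode
   hard registers 0 and 1 (the register numbers are invented).  */
#if 0
rtx group
  = gen_rtx_PARALLEL (BLKmode,
		      gen_rtvec (2,
				 gen_rtx_EXPR_LIST (VOIDmode,
						    gen_rtx_REG (DImode, 0),
						    const0_rtx),
				 gen_rtx_EXPR_LIST (VOIDmode,
						    gen_rtx_REG (DImode, 1),
						    GEN_INT (8))));
#endif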
1555
1556 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1557 where DST is non-consecutive registers represented by a PARALLEL.
1558 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1559 if not known. */
1560
1561 void
1562 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1563 {
1564 rtx *tmps, src;
1565 int start, i;
1566
1567 if (GET_CODE (dst) != PARALLEL)
1568 abort ();
1569
1570 /* Check for a NULL entry, used to indicate that the parameter goes
1571 both on the stack and in registers. */
1572 if (XEXP (XVECEXP (dst, 0, 0), 0))
1573 start = 0;
1574 else
1575 start = 1;
1576
1577 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1578
1579 /* Process the pieces. */
1580 for (i = start; i < XVECLEN (dst, 0); i++)
1581 {
1582 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1583 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1584 unsigned int bytelen = GET_MODE_SIZE (mode);
1585 int shift = 0;
1586
1587 /* Handle trailing fragments that run over the size of the struct. */
1588 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1589 {
1590 /* Arrange to shift the fragment to where it belongs.
1591 extract_bit_field loads to the lsb of the reg. */
1592 if (
1593 #ifdef BLOCK_REG_PADDING
1594 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1595 == (BYTES_BIG_ENDIAN ? upward : downward)
1596 #else
1597 BYTES_BIG_ENDIAN
1598 #endif
1599 )
1600 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1601 bytelen = ssize - bytepos;
1602 if (bytelen <= 0)
1603 abort ();
1604 }
1605
1606 /* If we won't be loading directly from memory, protect the real source
1607 from strange tricks we might play; but make sure that the source can
1608 be loaded directly into the destination. */
1609 src = orig_src;
1610 if (!MEM_P (orig_src)
1611 && (!CONSTANT_P (orig_src)
1612 || (GET_MODE (orig_src) != mode
1613 && GET_MODE (orig_src) != VOIDmode)))
1614 {
1615 if (GET_MODE (orig_src) == VOIDmode)
1616 src = gen_reg_rtx (mode);
1617 else
1618 src = gen_reg_rtx (GET_MODE (orig_src));
1619
1620 emit_move_insn (src, orig_src);
1621 }
1622
1623 /* Optimize the access just a bit. */
1624 if (MEM_P (src)
1625 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1626 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1627 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1628 && bytelen == GET_MODE_SIZE (mode))
1629 {
1630 tmps[i] = gen_reg_rtx (mode);
1631 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1632 }
1633 else if (GET_CODE (src) == CONCAT)
1634 {
1635 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1636 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1637
1638 if ((bytepos == 0 && bytelen == slen0)
1639 || (bytepos != 0 && bytepos + bytelen <= slen))
1640 {
1641 /* The following assumes that the concatenated objects all
1642 have the same size. In this case, a simple calculation
1643 can be used to determine the object and the bit field
1644 to be extracted. */
1645 tmps[i] = XEXP (src, bytepos / slen0);
1646 if (! CONSTANT_P (tmps[i])
1647 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1648 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1649 (bytepos % slen0) * BITS_PER_UNIT,
1650 1, NULL_RTX, mode, mode);
1651 }
1652 else if (bytepos == 0)
1653 {
1654 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1655 emit_move_insn (mem, src);
1656 tmps[i] = adjust_address (mem, mode, 0);
1657 }
1658 else
1659 abort ();
1660 }
1661 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1662      SIMD register, which is currently broken.  Until we get GCC
1663 to emit proper RTL for these cases, let's dump to memory. */
1664 else if (VECTOR_MODE_P (GET_MODE (dst))
1665 && REG_P (src))
1666 {
1667 int slen = GET_MODE_SIZE (GET_MODE (src));
1668 rtx mem;
1669
1670 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1671 emit_move_insn (mem, src);
1672 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1673 }
1674 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1675 && XVECLEN (dst, 0) > 1)
1676 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1677 else if (CONSTANT_P (src)
1678 || (REG_P (src) && GET_MODE (src) == mode))
1679 tmps[i] = src;
1680 else
1681 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1682 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1683 mode, mode);
1684
1685 if (shift)
1686 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1687 build_int_2 (shift, 0), tmps[i], 0);
1688 }
1689
1690 /* Copy the extracted pieces into the proper (probable) hard regs. */
1691 for (i = start; i < XVECLEN (dst, 0); i++)
1692 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1693 }
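/* Usage sketch: pull the pieces of a structure argument out of memory
   into the register group built for it.  GROUP, STRUCT_MEM and
   STRUCT_TYPE are hypothetical; the last argument is the structure's
   size in bytes.  */
#if 0
emit_group_load (group, struct_mem, struct_type,
		 int_size_in_bytes (struct_type));
#endif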
1694
1695 /* Emit code to move a block SRC to block DST, where SRC and DST are
1696 non-consecutive groups of registers, each represented by a PARALLEL. */
1697
1698 void
1699 emit_group_move (rtx dst, rtx src)
1700 {
1701 int i;
1702
1703 if (GET_CODE (src) != PARALLEL
1704 || GET_CODE (dst) != PARALLEL
1705 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1706 abort ();
1707
1708 /* Skip first entry if NULL. */
1709 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1710 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1711 XEXP (XVECEXP (src, 0, i), 0));
1712 }
1713
1714 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1715 where SRC is non-consecutive registers represented by a PARALLEL.
1716 SSIZE represents the total size of block ORIG_DST, or -1 if not
1717 known. */
1718
1719 void
1720 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1721 {
1722 rtx *tmps, dst;
1723 int start, i;
1724
1725 if (GET_CODE (src) != PARALLEL)
1726 abort ();
1727
1728 /* Check for a NULL entry, used to indicate that the parameter goes
1729 both on the stack and in registers. */
1730 if (XEXP (XVECEXP (src, 0, 0), 0))
1731 start = 0;
1732 else
1733 start = 1;
1734
1735 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1736
1737 /* Copy the (probable) hard regs into pseudos. */
1738 for (i = start; i < XVECLEN (src, 0); i++)
1739 {
1740 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1741 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1742 emit_move_insn (tmps[i], reg);
1743 }
1744
1745 /* If we won't be storing directly into memory, protect the real destination
1746 from strange tricks we might play. */
1747 dst = orig_dst;
1748 if (GET_CODE (dst) == PARALLEL)
1749 {
1750 rtx temp;
1751
1752 /* We can get a PARALLEL dst if there is a conditional expression in
1753 a return statement. In that case, the dst and src are the same,
1754 so no action is necessary. */
1755 if (rtx_equal_p (dst, src))
1756 return;
1757
1758 /* It is unclear if we can ever reach here, but we may as well handle
1759 it. Allocate a temporary, and split this into a store/load to/from
1760 the temporary. */
1761
1762 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1763 emit_group_store (temp, src, type, ssize);
1764 emit_group_load (dst, temp, type, ssize);
1765 return;
1766 }
1767 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1768 {
1769 dst = gen_reg_rtx (GET_MODE (orig_dst));
1770 /* Make life a bit easier for combine. */
1771 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1772 }
1773
1774 /* Process the pieces. */
1775 for (i = start; i < XVECLEN (src, 0); i++)
1776 {
1777 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1778 enum machine_mode mode = GET_MODE (tmps[i]);
1779 unsigned int bytelen = GET_MODE_SIZE (mode);
1780 rtx dest = dst;
1781
1782 /* Handle trailing fragments that run over the size of the struct. */
1783 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1784 {
1785 /* store_bit_field always takes its value from the lsb.
1786 Move the fragment to the lsb if it's not already there. */
1787 if (
1788 #ifdef BLOCK_REG_PADDING
1789 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1790 == (BYTES_BIG_ENDIAN ? upward : downward)
1791 #else
1792 BYTES_BIG_ENDIAN
1793 #endif
1794 )
1795 {
1796 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1797 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1798 build_int_2 (shift, 0), tmps[i], 0);
1799 }
1800 bytelen = ssize - bytepos;
1801 }
1802
1803 if (GET_CODE (dst) == CONCAT)
1804 {
1805 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1806 dest = XEXP (dst, 0);
1807 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1808 {
1809 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1810 dest = XEXP (dst, 1);
1811 }
1812 else if (bytepos == 0 && XVECLEN (src, 0))
1813 {
1814 dest = assign_stack_temp (GET_MODE (dest),
1815 GET_MODE_SIZE (GET_MODE (dest)), 0);
1816 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
1817 tmps[i]);
1818 dst = dest;
1819 break;
1820 }
1821 else
1822 abort ();
1823 }
1824
1825 /* Optimize the access just a bit. */
1826 if (MEM_P (dest)
1827 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1828 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1829 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1830 && bytelen == GET_MODE_SIZE (mode))
1831 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1832 else
1833 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1834 mode, tmps[i]);
1835 }
1836
1837 /* Copy from the pseudo into the (probable) hard reg. */
1838 if (orig_dst != dst)
1839 emit_move_insn (orig_dst, dst);
1840 }
1841
1842 /* Generate code to copy a BLKmode object of TYPE out of a
1843 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1844 is null, a stack temporary is created. TGTBLK is returned.
1845
1846 The purpose of this routine is to handle functions that return
1847 BLKmode structures in registers. Some machines (the PA for example)
1848 want to return all small structures in registers regardless of the
1849 structure's alignment. */
1850
1851 rtx
1852 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1853 {
1854 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1855 rtx src = NULL, dst = NULL;
1856 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1857 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1858
1859 if (tgtblk == 0)
1860 {
1861 tgtblk = assign_temp (build_qualified_type (type,
1862 (TYPE_QUALS (type)
1863 | TYPE_QUAL_CONST)),
1864 0, 1, 1);
1865 preserve_temp_slots (tgtblk);
1866 }
1867
1868 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1869 into a new pseudo which is a full word. */
1870
1871 if (GET_MODE (srcreg) != BLKmode
1872 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1873 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1874
1875 /* If the structure doesn't take up a whole number of words, see whether
1876 SRCREG is padded on the left or on the right. If it's on the left,
1877 set PADDING_CORRECTION to the number of bits to skip.
1878
1879 In most ABIs, the structure will be returned at the least significant
1880 end of the register, which translates to right padding on little-endian
1881 targets and left padding on big-endian targets. The opposite
1882 holds if the structure is returned at the most significant
1883 end of the register. */
1884 if (bytes % UNITS_PER_WORD != 0
1885 && (targetm.calls.return_in_msb (type)
1886 ? !BYTES_BIG_ENDIAN
1887 : BYTES_BIG_ENDIAN))
1888 padding_correction
1889 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
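/* For example, with 32-bit words a 6-byte structure that is left-padded
   in its registers gets padding_correction = 32 - (6 % 4) * 8 = 16, so
   the copy loop below starts reading SRCREG 16 bits in.  */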
1890
1891 /* Copy the structure BITSIZE bits at a time.
1892
1893 We could probably emit more efficient code for machines which do not use
1894 strict alignment, but it doesn't seem worth the effort at the current
1895 time. */
1896 for (bitpos = 0, xbitpos = padding_correction;
1897 bitpos < bytes * BITS_PER_UNIT;
1898 bitpos += bitsize, xbitpos += bitsize)
1899 {
1900 /* We need a new source operand each time xbitpos is on a
1901 word boundary and when xbitpos == padding_correction
1902 (the first time through). */
1903 if (xbitpos % BITS_PER_WORD == 0
1904 || xbitpos == padding_correction)
1905 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
1906 GET_MODE (srcreg));
1907
1908 /* We need a new destination operand each time bitpos is on
1909 a word boundary. */
1910 if (bitpos % BITS_PER_WORD == 0)
1911 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
1912
1913 /* Use xbitpos for the source extraction (right justified) and
1914 bitpos for the destination store (left justified). */
1915 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
1916 extract_bit_field (src, bitsize,
1917 xbitpos % BITS_PER_WORD, 1,
1918 NULL_RTX, word_mode, word_mode));
1919 }
1920
1921 return tgtblk;
1922 }
1923
1924 /* Add a USE expression for REG to the (possibly empty) list pointed
1925 to by CALL_FUSAGE. REG must denote a hard register. */
1926
1927 void
1928 use_reg (rtx *call_fusage, rtx reg)
1929 {
1930 if (!REG_P (reg)
1931 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1932 abort ();
1933
1934 *call_fusage
1935 = gen_rtx_EXPR_LIST (VOIDmode,
1936 gen_rtx_USE (VOIDmode, reg), *call_fusage);
1937 }
1938
1939 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1940 starting at REGNO. All of these registers must be hard registers. */
1941
1942 void
1943 use_regs (rtx *call_fusage, int regno, int nregs)
1944 {
1945 int i;
1946
1947 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1948 abort ();
1949
1950 for (i = 0; i < nregs; i++)
1951 use_reg (call_fusage, regno_reg_rtx[regno + i]);
1952 }
1953
1954 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1955 PARALLEL REGS. This is for calls that pass values in multiple
1956 non-contiguous locations. The Irix 6 ABI has examples of this. */
1957
1958 void
1959 use_group_regs (rtx *call_fusage, rtx regs)
1960 {
1961 int i;
1962
1963 for (i = 0; i < XVECLEN (regs, 0); i++)
1964 {
1965 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
1966
1967 /* A NULL entry means the parameter goes both on the stack and in
1968 registers. This can also be a MEM for targets that pass values
1969 partially on the stack and partially in registers. */
1970 if (reg != 0 && REG_P (reg))
1971 use_reg (call_fusage, reg);
1972 }
1973 }
1974 \f
1975
1976 /* Determine whether the LEN bytes generated by CONSTFUN can be
1977 stored to memory using several move instructions. CONSTFUNDATA is
1978 a pointer which will be passed as argument in every CONSTFUN call.
1979 ALIGN is maximum alignment we can assume. Return nonzero if a
1980 call to store_by_pieces should succeed. */
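/* A typical caller (the string builtin expanders, for instance) checks
   first and only then emits the stores, roughly:

     if (can_store_by_pieces (len, constfun, data, align))
       dest = store_by_pieces (dest, len, constfun, data, align, 0);
     else
       ... fall back to a library call ...

   CONSTFUN must return, for any byte OFFSET and integer MODE it is
   handed, a constant rtx holding those bytes of the value.  */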
1981
1982 int
1983 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1984 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
1985 void *constfundata, unsigned int align)
1986 {
1987 unsigned HOST_WIDE_INT max_size, l;
1988 HOST_WIDE_INT offset = 0;
1989 enum machine_mode mode, tmode;
1990 enum insn_code icode;
1991 int reverse;
1992 rtx cst;
1993
1994 if (len == 0)
1995 return 1;
1996
1997 if (! STORE_BY_PIECES_P (len, align))
1998 return 0;
1999
2000 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2001 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2002 align = MOVE_MAX * BITS_PER_UNIT;
2003
2004 /* We would first store what we can in the largest integer mode, then go to
2005 successively smaller modes. */
2006
2007 for (reverse = 0;
2008 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2009 reverse++)
2010 {
2011 l = len;
2012 mode = VOIDmode;
2013 max_size = STORE_MAX_PIECES + 1;
2014 while (max_size > 1)
2015 {
2016 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2017 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2018 if (GET_MODE_SIZE (tmode) < max_size)
2019 mode = tmode;
2020
2021 if (mode == VOIDmode)
2022 break;
2023
2024 icode = mov_optab->handlers[(int) mode].insn_code;
2025 if (icode != CODE_FOR_nothing
2026 && align >= GET_MODE_ALIGNMENT (mode))
2027 {
2028 unsigned int size = GET_MODE_SIZE (mode);
2029
2030 while (l >= size)
2031 {
2032 if (reverse)
2033 offset -= size;
2034
2035 cst = (*constfun) (constfundata, offset, mode);
2036 if (!LEGITIMATE_CONSTANT_P (cst))
2037 return 0;
2038
2039 if (!reverse)
2040 offset += size;
2041
2042 l -= size;
2043 }
2044 }
2045
2046 max_size = GET_MODE_SIZE (mode);
2047 }
2048
2049 /* The code above should have handled everything. */
2050 if (l != 0)
2051 abort ();
2052 }
2053
2054 return 1;
2055 }
2056
2057 /* Generate several move instructions to store LEN bytes generated by
2058 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2059 pointer which will be passed as argument in every CONSTFUN call.
2060 ALIGN is maximum alignment we can assume.
2061 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2062 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2063 stpcpy. */
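/* For instance, when the mempcpy builtin is expanded this way, ENDP is 1
   so that the rtx returned below addresses the byte just past the stored
   data, matching mempcpy's return value.  */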
2064
2065 rtx
2066 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2067 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2068 void *constfundata, unsigned int align, int endp)
2069 {
2070 struct store_by_pieces data;
2071
2072 if (len == 0)
2073 {
2074 if (endp == 2)
2075 abort ();
2076 return to;
2077 }
2078
2079 if (! STORE_BY_PIECES_P (len, align))
2080 abort ();
2081 data.constfun = constfun;
2082 data.constfundata = constfundata;
2083 data.len = len;
2084 data.to = to;
2085 store_by_pieces_1 (&data, align);
2086 if (endp)
2087 {
2088 rtx to1;
2089
2090 if (data.reverse)
2091 abort ();
2092 if (data.autinc_to)
2093 {
2094 if (endp == 2)
2095 {
2096 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2097 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2098 else
2099 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2100 -1));
2101 }
2102 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2103 data.offset);
2104 }
2105 else
2106 {
2107 if (endp == 2)
2108 --data.offset;
2109 to1 = adjust_address (data.to, QImode, data.offset);
2110 }
2111 return to1;
2112 }
2113 else
2114 return data.to;
2115 }
2116
2117 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2118 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2119
2120 static void
2121 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2122 {
2123 struct store_by_pieces data;
2124
2125 if (len == 0)
2126 return;
2127
2128 data.constfun = clear_by_pieces_1;
2129 data.constfundata = NULL;
2130 data.len = len;
2131 data.to = to;
2132 store_by_pieces_1 (&data, align);
2133 }
2134
2135 /* Callback routine for clear_by_pieces.
2136 Return const0_rtx unconditionally. */
2137
2138 static rtx
2139 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2140 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2141 enum machine_mode mode ATTRIBUTE_UNUSED)
2142 {
2143 return const0_rtx;
2144 }
2145
2146 /* Subroutine of clear_by_pieces and store_by_pieces.
2147 Generate several move instructions to store LEN bytes of block TO. (A MEM
2148 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2149
2150 static void
2151 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2152 unsigned int align ATTRIBUTE_UNUSED)
2153 {
2154 rtx to_addr = XEXP (data->to, 0);
2155 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2156 enum machine_mode mode = VOIDmode, tmode;
2157 enum insn_code icode;
2158
2159 data->offset = 0;
2160 data->to_addr = to_addr;
2161 data->autinc_to
2162 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2163 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2164
2165 data->explicit_inc_to = 0;
2166 data->reverse
2167 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2168 if (data->reverse)
2169 data->offset = data->len;
2170
2171 /* If storing requires more than two move insns,
2172 copy addresses to registers (to make displacements shorter)
2173 and use post-increment if available. */
2174 if (!data->autinc_to
2175 && move_by_pieces_ninsns (data->len, align) > 2)
2176 {
2177 /* Determine the main mode we'll be using. */
2178 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2179 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2180 if (GET_MODE_SIZE (tmode) < max_size)
2181 mode = tmode;
2182
2183 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2184 {
2185 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2186 data->autinc_to = 1;
2187 data->explicit_inc_to = -1;
2188 }
2189
2190 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2191 && ! data->autinc_to)
2192 {
2193 data->to_addr = copy_addr_to_reg (to_addr);
2194 data->autinc_to = 1;
2195 data->explicit_inc_to = 1;
2196 }
2197
2198 if ( !data->autinc_to && CONSTANT_P (to_addr))
2199 data->to_addr = copy_addr_to_reg (to_addr);
2200 }
2201
2202 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2203 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2204 align = MOVE_MAX * BITS_PER_UNIT;
2205
2206 /* First store what we can in the largest integer mode, then go to
2207 successively smaller modes. */
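/* For example, with the usual 1/2/4/8-byte integer modes available and
   sufficient alignment, an 11-byte store is emitted by the descent below
   as one 8-byte piece, one 2-byte piece and one 1-byte piece.  */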
2208
2209 while (max_size > 1)
2210 {
2211 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2212 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2213 if (GET_MODE_SIZE (tmode) < max_size)
2214 mode = tmode;
2215
2216 if (mode == VOIDmode)
2217 break;
2218
2219 icode = mov_optab->handlers[(int) mode].insn_code;
2220 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2221 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2222
2223 max_size = GET_MODE_SIZE (mode);
2224 }
2225
2226 /* The code above should have handled everything. */
2227 if (data->len != 0)
2228 abort ();
2229 }
2230
2231 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2232 with move instructions for mode MODE. GENFUN is the gen_... function
2233 to make a move insn for that mode. DATA has all the other info. */
2234
2235 static void
2236 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2237 struct store_by_pieces *data)
2238 {
2239 unsigned int size = GET_MODE_SIZE (mode);
2240 rtx to1, cst;
2241
2242 while (data->len >= size)
2243 {
2244 if (data->reverse)
2245 data->offset -= size;
2246
2247 if (data->autinc_to)
2248 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2249 data->offset);
2250 else
2251 to1 = adjust_address (data->to, mode, data->offset);
2252
2253 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2254 emit_insn (gen_add2_insn (data->to_addr,
2255 GEN_INT (-(HOST_WIDE_INT) size)));
2256
2257 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2258 emit_insn ((*genfun) (to1, cst));
2259
2260 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2261 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2262
2263 if (! data->reverse)
2264 data->offset += size;
2265
2266 data->len -= size;
2267 }
2268 }
2269 \f
2270 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2271 its length in bytes. */
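/* In practice: a small, constant-sized, well-aligned object is cleared
   with a few stores of zero via clear_by_pieces; otherwise we try the
   machine's clrmem pattern, and failing that we call memset.  */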
2272
2273 rtx
2274 clear_storage (rtx object, rtx size)
2275 {
2276 rtx retval = 0;
2277 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2278 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2279
2280 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2281 just move a zero. Otherwise, do this a piece at a time. */
2282 if (GET_MODE (object) != BLKmode
2283 && GET_CODE (size) == CONST_INT
2284 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2285 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2286 else
2287 {
2288 if (size == const0_rtx)
2289 ;
2290 else if (GET_CODE (size) == CONST_INT
2291 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2292 clear_by_pieces (object, INTVAL (size), align);
2293 else if (clear_storage_via_clrmem (object, size, align))
2294 ;
2295 else
2296 retval = clear_storage_via_libcall (object, size);
2297 }
2298
2299 return retval;
2300 }
2301
2302 /* A subroutine of clear_storage. Expand a clrmem pattern;
2303 return true if successful. */
2304
2305 static bool
2306 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2307 {
2308 /* Try the most limited insn first, because there's no point
2309 including more than one in the machine description unless
2310 the more limited one has some advantage. */
2311
2312 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2313 enum machine_mode mode;
2314
2315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2316 mode = GET_MODE_WIDER_MODE (mode))
2317 {
2318 enum insn_code code = clrmem_optab[(int) mode];
2319 insn_operand_predicate_fn pred;
2320
2321 if (code != CODE_FOR_nothing
2322 /* We don't need MODE to be narrower than
2323 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2324 the mode mask, as it is returned by the macro, it will
2325 definitely be less than the actual mode mask. */
2326 && ((GET_CODE (size) == CONST_INT
2327 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2328 <= (GET_MODE_MASK (mode) >> 1)))
2329 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2330 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2331 || (*pred) (object, BLKmode))
2332 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2333 || (*pred) (opalign, VOIDmode)))
2334 {
2335 rtx op1;
2336 rtx last = get_last_insn ();
2337 rtx pat;
2338
2339 op1 = convert_to_mode (mode, size, 1);
2340 pred = insn_data[(int) code].operand[1].predicate;
2341 if (pred != 0 && ! (*pred) (op1, mode))
2342 op1 = copy_to_mode_reg (mode, op1);
2343
2344 pat = GEN_FCN ((int) code) (object, op1, opalign);
2345 if (pat)
2346 {
2347 emit_insn (pat);
2348 return true;
2349 }
2350 else
2351 delete_insns_since (last);
2352 }
2353 }
2354
2355 return false;
2356 }
2357
2358 /* A subroutine of clear_storage. Expand a call to memset.
2359 Return the return value of memset, 0 otherwise. */
2360
2361 static rtx
2362 clear_storage_via_libcall (rtx object, rtx size)
2363 {
2364 tree call_expr, arg_list, fn, object_tree, size_tree;
2365 enum machine_mode size_mode;
2366 rtx retval;
2367
2368 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2369 place those pseudos into a VAR_DECL and use them later. */
2370
2371 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2372
2373 size_mode = TYPE_MODE (sizetype);
2374 size = convert_to_mode (size_mode, size, 1);
2375 size = copy_to_mode_reg (size_mode, size);
2376
2377 /* It is incorrect to use the libcall calling conventions to call
2378 memset in this context. This could be a user call to memset and
2379 the user may wish to examine the return value from memset. For
2380 targets where libcalls and normal calls have different conventions
2381 for returning pointers, we could end up generating incorrect code. */
2382
2383 object_tree = make_tree (ptr_type_node, object);
2384 size_tree = make_tree (sizetype, size);
2385
2386 fn = clear_storage_libcall_fn (true);
2387 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2388 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2389 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2390
2391 /* Now we have to build up the CALL_EXPR itself. */
2392 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2393 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2394 call_expr, arg_list, NULL_TREE);
2395
2396 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2397
2398 /* If we are initializing a readonly value, show the above call
2399 clobbered it. Otherwise, a load from it may erroneously be
2400 hoisted from a loop. */
2401 if (RTX_UNCHANGING_P (object))
2402 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2403
2404 return retval;
2405 }
2406
2407 /* A subroutine of clear_storage_via_libcall. Create the tree node
2408 for the function we use for block clears. The first time FOR_CALL
2409 is true, we call assemble_external. */
2410
2411 static GTY(()) tree block_clear_fn;
2412
2413 void
2414 init_block_clear_fn (const char *asmspec)
2415 {
2416 if (!block_clear_fn)
2417 {
2418 tree fn, args;
2419
2420 fn = get_identifier ("memset");
2421 args = build_function_type_list (ptr_type_node, ptr_type_node,
2422 integer_type_node, sizetype,
2423 NULL_TREE);
2424
2425 fn = build_decl (FUNCTION_DECL, fn, args);
2426 DECL_EXTERNAL (fn) = 1;
2427 TREE_PUBLIC (fn) = 1;
2428 DECL_ARTIFICIAL (fn) = 1;
2429 TREE_NOTHROW (fn) = 1;
2430
2431 block_clear_fn = fn;
2432 }
2433
2434 if (asmspec)
2435 {
2436 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2437 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2438 }
2439 }
2440
2441 static tree
2442 clear_storage_libcall_fn (int for_call)
2443 {
2444 static bool emitted_extern;
2445
2446 if (!block_clear_fn)
2447 init_block_clear_fn (NULL);
2448
2449 if (for_call && !emitted_extern)
2450 {
2451 emitted_extern = true;
2452 make_decl_rtl (block_clear_fn, NULL);
2453 assemble_external (block_clear_fn);
2454 }
2455
2456 return block_clear_fn;
2457 }
2458 \f
2459 /* Generate code to copy Y into X.
2460 Both Y and X must have the same mode, except that
2461 Y can be a constant with VOIDmode.
2462 This mode cannot be BLKmode; use emit_block_move for that.
2463
2464 Return the last instruction emitted. */
2465
2466 rtx
2467 emit_move_insn (rtx x, rtx y)
2468 {
2469 enum machine_mode mode = GET_MODE (x);
2470 rtx y_cst = NULL_RTX;
2471 rtx last_insn, set;
2472
2473 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2474 abort ();
2475
2476 if (CONSTANT_P (y))
2477 {
2478 if (optimize
2479 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2480 && (last_insn = compress_float_constant (x, y)))
2481 return last_insn;
2482
2483 y_cst = y;
2484
2485 if (!LEGITIMATE_CONSTANT_P (y))
2486 {
2487 y = force_const_mem (mode, y);
2488
2489 /* If the target's cannot_force_const_mem prevented the spill,
2490 assume that the target's move expanders will also take care
2491 of the non-legitimate constant. */
2492 if (!y)
2493 y = y_cst;
2494 }
2495 }
2496
2497 /* If X or Y are memory references, verify that their addresses are valid
2498 for the machine. */
2499 if (MEM_P (x)
2500 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2501 && ! push_operand (x, GET_MODE (x)))
2502 || (flag_force_addr
2503 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2504 x = validize_mem (x);
2505
2506 if (MEM_P (y)
2507 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2508 || (flag_force_addr
2509 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2510 y = validize_mem (y);
2511
2512 if (mode == BLKmode)
2513 abort ();
2514
2515 last_insn = emit_move_insn_1 (x, y);
2516
2517 if (y_cst && REG_P (x)
2518 && (set = single_set (last_insn)) != NULL_RTX
2519 && SET_DEST (set) == x
2520 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2521 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2522
2523 return last_insn;
2524 }
2525
2526 /* Low level part of emit_move_insn.
2527 Called just like emit_move_insn, but assumes X and Y
2528 are basically valid. */
2529
2530 rtx
2531 emit_move_insn_1 (rtx x, rtx y)
2532 {
2533 enum machine_mode mode = GET_MODE (x);
2534 enum machine_mode submode;
2535 enum mode_class class = GET_MODE_CLASS (mode);
2536
2537 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2538 abort ();
2539
2540 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2541 return
2542 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2543
2544 /* Expand complex moves by moving real part and imag part, if possible. */
2545 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2546 && BLKmode != (submode = GET_MODE_INNER (mode))
2547 && (mov_optab->handlers[(int) submode].insn_code
2548 != CODE_FOR_nothing))
2549 {
2550 /* Don't split destination if it is a stack push. */
2551 int stack = push_operand (x, GET_MODE (x));
2552
2553 #ifdef PUSH_ROUNDING
2554 /* In case we output to the stack, but the size is smaller than the
2555 machine can push exactly, we need to use move instructions. */
2556 if (stack
2557 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2558 != GET_MODE_SIZE (submode)))
2559 {
2560 rtx temp;
2561 HOST_WIDE_INT offset1, offset2;
2562
2563 /* Do not use anti_adjust_stack, since we don't want to update
2564 stack_pointer_delta. */
2565 temp = expand_binop (Pmode,
2566 #ifdef STACK_GROWS_DOWNWARD
2567 sub_optab,
2568 #else
2569 add_optab,
2570 #endif
2571 stack_pointer_rtx,
2572 GEN_INT
2573 (PUSH_ROUNDING
2574 (GET_MODE_SIZE (GET_MODE (x)))),
2575 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2576
2577 if (temp != stack_pointer_rtx)
2578 emit_move_insn (stack_pointer_rtx, temp);
2579
2580 #ifdef STACK_GROWS_DOWNWARD
2581 offset1 = 0;
2582 offset2 = GET_MODE_SIZE (submode);
2583 #else
2584 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2585 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2586 + GET_MODE_SIZE (submode));
2587 #endif
2588
2589 emit_move_insn (change_address (x, submode,
2590 gen_rtx_PLUS (Pmode,
2591 stack_pointer_rtx,
2592 GEN_INT (offset1))),
2593 gen_realpart (submode, y));
2594 emit_move_insn (change_address (x, submode,
2595 gen_rtx_PLUS (Pmode,
2596 stack_pointer_rtx,
2597 GEN_INT (offset2))),
2598 gen_imagpart (submode, y));
2599 }
2600 else
2601 #endif
2602 /* If this is a stack push, push the highpart first, so it
2603 will be in the argument order.
2604
2605 In that case, change_address is used only to convert
2606 the mode, not to change the address. */
2607 if (stack)
2608 {
2609 /* Note that the real part always precedes the imag part in memory
2610 regardless of machine's endianness. */
2611 #ifdef STACK_GROWS_DOWNWARD
2612 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2613 gen_imagpart (submode, y));
2614 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2615 gen_realpart (submode, y));
2616 #else
2617 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2618 gen_realpart (submode, y));
2619 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2620 gen_imagpart (submode, y));
2621 #endif
2622 }
2623 else
2624 {
2625 rtx realpart_x, realpart_y;
2626 rtx imagpart_x, imagpart_y;
2627
2628 /* If this is a complex value with each part being smaller than a
2629 word, the usual calling sequence will likely pack the pieces into
2630 a single register. Unfortunately, SUBREG of hard registers only
2631 deals in terms of words, so we have a problem converting input
2632 arguments to the CONCAT of two registers that is used elsewhere
2633 for complex values. If this is before reload, we can copy it into
2634 memory and reload. FIXME, we should see about using extract and
2635 insert on integer registers, but complex short and complex char
2636 variables should be rarely used. */
2637 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2638 && (reload_in_progress | reload_completed) == 0)
2639 {
2640 int packed_dest_p
2641 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2642 int packed_src_p
2643 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2644
2645 if (packed_dest_p || packed_src_p)
2646 {
2647 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2648 ? MODE_FLOAT : MODE_INT);
2649
2650 enum machine_mode reg_mode
2651 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2652
2653 if (reg_mode != BLKmode)
2654 {
2655 rtx mem = assign_stack_temp (reg_mode,
2656 GET_MODE_SIZE (mode), 0);
2657 rtx cmem = adjust_address (mem, mode, 0);
2658
2659 if (packed_dest_p)
2660 {
2661 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2662
2663 emit_move_insn_1 (cmem, y);
2664 return emit_move_insn_1 (sreg, mem);
2665 }
2666 else
2667 {
2668 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2669
2670 emit_move_insn_1 (mem, sreg);
2671 return emit_move_insn_1 (x, cmem);
2672 }
2673 }
2674 }
2675 }
2676
2677 realpart_x = gen_realpart (submode, x);
2678 realpart_y = gen_realpart (submode, y);
2679 imagpart_x = gen_imagpart (submode, x);
2680 imagpart_y = gen_imagpart (submode, y);
2681
2682 /* Show the output dies here. This is necessary for SUBREGs
2683 of pseudos since we cannot track their lifetimes correctly;
2684 hard regs shouldn't appear here except as return values.
2685 We never want to emit such a clobber after reload. */
2686 if (x != y
2687 && ! (reload_in_progress || reload_completed)
2688 && (GET_CODE (realpart_x) == SUBREG
2689 || GET_CODE (imagpart_x) == SUBREG))
2690 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2691
2692 emit_move_insn (realpart_x, realpart_y);
2693 emit_move_insn (imagpart_x, imagpart_y);
2694 }
2695
2696 return get_last_insn ();
2697 }
2698
2699 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2700 find a mode to do it in. If we have a movcc, use it. Otherwise,
2701 find the MODE_INT mode of the same width. */
2702 else if (GET_MODE_CLASS (mode) == MODE_CC
2703 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2704 {
2705 enum insn_code insn_code;
2706 enum machine_mode tmode = VOIDmode;
2707 rtx x1 = x, y1 = y;
2708
2709 if (mode != CCmode
2710 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2711 tmode = CCmode;
2712 else
2713 for (tmode = QImode; tmode != VOIDmode;
2714 tmode = GET_MODE_WIDER_MODE (tmode))
2715 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2716 break;
2717
2718 if (tmode == VOIDmode)
2719 abort ();
2720
2721 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2722 may call change_address which is not appropriate if we were
2723 called when a reload was in progress. We don't have to worry
2724 about changing the address since the size in bytes is supposed to
2725 be the same. Copy the MEM to change the mode and move any
2726 substitutions from the old MEM to the new one. */
2727
2728 if (reload_in_progress)
2729 {
2730 x = gen_lowpart_common (tmode, x1);
2731 if (x == 0 && MEM_P (x1))
2732 {
2733 x = adjust_address_nv (x1, tmode, 0);
2734 copy_replacements (x1, x);
2735 }
2736
2737 y = gen_lowpart_common (tmode, y1);
2738 if (y == 0 && MEM_P (y1))
2739 {
2740 y = adjust_address_nv (y1, tmode, 0);
2741 copy_replacements (y1, y);
2742 }
2743 }
2744 else
2745 {
2746 x = gen_lowpart (tmode, x);
2747 y = gen_lowpart (tmode, y);
2748 }
2749
2750 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2751 return emit_insn (GEN_FCN (insn_code) (x, y));
2752 }
2753
2754 /* Try using a move pattern for the corresponding integer mode. This is
2755 only safe when simplify_subreg can convert MODE constants into integer
2756 constants. At present, it can only do this reliably if the value
2757 fits within a HOST_WIDE_INT. */
2758 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2759 && (submode = int_mode_for_mode (mode)) != BLKmode
2760 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2761 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2762 (simplify_gen_subreg (submode, x, mode, 0),
2763 simplify_gen_subreg (submode, y, mode, 0)));
2764
2765 /* This will handle any multi-word or full-word mode that lacks a move_insn
2766 pattern. However, you will get better code if you define such patterns,
2767 even if they must turn into multiple assembler instructions. */
2768 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2769 {
2770 rtx last_insn = 0;
2771 rtx seq, inner;
2772 int need_clobber;
2773 int i;
2774
2775 #ifdef PUSH_ROUNDING
2776
2777 /* If X is a push on the stack, do the push now and replace
2778 X with a reference to the stack pointer. */
2779 if (push_operand (x, GET_MODE (x)))
2780 {
2781 rtx temp;
2782 enum rtx_code code;
2783
2784 /* Do not use anti_adjust_stack, since we don't want to update
2785 stack_pointer_delta. */
2786 temp = expand_binop (Pmode,
2787 #ifdef STACK_GROWS_DOWNWARD
2788 sub_optab,
2789 #else
2790 add_optab,
2791 #endif
2792 stack_pointer_rtx,
2793 GEN_INT
2794 (PUSH_ROUNDING
2795 (GET_MODE_SIZE (GET_MODE (x)))),
2796 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2797
2798 if (temp != stack_pointer_rtx)
2799 emit_move_insn (stack_pointer_rtx, temp);
2800
2801 code = GET_CODE (XEXP (x, 0));
2802
2803 /* Just hope that small offsets off SP are OK. */
2804 if (code == POST_INC)
2805 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2806 GEN_INT (-((HOST_WIDE_INT)
2807 GET_MODE_SIZE (GET_MODE (x)))));
2808 else if (code == POST_DEC)
2809 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2810 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2811 else
2812 temp = stack_pointer_rtx;
2813
2814 x = change_address (x, VOIDmode, temp);
2815 }
2816 #endif
2817
2818 /* If we are in reload, see if either operand is a MEM whose address
2819 is scheduled for replacement. */
2820 if (reload_in_progress && MEM_P (x)
2821 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2822 x = replace_equiv_address_nv (x, inner);
2823 if (reload_in_progress && MEM_P (y)
2824 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2825 y = replace_equiv_address_nv (y, inner);
2826
2827 start_sequence ();
2828
2829 need_clobber = 0;
2830 for (i = 0;
2831 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2832 i++)
2833 {
2834 rtx xpart = operand_subword (x, i, 1, mode);
2835 rtx ypart = operand_subword (y, i, 1, mode);
2836
2837 /* If we can't get a part of Y, put Y into memory if it is a
2838 constant. Otherwise, force it into a register. If we still
2839 can't get a part of Y, abort. */
2840 if (ypart == 0 && CONSTANT_P (y))
2841 {
2842 y = force_const_mem (mode, y);
2843 ypart = operand_subword (y, i, 1, mode);
2844 }
2845 else if (ypart == 0)
2846 ypart = operand_subword_force (y, i, mode);
2847
2848 if (xpart == 0 || ypart == 0)
2849 abort ();
2850
2851 need_clobber |= (GET_CODE (xpart) == SUBREG);
2852
2853 last_insn = emit_move_insn (xpart, ypart);
2854 }
2855
2856 seq = get_insns ();
2857 end_sequence ();
2858
2859 /* Show the output dies here. This is necessary for SUBREGs
2860 of pseudos since we cannot track their lifetimes correctly;
2861 hard regs shouldn't appear here except as return values.
2862 We never want to emit such a clobber after reload. */
2863 if (x != y
2864 && ! (reload_in_progress || reload_completed)
2865 && need_clobber != 0)
2866 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2867
2868 emit_insn (seq);
2869
2870 return last_insn;
2871 }
2872 else
2873 abort ();
2874 }
2875
2876 /* If Y is representable exactly in a narrower mode, and the target can
2877 perform the extension directly from constant or memory, then emit the
2878 move as an extension. */
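/* For example, a DFmode move of the constant 1.0 can usually be emitted
   as an extension of the SFmode constant 1.0, since the value truncates
   to SFmode exactly and most FP units can extend while loading.  */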
2879
2880 static rtx
2881 compress_float_constant (rtx x, rtx y)
2882 {
2883 enum machine_mode dstmode = GET_MODE (x);
2884 enum machine_mode orig_srcmode = GET_MODE (y);
2885 enum machine_mode srcmode;
2886 REAL_VALUE_TYPE r;
2887
2888 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2889
2890 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2891 srcmode != orig_srcmode;
2892 srcmode = GET_MODE_WIDER_MODE (srcmode))
2893 {
2894 enum insn_code ic;
2895 rtx trunc_y, last_insn;
2896
2897 /* Skip if the target can't extend this way. */
2898 ic = can_extend_p (dstmode, srcmode, 0);
2899 if (ic == CODE_FOR_nothing)
2900 continue;
2901
2902 /* Skip if the narrowed value isn't exact. */
2903 if (! exact_real_truncate (srcmode, &r))
2904 continue;
2905
2906 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2907
2908 if (LEGITIMATE_CONSTANT_P (trunc_y))
2909 {
2910 /* Skip if the target needs extra instructions to perform
2911 the extension. */
2912 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2913 continue;
2914 }
2915 else if (float_extend_from_mem[dstmode][srcmode])
2916 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2917 else
2918 continue;
2919
2920 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2921 last_insn = get_last_insn ();
2922
2923 if (REG_P (x))
2924 set_unique_reg_note (last_insn, REG_EQUAL, y);
2925
2926 return last_insn;
2927 }
2928
2929 return NULL_RTX;
2930 }
2931 \f
2932 /* Pushing data onto the stack. */
2933
2934 /* Push a block of length SIZE (perhaps variable)
2935 and return an rtx to address the beginning of the block.
2936 The value may be virtual_outgoing_args_rtx.
2937
2938 EXTRA is the number of bytes of padding to push in addition to SIZE.
2939 BELOW nonzero means this padding comes at low addresses;
2940 otherwise, the padding comes at high addresses. */
2941
2942 rtx
2943 push_block (rtx size, int extra, int below)
2944 {
2945 rtx temp;
2946
2947 size = convert_modes (Pmode, ptr_mode, size, 1);
2948 if (CONSTANT_P (size))
2949 anti_adjust_stack (plus_constant (size, extra));
2950 else if (REG_P (size) && extra == 0)
2951 anti_adjust_stack (size);
2952 else
2953 {
2954 temp = copy_to_mode_reg (Pmode, size);
2955 if (extra != 0)
2956 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2957 temp, 0, OPTAB_LIB_WIDEN);
2958 anti_adjust_stack (temp);
2959 }
2960
2961 #ifndef STACK_GROWS_DOWNWARD
2962 if (0)
2963 #else
2964 if (1)
2965 #endif
2966 {
2967 temp = virtual_outgoing_args_rtx;
2968 if (extra != 0 && below)
2969 temp = plus_constant (temp, extra);
2970 }
2971 else
2972 {
2973 if (GET_CODE (size) == CONST_INT)
2974 temp = plus_constant (virtual_outgoing_args_rtx,
2975 -INTVAL (size) - (below ? 0 : extra));
2976 else if (extra != 0 && !below)
2977 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2978 negate_rtx (Pmode, plus_constant (size, extra)));
2979 else
2980 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2981 negate_rtx (Pmode, size));
2982 }
2983
2984 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2985 }
2986
2987 #ifdef PUSH_ROUNDING
2988
2989 /* Emit single push insn. */
2990
2991 static void
2992 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
2993 {
2994 rtx dest_addr;
2995 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
2996 rtx dest;
2997 enum insn_code icode;
2998 insn_operand_predicate_fn pred;
2999
3000 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3001 /* If there is a push pattern, use it. Otherwise try the old way of
3002 throwing a MEM representing the push operation at the move expander. */
3003 icode = push_optab->handlers[(int) mode].insn_code;
3004 if (icode != CODE_FOR_nothing)
3005 {
3006 if (((pred = insn_data[(int) icode].operand[0].predicate)
3007 && !((*pred) (x, mode))))
3008 x = force_reg (mode, x);
3009 emit_insn (GEN_FCN (icode) (x));
3010 return;
3011 }
3012 if (GET_MODE_SIZE (mode) == rounded_size)
3013 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3014 /* If we are to pad downward, adjust the stack pointer first and
3015 then store X into the stack location using an offset. This is
3016 because emit_move_insn does not know how to pad; it does not have
3017 access to type. */
3018 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3019 {
3020 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3021 HOST_WIDE_INT offset;
3022
3023 emit_move_insn (stack_pointer_rtx,
3024 expand_binop (Pmode,
3025 #ifdef STACK_GROWS_DOWNWARD
3026 sub_optab,
3027 #else
3028 add_optab,
3029 #endif
3030 stack_pointer_rtx,
3031 GEN_INT (rounded_size),
3032 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3033
3034 offset = (HOST_WIDE_INT) padding_size;
3035 #ifdef STACK_GROWS_DOWNWARD
3036 if (STACK_PUSH_CODE == POST_DEC)
3037 /* We have already decremented the stack pointer, so get the
3038 previous value. */
3039 offset += (HOST_WIDE_INT) rounded_size;
3040 #else
3041 if (STACK_PUSH_CODE == POST_INC)
3042 /* We have already incremented the stack pointer, so get the
3043 previous value. */
3044 offset -= (HOST_WIDE_INT) rounded_size;
3045 #endif
3046 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3047 }
3048 else
3049 {
3050 #ifdef STACK_GROWS_DOWNWARD
3051 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3052 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3053 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3054 #else
3055 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3056 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3057 GEN_INT (rounded_size));
3058 #endif
3059 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3060 }
3061
3062 dest = gen_rtx_MEM (mode, dest_addr);
3063
3064 if (type != 0)
3065 {
3066 set_mem_attributes (dest, type, 1);
3067
3068 if (flag_optimize_sibling_calls)
3069 /* Function incoming arguments may overlap with sibling call
3070 outgoing arguments and we cannot allow reordering of reads
3071 from function arguments with stores to outgoing arguments
3072 of sibling calls. */
3073 set_mem_alias_set (dest, 0);
3074 }
3075 emit_move_insn (dest, x);
3076 }
3077 #endif
3078
3079 /* Generate code to push X onto the stack, assuming it has mode MODE and
3080 type TYPE.
3081 MODE is redundant except when X is a CONST_INT (since they don't
3082 carry mode info).
3083 SIZE is an rtx for the size of data to be copied (in bytes),
3084 needed only if X is BLKmode.
3085
3086 ALIGN (in bits) is maximum alignment we can assume.
3087
3088 If PARTIAL and REG are both nonzero, then copy that many of the first
3089 words of X into registers starting with REG, and push the rest of X.
3090 The amount of space pushed is decreased by PARTIAL words,
3091 rounded *down* to a multiple of PARM_BOUNDARY.
3092 REG must be a hard register in this case.
3093 If REG is zero but PARTIAL is not, take all other actions for an
3094 argument partially in registers, but do not actually load any
3095 registers.
3096
3097 EXTRA is the amount in bytes of extra space to leave next to this arg.
3098 This is ignored if an argument block has already been allocated.
3099
3100 On a machine that lacks real push insns, ARGS_ADDR is the address of
3101 the bottom of the argument block for this call. We use indexing off there
3102 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3103 argument block has not been preallocated.
3104
3105 ARGS_SO_FAR is the size of args previously pushed for this call.
3106
3107 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3108 for arguments passed in registers. If nonzero, it will be the number
3109 of bytes required. */
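/* Worked example: with 4-byte words and PARM_BOUNDARY == 64, PARTIAL == 3
   covers 12 bytes, but USED below is rounded down to 8, so only 8 bytes
   are omitted from the stack copy and the remaining 4 are both loaded
   into a register and stored on the stack.  */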
3110
3111 void
3112 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3113 unsigned int align, int partial, rtx reg, int extra,
3114 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3115 rtx alignment_pad)
3116 {
3117 rtx xinner;
3118 enum direction stack_direction
3119 #ifdef STACK_GROWS_DOWNWARD
3120 = downward;
3121 #else
3122 = upward;
3123 #endif
3124
3125 /* Decide where to pad the argument: `downward' for below,
3126 `upward' for above, or `none' for don't pad it.
3127 Default is below for small data on big-endian machines; else above. */
3128 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3129
3130 /* Invert direction if stack is post-decrement.
3131 FIXME: why? */
3132 if (STACK_PUSH_CODE == POST_DEC)
3133 if (where_pad != none)
3134 where_pad = (where_pad == downward ? upward : downward);
3135
3136 xinner = x;
3137
3138 if (mode == BLKmode)
3139 {
3140 /* Copy a block into the stack, entirely or partially. */
3141
3142 rtx temp;
3143 int used = partial * UNITS_PER_WORD;
3144 int offset;
3145 int skip;
3146
3147 if (reg && GET_CODE (reg) == PARALLEL)
3148 {
3149 /* Use the size of the elt to compute offset. */
3150 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3151 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3152 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3153 }
3154 else
3155 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3156
3157 if (size == 0)
3158 abort ();
3159
3160 used -= offset;
3161
3162 /* USED is now the # of bytes we need not copy to the stack
3163 because registers will take care of them. */
3164
3165 if (partial != 0)
3166 xinner = adjust_address (xinner, BLKmode, used);
3167
3168 /* If the partial register-part of the arg counts in its stack size,
3169 skip the part of stack space corresponding to the registers.
3170 Otherwise, start copying to the beginning of the stack space,
3171 by setting SKIP to 0. */
3172 skip = (reg_parm_stack_space == 0) ? 0 : used;
3173
3174 #ifdef PUSH_ROUNDING
3175 /* Do it with several push insns if that doesn't take lots of insns
3176 and if there is no difficulty with push insns that skip bytes
3177 on the stack for alignment purposes. */
3178 if (args_addr == 0
3179 && PUSH_ARGS
3180 && GET_CODE (size) == CONST_INT
3181 && skip == 0
3182 && MEM_ALIGN (xinner) >= align
3183 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3184 /* Here we avoid the case of a structure whose weak alignment
3185 forces many pushes of a small amount of data,
3186 and such small pushes do rounding that causes trouble. */
3187 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3188 || align >= BIGGEST_ALIGNMENT
3189 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3190 == (align / BITS_PER_UNIT)))
3191 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3192 {
3193 /* Push padding now if padding above and stack grows down,
3194 or if padding below and stack grows up.
3195 But if space already allocated, this has already been done. */
3196 if (extra && args_addr == 0
3197 && where_pad != none && where_pad != stack_direction)
3198 anti_adjust_stack (GEN_INT (extra));
3199
3200 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3201 }
3202 else
3203 #endif /* PUSH_ROUNDING */
3204 {
3205 rtx target;
3206
3207 /* Otherwise make space on the stack and copy the data
3208 to the address of that space. */
3209
3210 /* Deduct words put into registers from the size we must copy. */
3211 if (partial != 0)
3212 {
3213 if (GET_CODE (size) == CONST_INT)
3214 size = GEN_INT (INTVAL (size) - used);
3215 else
3216 size = expand_binop (GET_MODE (size), sub_optab, size,
3217 GEN_INT (used), NULL_RTX, 0,
3218 OPTAB_LIB_WIDEN);
3219 }
3220
3221 /* Get the address of the stack space.
3222 In this case, we do not deal with EXTRA separately.
3223 A single stack adjust will do. */
3224 if (! args_addr)
3225 {
3226 temp = push_block (size, extra, where_pad == downward);
3227 extra = 0;
3228 }
3229 else if (GET_CODE (args_so_far) == CONST_INT)
3230 temp = memory_address (BLKmode,
3231 plus_constant (args_addr,
3232 skip + INTVAL (args_so_far)));
3233 else
3234 temp = memory_address (BLKmode,
3235 plus_constant (gen_rtx_PLUS (Pmode,
3236 args_addr,
3237 args_so_far),
3238 skip));
3239
3240 if (!ACCUMULATE_OUTGOING_ARGS)
3241 {
3242 /* If the source is referenced relative to the stack pointer,
3243 copy it to another register to stabilize it. We do not need
3244 to do this if we know that we won't be changing sp. */
3245
3246 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3247 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3248 temp = copy_to_reg (temp);
3249 }
3250
3251 target = gen_rtx_MEM (BLKmode, temp);
3252
3253 if (type != 0)
3254 {
3255 set_mem_attributes (target, type, 1);
3256 /* Function incoming arguments may overlap with sibling call
3257 outgoing arguments and we cannot allow reordering of reads
3258 from function arguments with stores to outgoing arguments
3259 of sibling calls. */
3260 set_mem_alias_set (target, 0);
3261 }
3262
3263 /* ALIGN may well be better aligned than TYPE, e.g. due to
3264 PARM_BOUNDARY. Assume the caller isn't lying. */
3265 set_mem_align (target, align);
3266
3267 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3268 }
3269 }
3270 else if (partial > 0)
3271 {
3272 /* Scalar partly in registers. */
3273
3274 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3275 int i;
3276 int not_stack;
3277 /* # words of start of argument
3278 that we must make space for but need not store. */
3279 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3280 int args_offset = INTVAL (args_so_far);
3281 int skip;
3282
3283 /* Push padding now if padding above and stack grows down,
3284 or if padding below and stack grows up.
3285 But if space already allocated, this has already been done. */
3286 if (extra && args_addr == 0
3287 && where_pad != none && where_pad != stack_direction)
3288 anti_adjust_stack (GEN_INT (extra));
3289
3290 /* If we make space by pushing it, we might as well push
3291 the real data. Otherwise, we can leave OFFSET nonzero
3292 and leave the space uninitialized. */
3293 if (args_addr == 0)
3294 offset = 0;
3295
3296 /* Now NOT_STACK gets the number of words that we don't need to
3297 allocate on the stack. */
3298 not_stack = partial - offset;
3299
3300 /* If the partial register-part of the arg counts in its stack size,
3301 skip the part of stack space corresponding to the registers.
3302 Otherwise, start copying to the beginning of the stack space,
3303 by setting SKIP to 0. */
3304 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3305
3306 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3307 x = validize_mem (force_const_mem (mode, x));
3308
3309 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3310 SUBREGs of such registers are not allowed. */
3311 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3312 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3313 x = copy_to_reg (x);
3314
3315 /* Loop over all the words allocated on the stack for this arg. */
3316 /* We can do it by words, because any scalar bigger than a word
3317 has a size a multiple of a word. */
3318 #ifndef PUSH_ARGS_REVERSED
3319 for (i = not_stack; i < size; i++)
3320 #else
3321 for (i = size - 1; i >= not_stack; i--)
3322 #endif
3323 if (i >= not_stack + offset)
3324 emit_push_insn (operand_subword_force (x, i, mode),
3325 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3326 0, args_addr,
3327 GEN_INT (args_offset + ((i - not_stack + skip)
3328 * UNITS_PER_WORD)),
3329 reg_parm_stack_space, alignment_pad);
3330 }
3331 else
3332 {
3333 rtx addr;
3334 rtx dest;
3335
3336 /* Push padding now if padding above and stack grows down,
3337 or if padding below and stack grows up.
3338 But if space already allocated, this has already been done. */
3339 if (extra && args_addr == 0
3340 && where_pad != none && where_pad != stack_direction)
3341 anti_adjust_stack (GEN_INT (extra));
3342
3343 #ifdef PUSH_ROUNDING
3344 if (args_addr == 0 && PUSH_ARGS)
3345 emit_single_push_insn (mode, x, type);
3346 else
3347 #endif
3348 {
3349 if (GET_CODE (args_so_far) == CONST_INT)
3350 addr
3351 = memory_address (mode,
3352 plus_constant (args_addr,
3353 INTVAL (args_so_far)));
3354 else
3355 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3356 args_so_far));
3357 dest = gen_rtx_MEM (mode, addr);
3358 if (type != 0)
3359 {
3360 set_mem_attributes (dest, type, 1);
3361 /* Function incoming arguments may overlap with sibling call
3362 outgoing arguments and we cannot allow reordering of reads
3363 from function arguments with stores to outgoing arguments
3364 of sibling calls. */
3365 set_mem_alias_set (dest, 0);
3366 }
3367
3368 emit_move_insn (dest, x);
3369 }
3370 }
3371
3372 /* If part should go in registers, copy that part
3373 into the appropriate registers. Do this now, at the end,
3374 since mem-to-mem copies above may do function calls. */
3375 if (partial > 0 && reg != 0)
3376 {
3377 /* Handle calls that pass values in multiple non-contiguous locations.
3378 The Irix 6 ABI has examples of this. */
3379 if (GET_CODE (reg) == PARALLEL)
3380 emit_group_load (reg, x, type, -1);
3381 else
3382 move_block_to_reg (REGNO (reg), x, partial, mode);
3383 }
3384
3385 if (extra && args_addr == 0 && where_pad == stack_direction)
3386 anti_adjust_stack (GEN_INT (extra));
3387
3388 if (alignment_pad && args_addr == 0)
3389 anti_adjust_stack (alignment_pad);
3390 }
3391 \f
3392 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3393 operations. */
3394
3395 static rtx
3396 get_subtarget (rtx x)
3397 {
3398 return ((x == 0
3399 /* Only registers can be subtargets. */
3400 || !REG_P (x)
3401 /* If the register is readonly, it can't be set more than once. */
3402 || RTX_UNCHANGING_P (x)
3403 /* Don't use hard regs to avoid extending their life. */
3404 || REGNO (x) < FIRST_PSEUDO_REGISTER
3405 /* Avoid subtargets inside loops,
3406 since they hide some invariant expressions. */
3407 || preserve_subexpressions_p ())
3408 ? 0 : x);
3409 }
3410
3411 /* Expand an assignment that stores the value of FROM into TO.
3412 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3413 (If the value is constant, this rtx is a constant.)
3414 Otherwise, the returned value is NULL_RTX. */
3415
3416 rtx
3417 expand_assignment (tree to, tree from, int want_value)
3418 {
3419 rtx to_rtx = 0;
3420 rtx result;
3421
3422 /* Don't crash if the lhs of the assignment was erroneous. */
3423
3424 if (TREE_CODE (to) == ERROR_MARK)
3425 {
3426 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3427 return want_value ? result : NULL_RTX;
3428 }
3429
3430 /* Assignment of a structure component needs special treatment
3431 if the structure component's rtx is not simply a MEM.
3432 Assignment of an array element at a constant index, and assignment of
3433 an array element in an unaligned packed structure field, has the same
3434 problem. */
3435
3436 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3437 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3438 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3439 {
3440 enum machine_mode mode1;
3441 HOST_WIDE_INT bitsize, bitpos;
3442 rtx orig_to_rtx;
3443 tree offset;
3444 int unsignedp;
3445 int volatilep = 0;
3446 tree tem;
3447
3448 push_temp_slots ();
3449 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3450 &unsignedp, &volatilep);
3451
3452 /* If we are going to use store_bit_field and extract_bit_field,
3453 make sure to_rtx will be safe for multiple use. */
3454
3455 if (mode1 == VOIDmode && want_value)
3456 tem = stabilize_reference (tem);
3457
3458 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3459
3460 if (offset != 0)
3461 {
3462 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3463
3464 if (!MEM_P (to_rtx))
3465 abort ();
3466
3467 #ifdef POINTERS_EXTEND_UNSIGNED
3468 if (GET_MODE (offset_rtx) != Pmode)
3469 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3470 #else
3471 if (GET_MODE (offset_rtx) != ptr_mode)
3472 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3473 #endif
3474
3475 /* A constant address in TO_RTX can have VOIDmode; we must not try
3476 to call force_reg in that case, so avoid it. */
3477 if (MEM_P (to_rtx)
3478 && GET_MODE (to_rtx) == BLKmode
3479 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3480 && bitsize > 0
3481 && (bitpos % bitsize) == 0
3482 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3483 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3484 {
3485 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3486 bitpos = 0;
3487 }
3488
3489 to_rtx = offset_address (to_rtx, offset_rtx,
3490 highest_pow2_factor_for_target (to,
3491 offset));
3492 }
3493
3494 if (MEM_P (to_rtx))
3495 {
3496 /* If the field is at offset zero, we could have been given the
3497 DECL_RTX of the parent struct. Don't munge it. */
3498 to_rtx = shallow_copy_rtx (to_rtx);
3499
3500 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3501 }
3502
3503 /* Deal with volatile and readonly fields. The former is only done
3504 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3505 if (volatilep && MEM_P (to_rtx))
3506 {
3507 if (to_rtx == orig_to_rtx)
3508 to_rtx = copy_rtx (to_rtx);
3509 MEM_VOLATILE_P (to_rtx) = 1;
3510 }
3511
3512 if (TREE_CODE (to) == COMPONENT_REF
3513 && TREE_READONLY (TREE_OPERAND (to, 1))
3514 /* We can't assert that a MEM won't be set more than once
3515 if the component is not addressable because another
3516 non-addressable component may be referenced by the same MEM. */
3517 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3518 {
3519 if (to_rtx == orig_to_rtx)
3520 to_rtx = copy_rtx (to_rtx);
3521 RTX_UNCHANGING_P (to_rtx) = 1;
3522 }
3523
3524 if (MEM_P (to_rtx) && ! can_address_p (to))
3525 {
3526 if (to_rtx == orig_to_rtx)
3527 to_rtx = copy_rtx (to_rtx);
3528 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3529 }
3530
3531 /* Optimize bitfld op= val in certain cases. */
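/* For instance, when TO is the topmost bitfield in its word, "to += N"
   can be done as a single add of (N << bitpos) to the whole word: bits
   that carry out of the field simply fall off the top, so no masking or
   extract/insert sequence is needed.  A 1-bit field incremented by a
   constant is handled the same way using xor.  */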
3532 while (mode1 == VOIDmode && !want_value
3533 && bitsize > 0 && bitsize < BITS_PER_WORD
3534 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3535 && !TREE_SIDE_EFFECTS (to)
3536 && !TREE_THIS_VOLATILE (to))
3537 {
3538 tree src, op0, op1;
3539 rtx value, str_rtx = to_rtx;
3540 HOST_WIDE_INT bitpos1 = bitpos;
3541 optab binop;
3542
3543 src = from;
3544 STRIP_NOPS (src);
3545 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3546 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3547 break;
3548
3549 op0 = TREE_OPERAND (src, 0);
3550 op1 = TREE_OPERAND (src, 1);
3551 STRIP_NOPS (op0);
3552
3553 if (! operand_equal_p (to, op0, 0))
3554 break;
3555
3556 if (MEM_P (str_rtx))
3557 {
3558 enum machine_mode mode = GET_MODE (str_rtx);
3559 HOST_WIDE_INT offset1;
3560
3561 if (GET_MODE_BITSIZE (mode) == 0
3562 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3563 mode = word_mode;
3564 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3565 mode, 0);
3566 if (mode == VOIDmode)
3567 break;
3568
3569 offset1 = bitpos1;
3570 bitpos1 %= GET_MODE_BITSIZE (mode);
3571 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3572 str_rtx = adjust_address (str_rtx, mode, offset1);
3573 }
3574 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3575 break;
3576
3577 /* If the bit field covers the whole REG/MEM, store_field
3578 will likely generate better code. */
3579 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3580 break;
3581
3582 	  /* We can't handle fields split across multiple entities.  */
3583 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3584 break;
3585
3586 if (BYTES_BIG_ENDIAN)
3587 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3588 - bitsize;
3589
3590 /* Special case some bitfield op= exp. */
3591 switch (TREE_CODE (src))
3592 {
3593 case PLUS_EXPR:
3594 case MINUS_EXPR:
3595 /* For now, just optimize the case of the topmost bitfield
3596 where we don't need to do any masking and also
3597 1 bit bitfields where xor can be used.
3598 We might win by one instruction for the other bitfields
3599 too if insv/extv instructions aren't used, so that
3600 can be added later. */
3601 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3602 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3603 break;
3604 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3605 value = convert_modes (GET_MODE (str_rtx),
3606 TYPE_MODE (TREE_TYPE (op1)), value,
3607 TYPE_UNSIGNED (TREE_TYPE (op1)));
3608
3609 /* We may be accessing data outside the field, which means
3610 we can alias adjacent data. */
3611 if (MEM_P (str_rtx))
3612 {
3613 str_rtx = shallow_copy_rtx (str_rtx);
3614 set_mem_alias_set (str_rtx, 0);
3615 set_mem_expr (str_rtx, 0);
3616 }
3617
3618 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
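 /* A single-bit field that is not the topmost bit of the word cannot
    be updated with a plain add or subtract, because a carry out of
    the bit would clobber the bits above it.  Addition and subtraction
    modulo 2 are both equivalent to XOR, so mask the value down to its
    low bit and use xor_optab instead.  */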
3619 if (bitsize == 1
3620 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3621 {
3622 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3623 NULL_RTX);
3624 binop = xor_optab;
3625 }
3626 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx),
3627 value, build_int_2 (bitpos1, 0),
3628 NULL_RTX, 1);
3629 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3630 value, str_rtx, 1, OPTAB_WIDEN);
3631 if (result != str_rtx)
3632 emit_move_insn (str_rtx, result);
3633 free_temp_slots ();
3634 pop_temp_slots ();
3635 return NULL_RTX;
3636
3637 default:
3638 break;
3639 }
3640
3641 break;
3642 }
3643
3644 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3645 (want_value
3646 /* Spurious cast for HPUX compiler. */
3647 ? ((enum machine_mode)
3648 TYPE_MODE (TREE_TYPE (to)))
3649 : VOIDmode),
3650 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3651
3652 preserve_temp_slots (result);
3653 free_temp_slots ();
3654 pop_temp_slots ();
3655
3656 /* If the value is meaningful, convert RESULT to the proper mode.
3657 Otherwise, return nothing. */
3658 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3659 TYPE_MODE (TREE_TYPE (from)),
3660 result,
3661 TYPE_UNSIGNED (TREE_TYPE (to)))
3662 : NULL_RTX);
3663 }
3664
3665 /* If the rhs is a function call and its value is not an aggregate,
3666 call the function before we start to compute the lhs.
3667 This is needed for correct code for cases such as
3668 val = setjmp (buf) on machines where reference to val
3669 requires loading up part of an address in a separate insn.
3670
3671 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3672      since it might be a promoted variable where the zero- or sign-extension
3673 needs to be done. Handling this in the normal way is safe because no
3674 computation is done before the call. */
3675 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3676 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3677 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3678 && REG_P (DECL_RTL (to))))
3679 {
3680 rtx value;
3681
3682 push_temp_slots ();
3683 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3684 if (to_rtx == 0)
3685 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3686
3687 /* Handle calls that return values in multiple non-contiguous locations.
3688 The Irix 6 ABI has examples of this. */
3689 if (GET_CODE (to_rtx) == PARALLEL)
3690 emit_group_load (to_rtx, value, TREE_TYPE (from),
3691 int_size_in_bytes (TREE_TYPE (from)));
3692 else if (GET_MODE (to_rtx) == BLKmode)
3693 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3694 else
3695 {
3696 if (POINTER_TYPE_P (TREE_TYPE (to)))
3697 value = convert_memory_address (GET_MODE (to_rtx), value);
3698 emit_move_insn (to_rtx, value);
3699 }
3700 preserve_temp_slots (to_rtx);
3701 free_temp_slots ();
3702 pop_temp_slots ();
3703 return want_value ? to_rtx : NULL_RTX;
3704 }
3705
3706 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3707 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3708
3709 if (to_rtx == 0)
3710 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3711
3712 /* Don't move directly into a return register. */
3713 if (TREE_CODE (to) == RESULT_DECL
3714 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3715 {
3716 rtx temp;
3717
3718 push_temp_slots ();
3719 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3720
3721 if (GET_CODE (to_rtx) == PARALLEL)
3722 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3723 int_size_in_bytes (TREE_TYPE (from)));
3724 else
3725 emit_move_insn (to_rtx, temp);
3726
3727 preserve_temp_slots (to_rtx);
3728 free_temp_slots ();
3729 pop_temp_slots ();
3730 return want_value ? to_rtx : NULL_RTX;
3731 }
3732
3733 /* In case we are returning the contents of an object which overlaps
3734 the place the value is being stored, use a safe function when copying
3735 a value through a pointer into a structure value return block. */
3736 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3737 && current_function_returns_struct
3738 && !current_function_returns_pcc_struct)
3739 {
3740 rtx from_rtx, size;
3741
3742 push_temp_slots ();
3743 size = expr_size (from);
3744 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3745
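 /* Since the source and destination may overlap, expand this as a
    call to memmove, which tolerates overlap, rather than as an
    inline block copy.  */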
3746 emit_library_call (memmove_libfunc, LCT_NORMAL,
3747 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3748 XEXP (from_rtx, 0), Pmode,
3749 convert_to_mode (TYPE_MODE (sizetype),
3750 size, TYPE_UNSIGNED (sizetype)),
3751 TYPE_MODE (sizetype));
3752
3753 preserve_temp_slots (to_rtx);
3754 free_temp_slots ();
3755 pop_temp_slots ();
3756 return want_value ? to_rtx : NULL_RTX;
3757 }
3758
3759 /* Compute FROM and store the value in the rtx we got. */
3760
3761 push_temp_slots ();
3762 result = store_expr (from, to_rtx, want_value);
3763 preserve_temp_slots (result);
3764 free_temp_slots ();
3765 pop_temp_slots ();
3766 return want_value ? result : NULL_RTX;
3767 }
3768
3769 /* Generate code for computing expression EXP,
3770 and storing the value into TARGET.
3771
3772 If WANT_VALUE & 1 is nonzero, return a copy of the value
3773 not in TARGET, so that we can be sure to use the proper
3774 value in a containing expression even if TARGET has something
3775 else stored in it. If possible, we copy the value through a pseudo
3776 and return that pseudo. Or, if the value is constant, we try to
3777 return the constant. In some cases, we return a pseudo
3778 copied *from* TARGET.
3779
3780 If the mode is BLKmode then we may return TARGET itself.
3781    It turns out that in BLKmode it doesn't cause a problem,
3782 because C has no operators that could combine two different
3783 assignments into the same BLKmode object with different values
3784 with no sequence point. Will other languages need this to
3785 be more thorough?
3786
3787 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3788 to catch quickly any cases where the caller uses the value
3789 and fails to set WANT_VALUE.
3790
3791 If WANT_VALUE & 2 is set, this is a store into a call param on the
3792 stack, and block moves may need to be treated specially. */
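 /* In other words, WANT_VALUE is a pair of flag bits: bit 0 asks for the
    stored value to be returned (possibly through a pseudo), and bit 1
    marks a store into an outgoing call argument on the stack, in which
    case expansion uses EXPAND_STACK_PARM and block moves use
    BLOCK_OP_CALL_PARM.  */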
3793
3794 rtx
3795 store_expr (tree exp, rtx target, int want_value)
3796 {
3797 rtx temp;
3798 rtx alt_rtl = NULL_RTX;
3799 int dont_return_target = 0;
3800 int dont_store_target = 0;
3801
3802 if (VOID_TYPE_P (TREE_TYPE (exp)))
3803 {
3804 /* C++ can generate ?: expressions with a throw expression in one
3805 branch and an rvalue in the other. Here, we resolve attempts to
3806 store the throw expression's nonexistent result. */
3807 if (want_value)
3808 abort ();
3809 expand_expr (exp, const0_rtx, VOIDmode, 0);
3810 return NULL_RTX;
3811 }
3812 if (TREE_CODE (exp) == COMPOUND_EXPR)
3813 {
3814 /* Perform first part of compound expression, then assign from second
3815 part. */
3816 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3817 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3818 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3819 }
3820 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3821 {
3822 /* For conditional expression, get safe form of the target. Then
3823 test the condition, doing the appropriate assignment on either
3824 side. This avoids the creation of unnecessary temporaries.
3825 For non-BLKmode, it is more efficient not to do this. */
3826
3827 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3828
3829 do_pending_stack_adjust ();
3830 NO_DEFER_POP;
3831 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3832 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3833 emit_jump_insn (gen_jump (lab2));
3834 emit_barrier ();
3835 emit_label (lab1);
3836 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3837 emit_label (lab2);
3838 OK_DEFER_POP;
3839
3840 return want_value & 1 ? target : NULL_RTX;
3841 }
3842 else if ((want_value & 1) != 0
3843 && MEM_P (target)
3844 && ! MEM_VOLATILE_P (target)
3845 && GET_MODE (target) != BLKmode)
3846 /* If target is in memory and caller wants value in a register instead,
3847 arrange that. Pass TARGET as target for expand_expr so that,
3848 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3849 We know expand_expr will not use the target in that case.
3850 Don't do this if TARGET is volatile because we are supposed
3851 to write it and then read it. */
3852 {
3853 temp = expand_expr (exp, target, GET_MODE (target),
3854 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3855 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3856 {
3857 /* If TEMP is already in the desired TARGET, only copy it from
3858 memory and don't store it there again. */
3859 if (temp == target
3860 || (rtx_equal_p (temp, target)
3861 && ! side_effects_p (temp) && ! side_effects_p (target)))
3862 dont_store_target = 1;
3863 temp = copy_to_reg (temp);
3864 }
3865 dont_return_target = 1;
3866 }
3867 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3868 /* If this is a scalar in a register that is stored in a wider mode
3869 than the declared mode, compute the result into its declared mode
3870 and then convert to the wider mode. Our value is the computed
3871 expression. */
3872 {
3873 rtx inner_target = 0;
3874
3875 /* If we don't want a value, we can do the conversion inside EXP,
3876 which will often result in some optimizations. Do the conversion
3877 in two steps: first change the signedness, if needed, then
3878 the extend. But don't do this if the type of EXP is a subtype
3879 of something else since then the conversion might involve
3880 more than just converting modes. */
3881 if ((want_value & 1) == 0
3882 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3883 && TREE_TYPE (TREE_TYPE (exp)) == 0
3884 && (!lang_hooks.reduce_bit_field_operations
3885 || (GET_MODE_PRECISION (GET_MODE (target))
3886 == TYPE_PRECISION (TREE_TYPE (exp)))))
3887 {
3888 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3889 != SUBREG_PROMOTED_UNSIGNED_P (target))
3890 exp = convert
3891 (lang_hooks.types.signed_or_unsigned_type
3892 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3893
3894 exp = convert (lang_hooks.types.type_for_mode
3895 (GET_MODE (SUBREG_REG (target)),
3896 SUBREG_PROMOTED_UNSIGNED_P (target)),
3897 exp);
3898
3899 inner_target = SUBREG_REG (target);
3900 }
3901
3902 temp = expand_expr (exp, inner_target, VOIDmode,
3903 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3904
3905 /* If TEMP is a MEM and we want a result value, make the access
3906 now so it gets done only once. Strictly speaking, this is
3907 only necessary if the MEM is volatile, or if the address
3908 overlaps TARGET. But not performing the load twice also
3909 reduces the amount of rtl we generate and then have to CSE. */
3910 if (MEM_P (temp) && (want_value & 1) != 0)
3911 temp = copy_to_reg (temp);
3912
3913 /* If TEMP is a VOIDmode constant, use convert_modes to make
3914 sure that we properly convert it. */
3915 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3916 {
3917 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3918 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3919 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3920 GET_MODE (target), temp,
3921 SUBREG_PROMOTED_UNSIGNED_P (target));
3922 }
3923
3924 convert_move (SUBREG_REG (target), temp,
3925 SUBREG_PROMOTED_UNSIGNED_P (target));
3926
3927 /* If we promoted a constant, change the mode back down to match
3928 target. Otherwise, the caller might get confused by a result whose
3929 mode is larger than expected. */
3930
3931 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3932 {
3933 if (GET_MODE (temp) != VOIDmode)
3934 {
3935 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3936 SUBREG_PROMOTED_VAR_P (temp) = 1;
3937 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3938 SUBREG_PROMOTED_UNSIGNED_P (target));
3939 }
3940 else
3941 temp = convert_modes (GET_MODE (target),
3942 GET_MODE (SUBREG_REG (target)),
3943 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3944 }
3945
3946 return want_value & 1 ? temp : NULL_RTX;
3947 }
3948 else
3949 {
3950 temp = expand_expr_real (exp, target, GET_MODE (target),
3951 (want_value & 2
3952 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3953 &alt_rtl);
3954 /* Return TARGET if it's a specified hardware register.
3955 If TARGET is a volatile mem ref, either return TARGET
3956 or return a reg copied *from* TARGET; ANSI requires this.
3957
3958 Otherwise, if TEMP is not TARGET, return TEMP
3959 if it is constant (for efficiency),
3960 or if we really want the correct value. */
3961 if (!(target && REG_P (target)
3962 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3963 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3964 && ! rtx_equal_p (temp, target)
3965 && (CONSTANT_P (temp) || (want_value & 1) != 0))
3966 dont_return_target = 1;
3967 }
3968
3969 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3970 the same as that of TARGET, adjust the constant. This is needed, for
3971 example, in case it is a CONST_DOUBLE and we want only a word-sized
3972 value. */
3973 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3974 && TREE_CODE (exp) != ERROR_MARK
3975 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3976 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3977 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3978
3979 /* If value was not generated in the target, store it there.
3980 Convert the value to TARGET's type first if necessary and emit the
3981 pending incrementations that have been queued when expanding EXP.
3982 Note that we cannot emit the whole queue blindly because this will
3983 effectively disable the POST_INC optimization later.
3984
3985 If TEMP and TARGET compare equal according to rtx_equal_p, but
3986 one or both of them are volatile memory refs, we have to distinguish
3987 two cases:
3988 - expand_expr has used TARGET. In this case, we must not generate
3989 another copy. This can be detected by TARGET being equal according
3990 to == .
3991 - expand_expr has not used TARGET - that means that the source just
3992 happens to have the same RTX form. Since temp will have been created
3993 by expand_expr, it will compare unequal according to == .
3994 We must generate a copy in this case, to reach the correct number
3995 of volatile memory references. */
3996
3997 if ((! rtx_equal_p (temp, target)
3998 || (temp != target && (side_effects_p (temp)
3999 || side_effects_p (target))))
4000 && TREE_CODE (exp) != ERROR_MARK
4001 && ! dont_store_target
4002 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4003 but TARGET is not valid memory reference, TEMP will differ
4004 from TARGET although it is really the same location. */
4005 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4006 /* If there's nothing to copy, don't bother. Don't call expr_size
4007 	 unless necessary, because some front ends' (e.g. C++) expr_size hook
4008 aborts on objects that are not supposed to be bit-copied or
4009 bit-initialized. */
4010 && expr_size (exp) != const0_rtx)
4011 {
4012 if (GET_MODE (temp) != GET_MODE (target)
4013 && GET_MODE (temp) != VOIDmode)
4014 {
4015 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4016 if (dont_return_target)
4017 {
4018 /* In this case, we will return TEMP,
4019 so make sure it has the proper mode.
4020 But don't forget to store the value into TARGET. */
4021 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4022 emit_move_insn (target, temp);
4023 }
4024 else
4025 convert_move (target, temp, unsignedp);
4026 }
4027
4028 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4029 {
4030 /* Handle copying a string constant into an array. The string
4031 constant may be shorter than the array. So copy just the string's
4032 actual length, and clear the rest. First get the size of the data
4033 type of the string, which is actually the size of the target. */
4034 rtx size = expr_size (exp);
4035
4036 if (GET_CODE (size) == CONST_INT
4037 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4038 emit_block_move (target, temp, size,
4039 (want_value & 2
4040 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4041 else
4042 {
4043 /* Compute the size of the data to copy from the string. */
4044 tree copy_size
4045 = size_binop (MIN_EXPR,
4046 make_tree (sizetype, size),
4047 size_int (TREE_STRING_LENGTH (exp)));
4048 rtx copy_size_rtx
4049 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4050 (want_value & 2
4051 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4052 rtx label = 0;
4053
4054 /* Copy that much. */
4055 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4056 TYPE_UNSIGNED (sizetype));
4057 emit_block_move (target, temp, copy_size_rtx,
4058 (want_value & 2
4059 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4060
4061 /* Figure out how much is left in TARGET that we have to clear.
4062 Do all calculations in ptr_mode. */
4063 if (GET_CODE (copy_size_rtx) == CONST_INT)
4064 {
4065 size = plus_constant (size, -INTVAL (copy_size_rtx));
4066 target = adjust_address (target, BLKmode,
4067 INTVAL (copy_size_rtx));
4068 }
4069 else
4070 {
4071 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4072 copy_size_rtx, NULL_RTX, 0,
4073 OPTAB_LIB_WIDEN);
4074
4075 #ifdef POINTERS_EXTEND_UNSIGNED
4076 if (GET_MODE (copy_size_rtx) != Pmode)
4077 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4078 TYPE_UNSIGNED (sizetype));
4079 #endif
4080
4081 target = offset_address (target, copy_size_rtx,
4082 highest_pow2_factor (copy_size));
4083 label = gen_label_rtx ();
4084 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4085 GET_MODE (size), 0, label);
4086 }
4087
4088 if (size != const0_rtx)
4089 clear_storage (target, size);
4090
4091 if (label)
4092 emit_label (label);
4093 }
4094 }
4095 /* Handle calls that return values in multiple non-contiguous locations.
4096 The Irix 6 ABI has examples of this. */
4097 else if (GET_CODE (target) == PARALLEL)
4098 emit_group_load (target, temp, TREE_TYPE (exp),
4099 int_size_in_bytes (TREE_TYPE (exp)));
4100 else if (GET_MODE (temp) == BLKmode)
4101 emit_block_move (target, temp, expr_size (exp),
4102 (want_value & 2
4103 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4104 else
4105 {
4106 temp = force_operand (temp, target);
4107 if (temp != target)
4108 emit_move_insn (target, temp);
4109 }
4110 }
4111
4112 /* If we don't want a value, return NULL_RTX. */
4113 if ((want_value & 1) == 0)
4114 return NULL_RTX;
4115
4116 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4117 ??? The latter test doesn't seem to make sense. */
4118 else if (dont_return_target && !MEM_P (temp))
4119 return temp;
4120
4121 /* Return TARGET itself if it is a hard register. */
4122 else if ((want_value & 1) != 0
4123 && GET_MODE (target) != BLKmode
4124 && ! (REG_P (target)
4125 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4126 return copy_to_reg (target);
4127
4128 else
4129 return target;
4130 }
4131 \f
4132 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4133    values and place the count in *P_NZ_ELTS.  Discover how many scalar fields
4134    are set to non-constant values and place that count in *P_NC_ELTS.  */
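 /* As a purely illustrative example, the initializer { 1, 0, n } for a
    three-field struct, where N is not a compile-time constant, counts
    two nonzero elements (the 1 and N) and one non-constant element
    (N).  */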
4135
4136 static void
4137 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4138 HOST_WIDE_INT *p_nc_elts)
4139 {
4140 HOST_WIDE_INT nz_elts, nc_elts;
4141 tree list;
4142
4143 nz_elts = 0;
4144 nc_elts = 0;
4145
4146 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4147 {
4148 tree value = TREE_VALUE (list);
4149 tree purpose = TREE_PURPOSE (list);
4150 HOST_WIDE_INT mult;
4151
4152 mult = 1;
4153 if (TREE_CODE (purpose) == RANGE_EXPR)
4154 {
4155 tree lo_index = TREE_OPERAND (purpose, 0);
4156 tree hi_index = TREE_OPERAND (purpose, 1);
4157
4158 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4159 mult = (tree_low_cst (hi_index, 1)
4160 - tree_low_cst (lo_index, 1) + 1);
4161 }
4162
4163 switch (TREE_CODE (value))
4164 {
4165 case CONSTRUCTOR:
4166 {
4167 HOST_WIDE_INT nz = 0, nc = 0;
4168 categorize_ctor_elements_1 (value, &nz, &nc);
4169 nz_elts += mult * nz;
4170 nc_elts += mult * nc;
4171 }
4172 break;
4173
4174 case INTEGER_CST:
4175 case REAL_CST:
4176 if (!initializer_zerop (value))
4177 nz_elts += mult;
4178 break;
4179 case COMPLEX_CST:
4180 if (!initializer_zerop (TREE_REALPART (value)))
4181 nz_elts += mult;
4182 if (!initializer_zerop (TREE_IMAGPART (value)))
4183 nz_elts += mult;
4184 break;
4185 case VECTOR_CST:
4186 {
4187 tree v;
4188 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4189 if (!initializer_zerop (TREE_VALUE (v)))
4190 nz_elts += mult;
4191 }
4192 break;
4193
4194 default:
4195 nz_elts += mult;
4196 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4197 nc_elts += mult;
4198 break;
4199 }
4200 }
4201
4202 *p_nz_elts += nz_elts;
4203 *p_nc_elts += nc_elts;
4204 }
4205
4206 void
4207 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4208 HOST_WIDE_INT *p_nc_elts)
4209 {
4210 *p_nz_elts = 0;
4211 *p_nc_elts = 0;
4212 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4213 }
4214
4215 /* Count the number of scalars in TYPE.  Return -1 on overflow or if
4216    TYPE is variable-sized.  */
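 /* For example, a struct containing an int and an array of four doubles
    counts as 1 + 4 = 5 scalars; unions are only estimated, from their
    size in words rather than from their members.  */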
4217
4218 HOST_WIDE_INT
4219 count_type_elements (tree type)
4220 {
4221 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4222 switch (TREE_CODE (type))
4223 {
4224 case ARRAY_TYPE:
4225 {
4226 tree telts = array_type_nelts (type);
4227 if (telts && host_integerp (telts, 1))
4228 {
4229 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4230 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4231 if (n == 0)
4232 return 0;
4233 else if (max / n > m)
4234 return n * m;
4235 }
4236 return -1;
4237 }
4238
4239 case RECORD_TYPE:
4240 {
4241 HOST_WIDE_INT n = 0, t;
4242 tree f;
4243
4244 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4245 if (TREE_CODE (f) == FIELD_DECL)
4246 {
4247 t = count_type_elements (TREE_TYPE (f));
4248 if (t < 0)
4249 return -1;
4250 n += t;
4251 }
4252
4253 return n;
4254 }
4255
4256 case UNION_TYPE:
4257 case QUAL_UNION_TYPE:
4258 {
4259 /* Ho hum. How in the world do we guess here? Clearly it isn't
4260 right to count the fields. Guess based on the number of words. */
4261 HOST_WIDE_INT n = int_size_in_bytes (type);
4262 if (n < 0)
4263 return -1;
4264 return n / UNITS_PER_WORD;
4265 }
4266
4267 case COMPLEX_TYPE:
4268 return 2;
4269
4270 case VECTOR_TYPE:
4271       /* ??? This is broken.  We should encode the vector width in the tree.  */
4272 return GET_MODE_NUNITS (TYPE_MODE (type));
4273
4274 case INTEGER_TYPE:
4275 case REAL_TYPE:
4276 case ENUMERAL_TYPE:
4277 case BOOLEAN_TYPE:
4278 case CHAR_TYPE:
4279 case POINTER_TYPE:
4280 case OFFSET_TYPE:
4281 case REFERENCE_TYPE:
4282 return 1;
4283
4284 case VOID_TYPE:
4285 case METHOD_TYPE:
4286 case FILE_TYPE:
4287 case SET_TYPE:
4288 case FUNCTION_TYPE:
4289 case LANG_TYPE:
4290 default:
4291 abort ();
4292 }
4293 }
4294
4295 /* Return 1 if EXP contains mostly (3/4) zeros. */
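 /* E.g. a CONSTRUCTOR for an array of 100 ints with 20 nonzero
    initializers qualifies, since fewer than a quarter of its scalar
    elements are nonzero; for a non-CONSTRUCTOR the answer comes from
    initializer_zerop.  */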
4296
4297 int
4298 mostly_zeros_p (tree exp)
4299 {
4300 if (TREE_CODE (exp) == CONSTRUCTOR)
4301
4302 {
4303 HOST_WIDE_INT nz_elts, nc_elts, elts;
4304
4305 /* If there are no ranges of true bits, it is all zero. */
4306 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4307 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4308
4309 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4310 elts = count_type_elements (TREE_TYPE (exp));
4311
4312 return nz_elts < elts / 4;
4313 }
4314
4315 return initializer_zerop (exp);
4316 }
4317 \f
4318 /* Helper function for store_constructor.
4319 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4320 TYPE is the type of the CONSTRUCTOR, not the element type.
4321 CLEARED is as for store_constructor.
4322 ALIAS_SET is the alias set to use for any stores.
4323
4324 This provides a recursive shortcut back to store_constructor when it isn't
4325 necessary to go through store_field. This is so that we can pass through
4326 the cleared field to let store_constructor know that we may not have to
4327 clear a substructure if the outer structure has already been cleared. */
4328
4329 static void
4330 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4331 HOST_WIDE_INT bitpos, enum machine_mode mode,
4332 tree exp, tree type, int cleared, int alias_set)
4333 {
4334 if (TREE_CODE (exp) == CONSTRUCTOR
4335 /* We can only call store_constructor recursively if the size and
4336 bit position are on a byte boundary. */
4337 && bitpos % BITS_PER_UNIT == 0
4338 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4339 /* If we have a nonzero bitpos for a register target, then we just
4340 let store_field do the bitfield handling. This is unlikely to
4341 	 generate unnecessary clear instructions anyway.  */
4342 && (bitpos == 0 || MEM_P (target)))
4343 {
4344 if (MEM_P (target))
4345 target
4346 = adjust_address (target,
4347 GET_MODE (target) == BLKmode
4348 || 0 != (bitpos
4349 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4350 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4351
4352
4353 /* Update the alias set, if required. */
4354 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4355 && MEM_ALIAS_SET (target) != 0)
4356 {
4357 target = copy_rtx (target);
4358 set_mem_alias_set (target, alias_set);
4359 }
4360
4361 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4362 }
4363 else
4364 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4365 alias_set);
4366 }
4367
4368 /* Store the value of constructor EXP into the rtx TARGET.
4369 TARGET is either a REG or a MEM; we know it cannot conflict, since
4370 safe_from_p has been called.
4371 CLEARED is true if TARGET is known to have been zero'd.
4372 SIZE is the number of bytes of TARGET we are allowed to modify: this
4373 may not be the same as the size of EXP if we are assigning to a field
4374 which has been packed to exclude padding bits. */
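 /* The body below handles three shapes of constructor: records, unions
    and qualified unions; arrays and vectors; and SET_TYPE (set)
    constructors.  Anything else aborts.  */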
4375
4376 static void
4377 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4378 {
4379 tree type = TREE_TYPE (exp);
4380 #ifdef WORD_REGISTER_OPERATIONS
4381 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4382 #endif
4383
4384 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4385 || TREE_CODE (type) == QUAL_UNION_TYPE)
4386 {
4387 tree elt;
4388
4389 /* If size is zero or the target is already cleared, do nothing. */
4390 if (size == 0 || cleared)
4391 cleared = 1;
4392 /* We either clear the aggregate or indicate the value is dead. */
4393 else if ((TREE_CODE (type) == UNION_TYPE
4394 || TREE_CODE (type) == QUAL_UNION_TYPE)
4395 && ! CONSTRUCTOR_ELTS (exp))
4396 /* If the constructor is empty, clear the union. */
4397 {
4398 clear_storage (target, expr_size (exp));
4399 cleared = 1;
4400 }
4401
4402 /* If we are building a static constructor into a register,
4403 set the initial value as zero so we can fold the value into
4404 a constant. But if more than one register is involved,
4405 this probably loses. */
4406 else if (REG_P (target) && TREE_STATIC (exp)
4407 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4408 {
4409 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4410 cleared = 1;
4411 }
4412
4413 /* If the constructor has fewer fields than the structure
4414 or if we are initializing the structure to mostly zeros,
4415 clear the whole structure first. Don't do this if TARGET is a
4416 register whose mode size isn't equal to SIZE since clear_storage
4417 can't handle this case. */
4418 else if (size > 0
4419 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4420 || mostly_zeros_p (exp))
4421 && (!REG_P (target)
4422 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4423 == size)))
4424 {
4425 rtx xtarget = target;
4426
4427 if (readonly_fields_p (type))
4428 {
4429 xtarget = copy_rtx (xtarget);
4430 RTX_UNCHANGING_P (xtarget) = 1;
4431 }
4432
4433 clear_storage (xtarget, GEN_INT (size));
4434 cleared = 1;
4435 }
4436
4437 if (! cleared)
4438 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4439
4440 /* Store each element of the constructor into
4441 the corresponding field of TARGET. */
4442
4443 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4444 {
4445 tree field = TREE_PURPOSE (elt);
4446 tree value = TREE_VALUE (elt);
4447 enum machine_mode mode;
4448 HOST_WIDE_INT bitsize;
4449 HOST_WIDE_INT bitpos = 0;
4450 tree offset;
4451 rtx to_rtx = target;
4452
4453 /* Just ignore missing fields.
4454 We cleared the whole structure, above,
4455 if any fields are missing. */
4456 if (field == 0)
4457 continue;
4458
4459 if (cleared && initializer_zerop (value))
4460 continue;
4461
4462 if (host_integerp (DECL_SIZE (field), 1))
4463 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4464 else
4465 bitsize = -1;
4466
4467 mode = DECL_MODE (field);
4468 if (DECL_BIT_FIELD (field))
4469 mode = VOIDmode;
4470
4471 offset = DECL_FIELD_OFFSET (field);
4472 if (host_integerp (offset, 0)
4473 && host_integerp (bit_position (field), 0))
4474 {
4475 bitpos = int_bit_position (field);
4476 offset = 0;
4477 }
4478 else
4479 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4480
4481 if (offset)
4482 {
4483 rtx offset_rtx;
4484
4485 offset
4486 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4487 make_tree (TREE_TYPE (exp),
4488 target));
4489
4490 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4491 if (!MEM_P (to_rtx))
4492 abort ();
4493
4494 #ifdef POINTERS_EXTEND_UNSIGNED
4495 if (GET_MODE (offset_rtx) != Pmode)
4496 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4497 #else
4498 if (GET_MODE (offset_rtx) != ptr_mode)
4499 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4500 #endif
4501
4502 to_rtx = offset_address (to_rtx, offset_rtx,
4503 highest_pow2_factor (offset));
4504 }
4505
4506 if (TREE_READONLY (field))
4507 {
4508 if (MEM_P (to_rtx))
4509 to_rtx = copy_rtx (to_rtx);
4510
4511 RTX_UNCHANGING_P (to_rtx) = 1;
4512 }
4513
4514 #ifdef WORD_REGISTER_OPERATIONS
4515 /* If this initializes a field that is smaller than a word, at the
4516 start of a word, try to widen it to a full word.
4517 This special case allows us to output C++ member function
4518 initializations in a form that the optimizers can understand. */
4519 if (REG_P (target)
4520 && bitsize < BITS_PER_WORD
4521 && bitpos % BITS_PER_WORD == 0
4522 && GET_MODE_CLASS (mode) == MODE_INT
4523 && TREE_CODE (value) == INTEGER_CST
4524 && exp_size >= 0
4525 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4526 {
4527 tree type = TREE_TYPE (value);
4528
4529 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4530 {
4531 type = lang_hooks.types.type_for_size
4532 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4533 value = convert (type, value);
4534 }
4535
4536 if (BYTES_BIG_ENDIAN)
4537 value
4538 = fold (build (LSHIFT_EXPR, type, value,
4539 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4540 bitsize = BITS_PER_WORD;
4541 mode = word_mode;
4542 }
4543 #endif
4544
4545 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4546 && DECL_NONADDRESSABLE_P (field))
4547 {
4548 to_rtx = copy_rtx (to_rtx);
4549 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4550 }
4551
4552 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4553 value, type, cleared,
4554 get_alias_set (TREE_TYPE (field)));
4555 }
4556 }
4557 else if (TREE_CODE (type) == ARRAY_TYPE
4558 || TREE_CODE (type) == VECTOR_TYPE)
4559 {
4560 tree elt;
4561 int i;
4562 int need_to_clear;
4563 tree domain;
4564 tree elttype = TREE_TYPE (type);
4565 int const_bounds_p;
4566 HOST_WIDE_INT minelt = 0;
4567 HOST_WIDE_INT maxelt = 0;
4568 int icode = 0;
4569 rtx *vector = NULL;
4570 int elt_size = 0;
4571 unsigned n_elts = 0;
4572
4573 if (TREE_CODE (type) == ARRAY_TYPE)
4574 domain = TYPE_DOMAIN (type);
4575 else
4576 /* Vectors do not have domains; look up the domain of
4577 the array embedded in the debug representation type.
4578 FIXME Would probably be more efficient to treat vectors
4579 separately from arrays. */
4580 {
4581 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4582 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4583 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4584 {
4585 enum machine_mode mode = GET_MODE (target);
4586
4587 icode = (int) vec_init_optab->handlers[mode].insn_code;
4588 if (icode != CODE_FOR_nothing)
4589 {
4590 unsigned int i;
4591
4592 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4593 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4594 vector = alloca (n_elts);
4595 for (i = 0; i < n_elts; i++)
4596 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4597 }
4598 }
4599 }
4600
4601 const_bounds_p = (TYPE_MIN_VALUE (domain)
4602 && TYPE_MAX_VALUE (domain)
4603 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4604 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4605
4606 /* If we have constant bounds for the range of the type, get them. */
4607 if (const_bounds_p)
4608 {
4609 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4610 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4611 }
4612
4613 /* If the constructor has fewer elements than the array,
4614 clear the whole array first. Similarly if this is
4615 	 a static constructor of a non-BLKmode object.  */
4616 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4617 need_to_clear = 1;
4618 else
4619 {
4620 HOST_WIDE_INT count = 0, zero_count = 0;
4621 need_to_clear = ! const_bounds_p;
4622
4623 /* This loop is a more accurate version of the loop in
4624 mostly_zeros_p (it handles RANGE_EXPR in an index).
4625 It is also needed to check for missing elements. */
4626 for (elt = CONSTRUCTOR_ELTS (exp);
4627 elt != NULL_TREE && ! need_to_clear;
4628 elt = TREE_CHAIN (elt))
4629 {
4630 tree index = TREE_PURPOSE (elt);
4631 HOST_WIDE_INT this_node_count;
4632
4633 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4634 {
4635 tree lo_index = TREE_OPERAND (index, 0);
4636 tree hi_index = TREE_OPERAND (index, 1);
4637
4638 if (! host_integerp (lo_index, 1)
4639 || ! host_integerp (hi_index, 1))
4640 {
4641 need_to_clear = 1;
4642 break;
4643 }
4644
4645 this_node_count = (tree_low_cst (hi_index, 1)
4646 - tree_low_cst (lo_index, 1) + 1);
4647 }
4648 else
4649 this_node_count = 1;
4650
4651 count += this_node_count;
4652 if (mostly_zeros_p (TREE_VALUE (elt)))
4653 zero_count += this_node_count;
4654 }
4655
4656 /* Clear the entire array first if there are any missing elements,
4657 or if the incidence of zero elements is >= 75%. */
4658 if (! need_to_clear
4659 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4660 need_to_clear = 1;
4661 }
4662
4663 if (need_to_clear && size > 0 && !vector)
4664 {
4665 if (! cleared)
4666 {
4667 if (REG_P (target))
4668 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4669 else
4670 clear_storage (target, GEN_INT (size));
4671 }
4672 cleared = 1;
4673 }
4674 else if (REG_P (target))
4675 /* Inform later passes that the old value is dead. */
4676 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4677
4678 /* Store each element of the constructor into
4679 the corresponding element of TARGET, determined
4680 by counting the elements. */
4681 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4682 elt;
4683 elt = TREE_CHAIN (elt), i++)
4684 {
4685 enum machine_mode mode;
4686 HOST_WIDE_INT bitsize;
4687 HOST_WIDE_INT bitpos;
4688 int unsignedp;
4689 tree value = TREE_VALUE (elt);
4690 tree index = TREE_PURPOSE (elt);
4691 rtx xtarget = target;
4692
4693 if (cleared && initializer_zerop (value))
4694 continue;
4695
4696 unsignedp = TYPE_UNSIGNED (elttype);
4697 mode = TYPE_MODE (elttype);
4698 if (mode == BLKmode)
4699 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4700 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4701 : -1);
4702 else
4703 bitsize = GET_MODE_BITSIZE (mode);
4704
4705 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4706 {
4707 tree lo_index = TREE_OPERAND (index, 0);
4708 tree hi_index = TREE_OPERAND (index, 1);
4709 rtx index_r, pos_rtx;
4710 HOST_WIDE_INT lo, hi, count;
4711 tree position;
4712
4713 if (vector)
4714 abort ();
4715
4716 /* If the range is constant and "small", unroll the loop. */
4717 if (const_bounds_p
4718 && host_integerp (lo_index, 0)
4719 && host_integerp (hi_index, 0)
4720 && (lo = tree_low_cst (lo_index, 0),
4721 hi = tree_low_cst (hi_index, 0),
4722 count = hi - lo + 1,
4723 (!MEM_P (target)
4724 || count <= 2
4725 || (host_integerp (TYPE_SIZE (elttype), 1)
4726 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4727 <= 40 * 8)))))
4728 {
4729 lo -= minelt; hi -= minelt;
4730 for (; lo <= hi; lo++)
4731 {
4732 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4733
4734 if (MEM_P (target)
4735 && !MEM_KEEP_ALIAS_SET_P (target)
4736 && TREE_CODE (type) == ARRAY_TYPE
4737 && TYPE_NONALIASED_COMPONENT (type))
4738 {
4739 target = copy_rtx (target);
4740 MEM_KEEP_ALIAS_SET_P (target) = 1;
4741 }
4742
4743 store_constructor_field
4744 (target, bitsize, bitpos, mode, value, type, cleared,
4745 get_alias_set (elttype));
4746 }
4747 }
4748 else
4749 {
4750 rtx loop_start = gen_label_rtx ();
4751 rtx loop_end = gen_label_rtx ();
4752 tree exit_cond;
4753
4754 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4755 unsignedp = TYPE_UNSIGNED (domain);
4756
4757 index = build_decl (VAR_DECL, NULL_TREE, domain);
4758
4759 index_r
4760 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4761 &unsignedp, 0));
4762 SET_DECL_RTL (index, index_r);
4763 store_expr (lo_index, index_r, 0);
4764
4765 /* Build the head of the loop. */
4766 do_pending_stack_adjust ();
4767 emit_label (loop_start);
4768
4769 /* Assign value to element index. */
4770 position
4771 = convert (ssizetype,
4772 fold (build (MINUS_EXPR, TREE_TYPE (index),
4773 index, TYPE_MIN_VALUE (domain))));
4774 position = size_binop (MULT_EXPR, position,
4775 convert (ssizetype,
4776 TYPE_SIZE_UNIT (elttype)));
4777
4778 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4779 xtarget = offset_address (target, pos_rtx,
4780 highest_pow2_factor (position));
4781 xtarget = adjust_address (xtarget, mode, 0);
4782 if (TREE_CODE (value) == CONSTRUCTOR)
4783 store_constructor (value, xtarget, cleared,
4784 bitsize / BITS_PER_UNIT);
4785 else
4786 store_expr (value, xtarget, 0);
4787
4788 /* Generate a conditional jump to exit the loop. */
4789 exit_cond = build (LT_EXPR, integer_type_node,
4790 index, hi_index);
4791 jumpif (exit_cond, loop_end);
4792
4793 /* Update the loop counter, and jump to the head of
4794 the loop. */
4795 expand_assignment (index,
4796 build2 (PLUS_EXPR, TREE_TYPE (index),
4797 index, integer_one_node), 0);
4798
4799 emit_jump (loop_start);
4800
4801 /* Build the end of the loop. */
4802 emit_label (loop_end);
4803 }
4804 }
4805 else if ((index != 0 && ! host_integerp (index, 0))
4806 || ! host_integerp (TYPE_SIZE (elttype), 1))
4807 {
4808 tree position;
4809
4810 if (vector)
4811 abort ();
4812
4813 if (index == 0)
4814 index = ssize_int (1);
4815
4816 if (minelt)
4817 index = convert (ssizetype,
4818 fold (build (MINUS_EXPR, index,
4819 TYPE_MIN_VALUE (domain))));
4820
4821 position = size_binop (MULT_EXPR, index,
4822 convert (ssizetype,
4823 TYPE_SIZE_UNIT (elttype)));
4824 xtarget = offset_address (target,
4825 expand_expr (position, 0, VOIDmode, 0),
4826 highest_pow2_factor (position));
4827 xtarget = adjust_address (xtarget, mode, 0);
4828 store_expr (value, xtarget, 0);
4829 }
4830 else if (vector)
4831 {
4832 int pos;
4833
4834 if (index != 0)
4835 pos = tree_low_cst (index, 0) - minelt;
4836 else
4837 pos = i;
4838 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4839 }
4840 else
4841 {
4842 if (index != 0)
4843 bitpos = ((tree_low_cst (index, 0) - minelt)
4844 * tree_low_cst (TYPE_SIZE (elttype), 1));
4845 else
4846 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4847
4848 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4849 && TREE_CODE (type) == ARRAY_TYPE
4850 && TYPE_NONALIASED_COMPONENT (type))
4851 {
4852 target = copy_rtx (target);
4853 MEM_KEEP_ALIAS_SET_P (target) = 1;
4854 }
4855 store_constructor_field (target, bitsize, bitpos, mode, value,
4856 type, cleared, get_alias_set (elttype));
4857 }
4858 }
4859 if (vector)
4860 {
4861 emit_insn (GEN_FCN (icode) (target,
4862 gen_rtx_PARALLEL (GET_MODE (target),
4863 gen_rtvec_v (n_elts, vector))));
4864 }
4865 }
4866
4867 /* Set constructor assignments. */
4868 else if (TREE_CODE (type) == SET_TYPE)
4869 {
4870 tree elt = CONSTRUCTOR_ELTS (exp);
4871 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4872 tree domain = TYPE_DOMAIN (type);
4873 tree domain_min, domain_max, bitlength;
4874
4875 /* The default implementation strategy is to extract the constant
4876 parts of the constructor, use that to initialize the target,
4877 and then "or" in whatever non-constant ranges we need in addition.
4878
4879 If a large set is all zero or all ones, it is
4880 probably better to set it using memset.
4881 Also, if a large set has just a single range, it may also be
4882 	 better to first clear the whole set (using
4883 	 memset), and then set the bits we want.  */
4884
4885 /* Check for all zeros. */
4886 if (elt == NULL_TREE && size > 0)
4887 {
4888 if (!cleared)
4889 clear_storage (target, GEN_INT (size));
4890 return;
4891 }
4892
4893 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4894 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4895 bitlength = size_binop (PLUS_EXPR,
4896 size_diffop (domain_max, domain_min),
4897 ssize_int (1));
4898
4899 nbits = tree_low_cst (bitlength, 1);
4900
4901 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4902 are "complicated" (more than one range), initialize (the
4903 constant parts) by copying from a constant. */
4904 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4905 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4906 {
4907 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4908 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4909 char *bit_buffer = alloca (nbits);
4910 HOST_WIDE_INT word = 0;
4911 unsigned int bit_pos = 0;
4912 unsigned int ibit = 0;
4913 unsigned int offset = 0; /* In bytes from beginning of set. */
4914
4915 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4916 for (;;)
4917 {
4918 if (bit_buffer[ibit])
4919 {
4920 if (BYTES_BIG_ENDIAN)
4921 word |= (1 << (set_word_size - 1 - bit_pos));
4922 else
4923 word |= 1 << bit_pos;
4924 }
4925
4926 bit_pos++; ibit++;
4927 if (bit_pos >= set_word_size || ibit == nbits)
4928 {
4929 if (word != 0 || ! cleared)
4930 {
4931 rtx datum = gen_int_mode (word, mode);
4932 rtx to_rtx;
4933
4934 /* The assumption here is that it is safe to use
4935 XEXP if the set is multi-word, but not if
4936 it's single-word. */
4937 if (MEM_P (target))
4938 to_rtx = adjust_address (target, mode, offset);
4939 else if (offset == 0)
4940 to_rtx = target;
4941 else
4942 abort ();
4943 emit_move_insn (to_rtx, datum);
4944 }
4945
4946 if (ibit == nbits)
4947 break;
4948 word = 0;
4949 bit_pos = 0;
4950 offset += set_word_size / BITS_PER_UNIT;
4951 }
4952 }
4953 }
4954 else if (!cleared)
4955 /* Don't bother clearing storage if the set is all ones. */
4956 if (TREE_CHAIN (elt) != NULL_TREE
4957 || (TREE_PURPOSE (elt) == NULL_TREE
4958 ? nbits != 1
4959 : ( ! host_integerp (TREE_VALUE (elt), 0)
4960 || ! host_integerp (TREE_PURPOSE (elt), 0)
4961 || (tree_low_cst (TREE_VALUE (elt), 0)
4962 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4963 != (HOST_WIDE_INT) nbits))))
4964 clear_storage (target, expr_size (exp));
4965
4966 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4967 {
4968 /* Start of range of element or NULL. */
4969 tree startbit = TREE_PURPOSE (elt);
4970 /* End of range of element, or element value. */
4971 tree endbit = TREE_VALUE (elt);
4972 HOST_WIDE_INT startb, endb;
4973 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4974
4975 bitlength_rtx = expand_expr (bitlength,
4976 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4977
4978 /* Handle non-range tuple element like [ expr ]. */
4979 if (startbit == NULL_TREE)
4980 {
4981 startbit = save_expr (endbit);
4982 endbit = startbit;
4983 }
4984
4985 startbit = convert (sizetype, startbit);
4986 endbit = convert (sizetype, endbit);
4987 if (! integer_zerop (domain_min))
4988 {
4989 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4990 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4991 }
4992 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4993 EXPAND_CONST_ADDRESS);
4994 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4995 EXPAND_CONST_ADDRESS);
4996
4997 if (REG_P (target))
4998 {
4999 targetx
5000 = assign_temp
5001 ((build_qualified_type (lang_hooks.types.type_for_mode
5002 (GET_MODE (target), 0),
5003 TYPE_QUAL_CONST)),
5004 0, 1, 1);
5005 emit_move_insn (targetx, target);
5006 }
5007
5008 else if (MEM_P (target))
5009 targetx = target;
5010 else
5011 abort ();
5012
5013 /* Optimization: If startbit and endbit are constants divisible
5014 by BITS_PER_UNIT, call memset instead. */
5015 if (TREE_CODE (startbit) == INTEGER_CST
5016 && TREE_CODE (endbit) == INTEGER_CST
5017 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5018 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5019 {
5020 emit_library_call (memset_libfunc, LCT_NORMAL,
5021 VOIDmode, 3,
5022 plus_constant (XEXP (targetx, 0),
5023 startb / BITS_PER_UNIT),
5024 Pmode,
5025 constm1_rtx, TYPE_MODE (integer_type_node),
5026 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5027 TYPE_MODE (sizetype));
5028 }
5029 else
5030 emit_library_call (setbits_libfunc, LCT_NORMAL,
5031 VOIDmode, 4, XEXP (targetx, 0),
5032 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5033 startbit_rtx, TYPE_MODE (sizetype),
5034 endbit_rtx, TYPE_MODE (sizetype));
5035
5036 if (REG_P (target))
5037 emit_move_insn (target, targetx);
5038 }
5039 }
5040
5041 else
5042 abort ();
5043 }
5044
5045 /* Store the value of EXP (an expression tree)
5046 into a subfield of TARGET which has mode MODE and occupies
5047 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5048 If MODE is VOIDmode, it means that we are storing into a bit-field.
5049
5050 If VALUE_MODE is VOIDmode, return nothing in particular.
5051 UNSIGNEDP is not used in this case.
5052
5053 Otherwise, return an rtx for the value stored. This rtx
5054 has mode VALUE_MODE if that is convenient to do.
5055 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5056
5057    TYPE is the type of the underlying object.
5058
5059 ALIAS_SET is the alias set for the destination. This value will
5060 (in general) be different from that for TARGET, since TARGET is a
5061 reference to the containing structure. */
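 /* Broadly, there are two paths below: fields that cannot be addressed
    directly (bit-fields, misaligned fields, fields of an object living
    in a register) are expanded and stored with store_bit_field, while
    ordinary memory fields are reached with adjust_address and stored
    with store_expr.  */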
5062
5063 static rtx
5064 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5065 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5066 int unsignedp, tree type, int alias_set)
5067 {
5068 HOST_WIDE_INT width_mask = 0;
5069
5070 if (TREE_CODE (exp) == ERROR_MARK)
5071 return const0_rtx;
5072
5073 /* If we have nothing to store, do nothing unless the expression has
5074 side-effects. */
5075 if (bitsize == 0)
5076 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5077 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5078 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5079
5080 /* If we are storing into an unaligned field of an aligned union that is
5081 in a register, we may have the mode of TARGET being an integer mode but
5082 MODE == BLKmode. In that case, get an aligned object whose size and
5083 alignment are the same as TARGET and store TARGET into it (we can avoid
5084 the store if the field being stored is the entire width of TARGET). Then
5085 call ourselves recursively to store the field into a BLKmode version of
5086 that object. Finally, load from the object into TARGET. This is not
5087 very efficient in general, but should only be slightly more expensive
5088 than the otherwise-required unaligned accesses. Perhaps this can be
5089 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5090 twice, once with emit_move_insn and once via store_field. */
5091
5092 if (mode == BLKmode
5093 && (REG_P (target) || GET_CODE (target) == SUBREG))
5094 {
5095 rtx object = assign_temp (type, 0, 1, 1);
5096 rtx blk_object = adjust_address (object, BLKmode, 0);
5097
5098 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5099 emit_move_insn (object, target);
5100
5101 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5102 alias_set);
5103
5104 emit_move_insn (target, object);
5105
5106 /* We want to return the BLKmode version of the data. */
5107 return blk_object;
5108 }
5109
5110 if (GET_CODE (target) == CONCAT)
5111 {
5112 /* We're storing into a struct containing a single __complex. */
5113
5114 if (bitpos != 0)
5115 abort ();
5116 return store_expr (exp, target, value_mode != VOIDmode);
5117 }
5118
5119 /* If the structure is in a register or if the component
5120 is a bit field, we cannot use addressing to access it.
5121 Use bit-field techniques or SUBREG to store in it. */
5122
5123 if (mode == VOIDmode
5124 || (mode != BLKmode && ! direct_store[(int) mode]
5125 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5126 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5127 || REG_P (target)
5128 || GET_CODE (target) == SUBREG
5129 /* If the field isn't aligned enough to store as an ordinary memref,
5130 store it as a bit field. */
5131 || (mode != BLKmode
5132 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5133 || bitpos % GET_MODE_ALIGNMENT (mode))
5134 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5135 || (bitpos % BITS_PER_UNIT != 0)))
5136 /* If the RHS and field are a constant size and the size of the
5137 RHS isn't the same size as the bitfield, we must use bitfield
5138 operations. */
5139 || (bitsize >= 0
5140 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5141 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5142 {
5143 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5144
5145 /* If BITSIZE is narrower than the size of the type of EXP
5146 we will be narrowing TEMP. Normally, what's wanted are the
5147 	 low-order bits.  However, if EXP's type is a record and this is a
5148 big-endian machine, we want the upper BITSIZE bits. */
5149 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5150 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5151 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5152 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5153 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5154 - bitsize),
5155 NULL_RTX, 1);
5156
5157 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5158 MODE. */
5159 if (mode != VOIDmode && mode != BLKmode
5160 && mode != TYPE_MODE (TREE_TYPE (exp)))
5161 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5162
5163 /* If the modes of TARGET and TEMP are both BLKmode, both
5164 must be in memory and BITPOS must be aligned on a byte
5165 boundary. If so, we simply do a block copy. */
5166 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5167 {
5168 if (!MEM_P (target) || !MEM_P (temp)
5169 || bitpos % BITS_PER_UNIT != 0)
5170 abort ();
5171
5172 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5173 emit_block_move (target, temp,
5174 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5175 / BITS_PER_UNIT),
5176 BLOCK_OP_NORMAL);
5177
5178 return value_mode == VOIDmode ? const0_rtx : target;
5179 }
5180
5181 /* Store the value in the bitfield. */
5182 store_bit_field (target, bitsize, bitpos, mode, temp);
5183
5184 if (value_mode != VOIDmode)
5185 {
5186 /* The caller wants an rtx for the value.
5187 If possible, avoid refetching from the bitfield itself. */
5188 if (width_mask != 0
5189 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5190 {
5191 tree count;
5192 enum machine_mode tmode;
5193
5194 tmode = GET_MODE (temp);
5195 if (tmode == VOIDmode)
5196 tmode = value_mode;
5197
5198 if (unsignedp)
5199 return expand_and (tmode, temp,
5200 gen_int_mode (width_mask, tmode),
5201 NULL_RTX);
5202
5203 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5204 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5205 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5206 }
5207
5208 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5209 NULL_RTX, value_mode, VOIDmode);
5210 }
5211 return const0_rtx;
5212 }
5213 else
5214 {
5215 rtx addr = XEXP (target, 0);
5216 rtx to_rtx = target;
5217
5218 /* If a value is wanted, it must be the lhs;
5219 so make the address stable for multiple use. */
5220
5221 if (value_mode != VOIDmode && !REG_P (addr)
5222 && ! CONSTANT_ADDRESS_P (addr)
5223 /* A frame-pointer reference is already stable. */
5224 && ! (GET_CODE (addr) == PLUS
5225 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5226 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5227 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5228 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5229
5230 /* Now build a reference to just the desired component. */
5231
5232 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5233
5234 if (to_rtx == target)
5235 to_rtx = copy_rtx (to_rtx);
5236
5237 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5238 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5239 set_mem_alias_set (to_rtx, alias_set);
5240
5241 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5242 }
5243 }
5244 \f
5245 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5246 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5247 codes and find the ultimate containing object, which we return.
5248
5249 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5250 bit position, and *PUNSIGNEDP to the signedness of the field.
5251 If the position of the field is variable, we store a tree
5252 giving the variable offset (in units) in *POFFSET.
5253 This offset is in addition to the bit position.
5254 If the position is not variable, we store 0 in *POFFSET.
5255
5256 If any of the extraction expressions is volatile,
5257 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5258
5259 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5260 is a mode that can be used to access the field. In that case, *PBITSIZE
5261 is redundant.
5262
5263 If the field describes a variable-sized object, *PMODE is set to
5264 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5265 this case, but the address of the object can be found. */
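 /* As a purely illustrative example, for a COMPONENT_REF such as X.F,
    where F is a bit-field of width 5 starting at bit 3 of X, this
    returns X with *PBITSIZE == 5, *PBITPOS == 3, *POFFSET == 0 and
    *PMODE == VOIDmode.  */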
5266
5267 tree
5268 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5269 HOST_WIDE_INT *pbitpos, tree *poffset,
5270 enum machine_mode *pmode, int *punsignedp,
5271 int *pvolatilep)
5272 {
5273 tree size_tree = 0;
5274 enum machine_mode mode = VOIDmode;
5275 tree offset = size_zero_node;
5276 tree bit_offset = bitsize_zero_node;
5277 tree tem;
5278
5279 /* First get the mode, signedness, and size. We do this from just the
5280 outermost expression. */
5281 if (TREE_CODE (exp) == COMPONENT_REF)
5282 {
5283 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5284 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5285 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5286
5287 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5288 }
5289 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5290 {
5291 size_tree = TREE_OPERAND (exp, 1);
5292 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5293 }
5294 else
5295 {
5296 mode = TYPE_MODE (TREE_TYPE (exp));
5297 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5298
5299 if (mode == BLKmode)
5300 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5301 else
5302 *pbitsize = GET_MODE_BITSIZE (mode);
5303 }
5304
5305 if (size_tree != 0)
5306 {
5307 if (! host_integerp (size_tree, 1))
5308 mode = BLKmode, *pbitsize = -1;
5309 else
5310 *pbitsize = tree_low_cst (size_tree, 1);
5311 }
5312
5313 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5314 and find the ultimate containing object. */
5315 while (1)
5316 {
5317 if (TREE_CODE (exp) == BIT_FIELD_REF)
5318 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5319 else if (TREE_CODE (exp) == COMPONENT_REF)
5320 {
5321 tree field = TREE_OPERAND (exp, 1);
5322 tree this_offset = component_ref_field_offset (exp);
5323
5324 /* If this field hasn't been filled in yet, don't go
5325 past it. This should only happen when folding expressions
5326 made during type construction. */
5327 if (this_offset == 0)
5328 break;
5329
5330 offset = size_binop (PLUS_EXPR, offset, this_offset);
5331 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5332 DECL_FIELD_BIT_OFFSET (field));
5333
5334 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5335 }
5336
5337 else if (TREE_CODE (exp) == ARRAY_REF
5338 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5339 {
5340 tree index = TREE_OPERAND (exp, 1);
5341 tree low_bound = array_ref_low_bound (exp);
5342 tree unit_size = array_ref_element_size (exp);
5343
5344 /* We assume all arrays have sizes that are a multiple of a byte.
5345 First subtract the lower bound, if any, in the type of the
5346 index, then convert to sizetype and multiply by the size of the
5347 array element. */
5348 if (! integer_zerop (low_bound))
5349 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5350 index, low_bound));
5351
5352 offset = size_binop (PLUS_EXPR, offset,
5353 size_binop (MULT_EXPR,
5354 convert (sizetype, index),
5355 unit_size));
5356 }
5357
5358 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5359 conversions that don't change the mode, and all view conversions
5360 except those that need to "step up" the alignment. */
5361 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5362 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5363 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5364 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5365 && STRICT_ALIGNMENT
5366 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5367 < BIGGEST_ALIGNMENT)
5368 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5369 || TYPE_ALIGN_OK (TREE_TYPE
5370 (TREE_OPERAND (exp, 0))))))
5371 && ! ((TREE_CODE (exp) == NOP_EXPR
5372 || TREE_CODE (exp) == CONVERT_EXPR)
5373 && (TYPE_MODE (TREE_TYPE (exp))
5374 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5375 break;
5376
5377 /* If any reference in the chain is volatile, the effect is volatile. */
5378 if (TREE_THIS_VOLATILE (exp))
5379 *pvolatilep = 1;
5380
5381 exp = TREE_OPERAND (exp, 0);
5382 }
5383
5384 /* If OFFSET is constant, see if we can return the whole thing as a
5385 constant bit position. Otherwise, split it up. */
5386 if (host_integerp (offset, 0)
5387 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5388 bitsize_unit_node))
5389 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5390 && host_integerp (tem, 0))
5391 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5392 else
5393 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5394
5395 *pmode = mode;
5396 return exp;
5397 }
5398
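/* Editorial note: a minimal, hypothetical usage sketch of get_inner_reference
   above, mirroring the way expand_expr_real_1 later in this file calls it;
   all variable names are illustrative.

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

   If OFFSET comes back as 0, BITPOS is the constant bit offset of the
   reference within BASE; otherwise the byte offset is variable and must be
   added separately.  */
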
5399 /* Return a tree of sizetype representing the size, in bytes, of the element
5400 of EXP, an ARRAY_REF. */
5401
5402 tree
5403 array_ref_element_size (tree exp)
5404 {
5405 tree aligned_size = TREE_OPERAND (exp, 3);
5406 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5407
5408 /* If a size was specified in the ARRAY_REF, it's the size measured
5409 in alignment units of the element type. So multiply by that value. */
5410 if (aligned_size)
5411 return size_binop (MULT_EXPR, aligned_size,
5412 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5413
5414 /* Otherwise, take the size from that of the element type. Substitute
5415 any PLACEHOLDER_EXPR that we have. */
5416 else
5417 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5418 }
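
/* Editorial note: a worked example for array_ref_element_size above, with
   made-up numbers.  If the ARRAY_REF supplies an aligned size of 2 and the
   element type has TYPE_ALIGN of 32 bits, the size returned is
   2 * (32 / BITS_PER_UNIT) = 8 bytes on a target with 8-bit units.  */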
5419
5420 /* Return a tree representing the lower bound of the array mentioned in
5421 EXP, an ARRAY_REF. */
5422
5423 tree
5424 array_ref_low_bound (tree exp)
5425 {
5426 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5427
5428 /* If a lower bound is specified in EXP, use it. */
5429 if (TREE_OPERAND (exp, 2))
5430 return TREE_OPERAND (exp, 2);
5431
5432 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5433 substituting for a PLACEHOLDER_EXPR as needed. */
5434 if (domain_type && TYPE_MIN_VALUE (domain_type))
5435 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5436
5437 /* Otherwise, return a zero of the appropriate type. */
5438 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5439 }
5440
5441 /* Return a tree representing the upper bound of the array mentioned in
5442 EXP, an ARRAY_REF. */
5443
5444 tree
5445 array_ref_up_bound (tree exp)
5446 {
5447 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5448
5449 /* If there is a domain type and it has an upper bound, use it, substituting
5450 for a PLACEHOLDER_EXPR as needed. */
5451 if (domain_type && TYPE_MAX_VALUE (domain_type))
5452 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5453
5454 /* Otherwise fail. */
5455 return NULL_TREE;
5456 }
5457
5458 /* Return a tree representing the offset, in bytes, of the field referenced
5459 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5460
5461 tree
5462 component_ref_field_offset (tree exp)
5463 {
5464 tree aligned_offset = TREE_OPERAND (exp, 2);
5465 tree field = TREE_OPERAND (exp, 1);
5466
5467 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5468 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5469 value. */
5470 if (aligned_offset)
5471 return size_binop (MULT_EXPR, aligned_offset,
5472 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5473
5474 /* Otherwise, take the offset from that of the field. Substitute
5475 any PLACEHOLDER_EXPR that we have. */
5476 else
5477 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5478 }
5479
5480 /* Return 1 if T is an expression that get_inner_reference handles. */
5481
5482 int
5483 handled_component_p (tree t)
5484 {
5485 switch (TREE_CODE (t))
5486 {
5487 case BIT_FIELD_REF:
5488 case COMPONENT_REF:
5489 case ARRAY_REF:
5490 case ARRAY_RANGE_REF:
5491 case NON_LVALUE_EXPR:
5492 case VIEW_CONVERT_EXPR:
5493 return 1;
5494
5495 /* ??? Sure they are handled, but get_inner_reference may return
5496 a different PBITSIZE, depending upon whether the expression is
5497 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5498 case NOP_EXPR:
5499 case CONVERT_EXPR:
5500 return (TYPE_MODE (TREE_TYPE (t))
5501 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5502
5503 default:
5504 return 0;
5505 }
5506 }
5507 \f
5508 /* Given an rtx VALUE that may contain additions and multiplications, return
5509 an equivalent value that just refers to a register, memory, or constant.
5510 This is done by generating instructions to perform the arithmetic and
5511 returning a pseudo-register containing the value.
5512
5513 The returned value may be a REG, SUBREG, MEM or constant. */
5514
5515 rtx
5516 force_operand (rtx value, rtx target)
5517 {
5518 rtx op1, op2;
5519 /* Use subtarget as the target for operand 0 of a binary operation. */
5520 rtx subtarget = get_subtarget (target);
5521 enum rtx_code code = GET_CODE (value);
5522
5523 /* Check for subreg applied to an expression produced by the loop optimizer. */
5524 if (code == SUBREG
5525 && !REG_P (SUBREG_REG (value))
5526 && !MEM_P (SUBREG_REG (value)))
5527 {
5528 value = simplify_gen_subreg (GET_MODE (value),
5529 force_reg (GET_MODE (SUBREG_REG (value)),
5530 force_operand (SUBREG_REG (value),
5531 NULL_RTX)),
5532 GET_MODE (SUBREG_REG (value)),
5533 SUBREG_BYTE (value));
5534 code = GET_CODE (value);
5535 }
5536
5537 /* Check for a PIC address load. */
5538 if ((code == PLUS || code == MINUS)
5539 && XEXP (value, 0) == pic_offset_table_rtx
5540 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5541 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5542 || GET_CODE (XEXP (value, 1)) == CONST))
5543 {
5544 if (!subtarget)
5545 subtarget = gen_reg_rtx (GET_MODE (value));
5546 emit_move_insn (subtarget, value);
5547 return subtarget;
5548 }
5549
5550 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5551 {
5552 if (!target)
5553 target = gen_reg_rtx (GET_MODE (value));
5554 convert_move (target, force_operand (XEXP (value, 0), NULL),
5555 code == ZERO_EXTEND);
5556 return target;
5557 }
5558
5559 if (ARITHMETIC_P (value))
5560 {
5561 op2 = XEXP (value, 1);
5562 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5563 subtarget = 0;
5564 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5565 {
5566 code = PLUS;
5567 op2 = negate_rtx (GET_MODE (value), op2);
5568 }
5569
5570 /* Check for an addition with OP2 a constant integer and our first
5571 operand a PLUS of a virtual register and something else. In that
5572 case, we want to emit the sum of the virtual register and the
5573 constant first and then add the other value. This allows virtual
5574 register instantiation to simply modify the constant rather than
5575 creating another one around this addition. */
5576 if (code == PLUS && GET_CODE (op2) == CONST_INT
5577 && GET_CODE (XEXP (value, 0)) == PLUS
5578 && REG_P (XEXP (XEXP (value, 0), 0))
5579 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5580 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5581 {
5582 rtx temp = expand_simple_binop (GET_MODE (value), code,
5583 XEXP (XEXP (value, 0), 0), op2,
5584 subtarget, 0, OPTAB_LIB_WIDEN);
5585 return expand_simple_binop (GET_MODE (value), code, temp,
5586 force_operand (XEXP (XEXP (value,
5587 0), 1), 0),
5588 target, 0, OPTAB_LIB_WIDEN);
5589 }
5590
5591 op1 = force_operand (XEXP (value, 0), subtarget);
5592 op2 = force_operand (op2, NULL_RTX);
5593 switch (code)
5594 {
5595 case MULT:
5596 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5597 case DIV:
5598 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5599 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5600 target, 1, OPTAB_LIB_WIDEN);
5601 else
5602 return expand_divmod (0,
5603 FLOAT_MODE_P (GET_MODE (value))
5604 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5605 GET_MODE (value), op1, op2, target, 0);
5606 break;
5607 case MOD:
5608 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5609 target, 0);
5610 break;
5611 case UDIV:
5612 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5613 target, 1);
5614 break;
5615 case UMOD:
5616 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5617 target, 1);
5618 break;
5619 case ASHIFTRT:
5620 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5621 target, 0, OPTAB_LIB_WIDEN);
5622 break;
5623 default:
5624 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5625 target, 1, OPTAB_LIB_WIDEN);
5626 }
5627 }
5628 if (UNARY_P (value))
5629 {
5630 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5631 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5632 }
5633
5634 #ifdef INSN_SCHEDULING
5635 /* On machines that have insn scheduling, we want all memory references to be
5636 explicit, so we need to deal with such paradoxical SUBREGs. */
5637 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5638 && (GET_MODE_SIZE (GET_MODE (value))
5639 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5640 value
5641 = simplify_gen_subreg (GET_MODE (value),
5642 force_reg (GET_MODE (SUBREG_REG (value)),
5643 force_operand (SUBREG_REG (value),
5644 NULL_RTX)),
5645 GET_MODE (SUBREG_REG (value)),
5646 SUBREG_BYTE (value));
5647 #endif
5648
5649 return value;
5650 }
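
/* Editorial note: an illustrative sketch (not part of the original code) of
   what force_operand above is for.  Given a composite computation such as
   (plus (reg X) (reg Y)), it emits the arithmetic and hands back a simple
   operand; X and Y are hypothetical pseudos.

     rtx sum = force_operand (gen_rtx_PLUS (SImode, x, y), NULL_RTX);

   SUM is then a REG, MEM or constant, as the comment above promises.  */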
5651 \f
5652 /* Subroutine of expand_expr: return nonzero iff there is no way that
5653 EXP can reference X, which is being modified. TOP_P is nonzero if this
5654 call is going to be used to determine whether we need a temporary
5655 for EXP, as opposed to a recursive call to this function.
5656
5657 It is always safe for this routine to return zero since it merely
5658 searches for optimization opportunities. */
5659
5660 int
5661 safe_from_p (rtx x, tree exp, int top_p)
5662 {
5663 rtx exp_rtl = 0;
5664 int i, nops;
5665
5666 if (x == 0
5667 /* If EXP has varying size, we MUST use a target since we currently
5668 have no way of allocating temporaries of variable size
5669 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5670 So we assume here that something at a higher level has prevented a
5671 clash. This is somewhat bogus, but the best we can do. Only
5672 do this when X is BLKmode and when we are at the top level. */
5673 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5674 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5675 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5676 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5677 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5678 != INTEGER_CST)
5679 && GET_MODE (x) == BLKmode)
5680 /* If X is in the outgoing argument area, it is always safe. */
5681 || (MEM_P (x)
5682 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5683 || (GET_CODE (XEXP (x, 0)) == PLUS
5684 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5685 return 1;
5686
5687 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5688 find the underlying pseudo. */
5689 if (GET_CODE (x) == SUBREG)
5690 {
5691 x = SUBREG_REG (x);
5692 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5693 return 0;
5694 }
5695
5696 /* Now look at our tree code and possibly recurse. */
5697 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5698 {
5699 case 'd':
5700 exp_rtl = DECL_RTL_IF_SET (exp);
5701 break;
5702
5703 case 'c':
5704 return 1;
5705
5706 case 'x':
5707 if (TREE_CODE (exp) == TREE_LIST)
5708 {
5709 while (1)
5710 {
5711 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5712 return 0;
5713 exp = TREE_CHAIN (exp);
5714 if (!exp)
5715 return 1;
5716 if (TREE_CODE (exp) != TREE_LIST)
5717 return safe_from_p (x, exp, 0);
5718 }
5719 }
5720 else if (TREE_CODE (exp) == ERROR_MARK)
5721 return 1; /* An already-visited SAVE_EXPR? */
5722 else
5723 return 0;
5724
5725 case 's':
5726 /* The only case we look at here is the DECL_INITIAL inside a
5727 DECL_EXPR. */
5728 return (TREE_CODE (exp) != DECL_EXPR
5729 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5730 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5731 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5732
5733 case '2':
5734 case '<':
5735 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5736 return 0;
5737 /* Fall through. */
5738
5739 case '1':
5740 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5741
5742 case 'e':
5743 case 'r':
5744 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5745 the expression. If it is set, we conflict iff we are that rtx or
5746 both are in memory. Otherwise, we check all operands of the
5747 expression recursively. */
5748
5749 switch (TREE_CODE (exp))
5750 {
5751 case ADDR_EXPR:
5752 /* If the operand is static or we are static, we can't conflict.
5753 Likewise if we don't conflict with the operand at all. */
5754 if (staticp (TREE_OPERAND (exp, 0))
5755 || TREE_STATIC (exp)
5756 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5757 return 1;
5758
5759 /* Otherwise, the only way this can conflict is if we are taking
5760 the address of a DECL whose address is part of X, which is
5761 very rare. */
5762 exp = TREE_OPERAND (exp, 0);
5763 if (DECL_P (exp))
5764 {
5765 if (!DECL_RTL_SET_P (exp)
5766 || !MEM_P (DECL_RTL (exp)))
5767 return 0;
5768 else
5769 exp_rtl = XEXP (DECL_RTL (exp), 0);
5770 }
5771 break;
5772
5773 case INDIRECT_REF:
5774 if (MEM_P (x)
5775 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5776 get_alias_set (exp)))
5777 return 0;
5778 break;
5779
5780 case CALL_EXPR:
5781 /* Assume that the call will clobber all hard registers and
5782 all of memory. */
5783 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5784 || MEM_P (x))
5785 return 0;
5786 break;
5787
5788 case WITH_CLEANUP_EXPR:
5789 case CLEANUP_POINT_EXPR:
5790 /* Lowered by gimplify.c. */
5791 abort ();
5792
5793 case SAVE_EXPR:
5794 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5795
5796 case BIND_EXPR:
5797 /* The only operand we look at is operand 1. The rest aren't
5798 part of the expression. */
5799 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5800
5801 default:
5802 break;
5803 }
5804
5805 /* If we have an rtx, we do not need to scan our operands. */
5806 if (exp_rtl)
5807 break;
5808
5809 nops = first_rtl_op (TREE_CODE (exp));
5810 for (i = 0; i < nops; i++)
5811 if (TREE_OPERAND (exp, i) != 0
5812 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5813 return 0;
5814
5815 /* If this is a language-specific tree code, it may require
5816 special handling. */
5817 if ((unsigned int) TREE_CODE (exp)
5818 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5819 && !lang_hooks.safe_from_p (x, exp))
5820 return 0;
5821 }
5822
5823 /* If we have an rtl, find any enclosed object. Then see if we conflict
5824 with it. */
5825 if (exp_rtl)
5826 {
5827 if (GET_CODE (exp_rtl) == SUBREG)
5828 {
5829 exp_rtl = SUBREG_REG (exp_rtl);
5830 if (REG_P (exp_rtl)
5831 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5832 return 0;
5833 }
5834
5835 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5836 are memory and they conflict. */
5837 return ! (rtx_equal_p (x, exp_rtl)
5838 || (MEM_P (x) && MEM_P (exp_rtl)
5839 && true_dependence (exp_rtl, VOIDmode, x,
5840 rtx_addr_varies_p)));
5841 }
5842
5843 /* If we reach here, it is safe. */
5844 return 1;
5845 }
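
/* Editorial note: an illustrative use of safe_from_p, of exactly the kind
   made by expand_operands later in this file: before expanding EXP1 we make
   sure it cannot reference TARGET, and give up on TARGET otherwise.

     if (! safe_from_p (target, exp1, 1))
       target = 0;  */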
5846
5847 /* Subroutine of expand_expr: return rtx if EXP is a
5848 variable or parameter; else return 0. */
5849
5850 static rtx
5851 var_rtx (tree exp)
5852 {
5853 STRIP_NOPS (exp);
5854 switch (TREE_CODE (exp))
5855 {
5856 case PARM_DECL:
5857 case VAR_DECL:
5858 return DECL_RTL (exp);
5859 default:
5860 return 0;
5861 }
5862 }
5863 \f
5864 /* Return the highest power of two that EXP is known to be a multiple of.
5865 This is used in updating alignment of MEMs in array references. */
5866
5867 static unsigned HOST_WIDE_INT
5868 highest_pow2_factor (tree exp)
5869 {
5870 unsigned HOST_WIDE_INT c0, c1;
5871
5872 switch (TREE_CODE (exp))
5873 {
5874 case INTEGER_CST:
5875 /* We can find the lowest bit that's a one. If the low
5876 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5877 We need to handle this case since we can find it in a COND_EXPR,
5878 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5879 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5880 later ICE. */
5881 if (TREE_CONSTANT_OVERFLOW (exp))
5882 return BIGGEST_ALIGNMENT;
5883 else
5884 {
5885 /* Note: tree_low_cst is intentionally not used here;
5886 we don't care about the upper bits. */
5887 c0 = TREE_INT_CST_LOW (exp);
5888 c0 &= -c0;
5889 return c0 ? c0 : BIGGEST_ALIGNMENT;
5890 }
5891 break;
5892
5893 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5894 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5895 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5896 return MIN (c0, c1);
5897
5898 case MULT_EXPR:
5899 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5900 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5901 return c0 * c1;
5902
5903 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5904 case CEIL_DIV_EXPR:
5905 if (integer_pow2p (TREE_OPERAND (exp, 1))
5906 && host_integerp (TREE_OPERAND (exp, 1), 1))
5907 {
5908 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5909 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5910 return MAX (1, c0 / c1);
5911 }
5912 break;
5913
5914 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5915 case SAVE_EXPR:
5916 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5917
5918 case COMPOUND_EXPR:
5919 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5920
5921 case COND_EXPR:
5922 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5923 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5924 return MIN (c0, c1);
5925
5926 default:
5927 break;
5928 }
5929
5930 return 1;
5931 }
5932
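/* Editorial note: a small worked example for highest_pow2_factor above, with
   hypothetical operands.  For EXP = i * 12, the INTEGER_CST contributes 4
   (the largest power of two dividing 12) and the variable contributes 1, so
   the result is 4; for EXP = i * 8 it is 8.  The callers below use this
   factor to set the alignment of MEMs built for array references.  */
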
5933 /* Similar, except that the alignment requirements of TARGET are
5934 taken into account. Assume it is at least as aligned as its
5935 type, unless it is a COMPONENT_REF in which case the layout of
5936 the structure gives the alignment. */
5937
5938 static unsigned HOST_WIDE_INT
5939 highest_pow2_factor_for_target (tree target, tree exp)
5940 {
5941 unsigned HOST_WIDE_INT target_align, factor;
5942
5943 factor = highest_pow2_factor (exp);
5944 if (TREE_CODE (target) == COMPONENT_REF)
5945 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
5946 else
5947 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
5948 return MAX (factor, target_align);
5949 }
5950 \f
5951 /* Expands variable VAR. */
5952
5953 void
5954 expand_var (tree var)
5955 {
5956 if (DECL_EXTERNAL (var))
5957 return;
5958
5959 if (TREE_STATIC (var))
5960 /* If this is an inlined copy of a static local variable,
5961 look up the original decl. */
5962 var = DECL_ORIGIN (var);
5963
5964 if (TREE_STATIC (var)
5965 ? !TREE_ASM_WRITTEN (var)
5966 : !DECL_RTL_SET_P (var))
5967 {
5968 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
5969 {
5970 /* Prepare a mem & address for the decl. */
5971 rtx x;
5972
5973 if (TREE_STATIC (var))
5974 abort ();
5975
5976 x = gen_rtx_MEM (DECL_MODE (var),
5977 gen_reg_rtx (Pmode));
5978
5979 set_mem_attributes (x, var, 1);
5980 SET_DECL_RTL (var, x);
5981 }
5982 else if (lang_hooks.expand_decl (var))
5983 /* OK. */;
5984 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5985 expand_decl (var);
5986 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5987 rest_of_decl_compilation (var, NULL, 0, 0);
5988 else if (TREE_CODE (var) == TYPE_DECL
5989 || TREE_CODE (var) == CONST_DECL
5990 || TREE_CODE (var) == FUNCTION_DECL
5991 || TREE_CODE (var) == LABEL_DECL)
5992 /* No expansion needed. */;
5993 else
5994 abort ();
5995 }
5996 }
5997
5998 /* Expands declarations of variables in list VARS. */
5999
6000 static void
6001 expand_vars (tree vars)
6002 {
6003 for (; vars; vars = TREE_CHAIN (vars))
6004 {
6005 tree var = vars;
6006
6007 if (DECL_EXTERNAL (var))
6008 continue;
6009
6010 expand_var (var);
6011 expand_decl_init (var);
6012 }
6013 }
6014
6015 /* Subroutine of expand_expr. Expand the two operands of a binary
6016 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6017 The value may be stored in TARGET if TARGET is nonzero. The
6018 MODIFIER argument is as documented by expand_expr. */
6019
6020 static void
6021 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6022 enum expand_modifier modifier)
6023 {
6024 if (! safe_from_p (target, exp1, 1))
6025 target = 0;
6026 if (operand_equal_p (exp0, exp1, 0))
6027 {
6028 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6029 *op1 = copy_rtx (*op0);
6030 }
6031 else
6032 {
6033 /* If we need to preserve evaluation order, copy exp0 into its own
6034 temporary variable so that it can't be clobbered by exp1. */
6035 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6036 exp0 = save_expr (exp0);
6037 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6038 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6039 }
6040 }
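
/* Editorial note: an illustrative sketch (not original code) of how a binary
   expander typically uses expand_operands above; EXP and SUBTARGET are
   assumed to be in scope as they are in expand_expr_real_1 below.

     rtx op0, op1;
     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   OP0 and OP1 are then ready to be handed to expand_binop and friends.  */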
6041
6042 \f
6043 /* expand_expr: generate code for computing expression EXP.
6044 An rtx for the computed value is returned. The value is never null.
6045 In the case of a void EXP, const0_rtx is returned.
6046
6047 The value may be stored in TARGET if TARGET is nonzero.
6048 TARGET is just a suggestion; callers must assume that
6049 the rtx returned may not be the same as TARGET.
6050
6051 If TARGET is CONST0_RTX, it means that the value will be ignored.
6052
6053 If TMODE is not VOIDmode, it suggests generating the
6054 result in mode TMODE. But this is done only when convenient.
6055 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6056 TMODE is just a suggestion; callers must assume that
6057 the rtx returned may not have mode TMODE.
6058
6059 Note that TARGET may have neither TMODE nor MODE. In that case, it
6060 probably will not be used.
6061
6062 If MODIFIER is EXPAND_SUM then when EXP is an addition
6063 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6064 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6065 products as above, or REG or MEM, or constant.
6066 Ordinarily in such cases we would output mul or add instructions
6067 and then return a pseudo reg containing the sum.
6068
6069 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6070 it also marks a label as absolutely required (it can't be dead).
6071 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6072 This is used for outputting expressions used in initializers.
6073
6074 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6075 with a constant address even if that address is not normally legitimate.
6076 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6077
6078 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6079 a call parameter. Such targets require special care as we haven't yet
6080 marked TARGET so that it's safe from being trashed by libcalls. We
6081 don't want to use TARGET for anything but the final result;
6082 intermediate values must go elsewhere. Additionally, calls to
6083 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6084
6085 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6086 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6087 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6088 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6089 recursively. */
6090
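/* Editorial note: a hypothetical usage sketch of the interface documented
   above.  A caller that simply wants the value of EXP in its natural mode
   writes

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   while address arithmetic that can digest a PLUS/MULT nest passes
   EXPAND_SUM, and static initializers pass EXPAND_INITIALIZER.  VAL need not
   be TARGET and need not have mode TMODE, as stressed above.  */
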
6091 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6092 enum expand_modifier, rtx *);
6093
6094 rtx
6095 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6096 enum expand_modifier modifier, rtx *alt_rtl)
6097 {
6098 int rn = -1;
6099 rtx ret, last = NULL;
6100
6101 /* Handle ERROR_MARK before anybody tries to access its type. */
6102 if (TREE_CODE (exp) == ERROR_MARK
6103 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6104 {
6105 ret = CONST0_RTX (tmode);
6106 return ret ? ret : const0_rtx;
6107 }
6108
6109 if (flag_non_call_exceptions)
6110 {
6111 rn = lookup_stmt_eh_region (exp);
6112 /* If rn < 0, then either (1) tree-ssa is not used or (2) the expression doesn't throw. */
6113 if (rn >= 0)
6114 last = get_last_insn ();
6115 }
6116
6117 /* If this is an expression of some kind and it has an associated line
6118 number, then emit the line number before expanding the expression.
6119
6120 We need to save and restore the file and line information so that
6121 errors discovered during expansion are emitted with the right
6122 information. It would be better if the diagnostic routines
6123 used the file/line information embedded in the tree nodes rather
6124 than globals. */
6125 if (cfun && EXPR_HAS_LOCATION (exp))
6126 {
6127 location_t saved_location = input_location;
6128 input_location = EXPR_LOCATION (exp);
6129 emit_line_note (input_location);
6130
6131 /* Record where the insns produced belong. */
6132 record_block_change (TREE_BLOCK (exp));
6133
6134 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6135
6136 input_location = saved_location;
6137 }
6138 else
6139 {
6140 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6141 }
6142
6143 /* If using non-call exceptions, mark all insns that may trap.
6144 expand_call() will mark CALL_INSNs before we get to this code,
6145 but it doesn't handle libcalls, and these may trap. */
6146 if (rn >= 0)
6147 {
6148 rtx insn;
6149 for (insn = next_real_insn (last); insn;
6150 insn = next_real_insn (insn))
6151 {
6152 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6153 /* If we want exceptions for non-call insns, any
6154 may_trap_p instruction may throw. */
6155 && GET_CODE (PATTERN (insn)) != CLOBBER
6156 && GET_CODE (PATTERN (insn)) != USE
6157 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6158 {
6159 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6160 REG_NOTES (insn));
6161 }
6162 }
6163 }
6164
6165 return ret;
6166 }
6167
6168 static rtx
6169 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6170 enum expand_modifier modifier, rtx *alt_rtl)
6171 {
6172 rtx op0, op1, temp;
6173 tree type = TREE_TYPE (exp);
6174 int unsignedp;
6175 enum machine_mode mode;
6176 enum tree_code code = TREE_CODE (exp);
6177 optab this_optab;
6178 rtx subtarget, original_target;
6179 int ignore;
6180 tree context;
6181 bool reduce_bit_field = false;
6182 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6183 ? reduce_to_bit_field_precision ((expr), \
6184 target, \
6185 type) \
6186 : (expr))
6187
6188 mode = TYPE_MODE (type);
6189 unsignedp = TYPE_UNSIGNED (type);
6190 if (lang_hooks.reduce_bit_field_operations
6191 && TREE_CODE (type) == INTEGER_TYPE
6192 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6193 {
6194 /* An operation in what may be a bit-field type needs the
6195 result to be reduced to the precision of the bit-field type,
6196 which is narrower than that of the type's mode. */
6197 reduce_bit_field = true;
6198 if (modifier == EXPAND_STACK_PARM)
6199 target = 0;
6200 }
6201
6202 /* Use subtarget as the target for operand 0 of a binary operation. */
6203 subtarget = get_subtarget (target);
6204 original_target = target;
6205 ignore = (target == const0_rtx
6206 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6207 || code == CONVERT_EXPR || code == COND_EXPR
6208 || code == VIEW_CONVERT_EXPR)
6209 && TREE_CODE (type) == VOID_TYPE));
6210
6211 /* If we are going to ignore this result, we need only do something
6212 if there is a side-effect somewhere in the expression. If there
6213 is, short-circuit the most common cases here. Note that we must
6214 not call expand_expr with anything but const0_rtx in case this
6215 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6216
6217 if (ignore)
6218 {
6219 if (! TREE_SIDE_EFFECTS (exp))
6220 return const0_rtx;
6221
6222 /* Ensure we reference a volatile object even if value is ignored, but
6223 don't do this if all we are doing is taking its address. */
6224 if (TREE_THIS_VOLATILE (exp)
6225 && TREE_CODE (exp) != FUNCTION_DECL
6226 && mode != VOIDmode && mode != BLKmode
6227 && modifier != EXPAND_CONST_ADDRESS)
6228 {
6229 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6230 if (MEM_P (temp))
6231 temp = copy_to_reg (temp);
6232 return const0_rtx;
6233 }
6234
6235 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6236 || code == INDIRECT_REF)
6237 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6238 modifier);
6239
6240 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6241 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6242 {
6243 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6244 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6245 return const0_rtx;
6246 }
6247 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6248 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6249 /* If the second operand has no side effects, just evaluate
6250 the first. */
6251 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6252 modifier);
6253 else if (code == BIT_FIELD_REF)
6254 {
6255 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6256 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6257 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6258 return const0_rtx;
6259 }
6260
6261 target = 0;
6262 }
6263
6264 /* If we will do cse, generate all results into pseudo registers
6265 since 1) that allows cse to find more things
6266 and 2) otherwise cse could produce an insn the machine
6267 cannot support. An exception is a CONSTRUCTOR into a multi-word
6268 MEM: that's much more likely to be most efficient into the MEM.
6269 Another is a CALL_EXPR which must return in memory. */
6270
6271 if (! cse_not_expected && mode != BLKmode && target
6272 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6273 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6274 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6275 target = 0;
6276
6277 switch (code)
6278 {
6279 case LABEL_DECL:
6280 {
6281 tree function = decl_function_context (exp);
6282
6283 temp = label_rtx (exp);
6284 temp = gen_rtx_LABEL_REF (Pmode, temp);
6285
6286 if (function != current_function_decl
6287 && function != 0)
6288 LABEL_REF_NONLOCAL_P (temp) = 1;
6289
6290 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6291 return temp;
6292 }
6293
6294 case PARM_DECL:
6295 if (!DECL_RTL_SET_P (exp))
6296 {
6297 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6298 return CONST0_RTX (mode);
6299 }
6300
6301 /* ... fall through ... */
6302
6303 case VAR_DECL:
6304 /* If a static var's type was incomplete when the decl was written,
6305 but the type is complete now, lay out the decl now. */
6306 if (DECL_SIZE (exp) == 0
6307 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6308 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6309 layout_decl (exp, 0);
6310
6311 /* ... fall through ... */
6312
6313 case FUNCTION_DECL:
6314 case RESULT_DECL:
6315 if (DECL_RTL (exp) == 0)
6316 abort ();
6317
6318 /* Ensure the variable is marked as used even if it doesn't go through
6319 a parser. If it hasn't been used yet, write out an external
6320 definition. */
6321 if (! TREE_USED (exp))
6322 {
6323 assemble_external (exp);
6324 TREE_USED (exp) = 1;
6325 }
6326
6327 /* Show we haven't gotten RTL for this yet. */
6328 temp = 0;
6329
6330 /* Handle variables inherited from containing functions. */
6331 context = decl_function_context (exp);
6332
6333 if (context != 0 && context != current_function_decl
6334 /* If var is static, we don't need a static chain to access it. */
6335 && ! (MEM_P (DECL_RTL (exp))
6336 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6337 {
6338 rtx addr;
6339
6340 /* Mark as non-local and addressable. */
6341 DECL_NONLOCAL (exp) = 1;
6342 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6343 abort ();
6344 lang_hooks.mark_addressable (exp);
6345 if (!MEM_P (DECL_RTL (exp)))
6346 abort ();
6347 addr = XEXP (DECL_RTL (exp), 0);
6348 if (MEM_P (addr))
6349 addr
6350 = replace_equiv_address (addr,
6351 fix_lexical_addr (XEXP (addr, 0), exp));
6352 else
6353 addr = fix_lexical_addr (addr, exp);
6354
6355 temp = replace_equiv_address (DECL_RTL (exp), addr);
6356 }
6357
6358 /* This is the case of an array whose size is to be determined
6359 from its initializer, while the initializer is still being parsed.
6360 See expand_decl. */
6361
6362 else if (MEM_P (DECL_RTL (exp))
6363 && REG_P (XEXP (DECL_RTL (exp), 0)))
6364 temp = validize_mem (DECL_RTL (exp));
6365
6366 /* If DECL_RTL is memory, we are in the normal case and either
6367 the address is not valid or it is not a register and -fforce-addr
6368 is specified, get the address into a register. */
6369
6370 else if (MEM_P (DECL_RTL (exp))
6371 && modifier != EXPAND_CONST_ADDRESS
6372 && modifier != EXPAND_SUM
6373 && modifier != EXPAND_INITIALIZER
6374 && (! memory_address_p (DECL_MODE (exp),
6375 XEXP (DECL_RTL (exp), 0))
6376 || (flag_force_addr
6377 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6378 {
6379 if (alt_rtl)
6380 *alt_rtl = DECL_RTL (exp);
6381 temp = replace_equiv_address (DECL_RTL (exp),
6382 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6383 }
6384
6385 /* If we got something, return it. But first, set the alignment
6386 if the address is a register. */
6387 if (temp != 0)
6388 {
6389 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6390 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6391
6392 return temp;
6393 }
6394
6395 /* If the mode of DECL_RTL does not match that of the decl, it
6396 must be a promoted value. We return a SUBREG of the wanted mode,
6397 but mark it so that we know that it was already extended. */
6398
6399 if (REG_P (DECL_RTL (exp))
6400 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6401 {
6402 /* Get the signedness used for this variable. Ensure we get the
6403 same mode we got when the variable was declared. */
6404 if (GET_MODE (DECL_RTL (exp))
6405 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6406 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6407 abort ();
6408
6409 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6410 SUBREG_PROMOTED_VAR_P (temp) = 1;
6411 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6412 return temp;
6413 }
6414
6415 return DECL_RTL (exp);
6416
6417 case INTEGER_CST:
6418 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6419 TREE_INT_CST_HIGH (exp), mode);
6420
6421 /* ??? If overflow is set, fold will have done an incomplete job,
6422 which can result in (plus xx (const_int 0)), which can get
6423 simplified by validate_replace_rtx during virtual register
6424 instantiation, which can result in unrecognizable insns.
6425 Avoid this by forcing all overflows into registers. */
6426 if (TREE_CONSTANT_OVERFLOW (exp)
6427 && modifier != EXPAND_INITIALIZER)
6428 temp = force_reg (mode, temp);
6429
6430 return temp;
6431
6432 case VECTOR_CST:
6433 return const_vector_from_tree (exp);
6434
6435 case CONST_DECL:
6436 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6437
6438 case REAL_CST:
6439 /* If optimized, generate immediate CONST_DOUBLE
6440 which will be turned into memory by reload if necessary.
6441
6442 We used to force a register so that loop.c could see it. But
6443 this does not allow gen_* patterns to perform optimizations with
6444 the constants. It also produces two insns in cases like "x = 1.0;".
6445 On most machines, floating-point constants are not permitted in
6446 many insns, so we'd end up copying it to a register in any case.
6447
6448 Now, we do the copying in expand_binop, if appropriate. */
6449 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6450 TYPE_MODE (TREE_TYPE (exp)));
6451
6452 case COMPLEX_CST:
6453 /* Handle evaluating a complex constant in a CONCAT target. */
6454 if (original_target && GET_CODE (original_target) == CONCAT)
6455 {
6456 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6457 rtx rtarg, itarg;
6458
6459 rtarg = XEXP (original_target, 0);
6460 itarg = XEXP (original_target, 1);
6461
6462 /* Move the real and imaginary parts separately. */
6463 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6464 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6465
6466 if (op0 != rtarg)
6467 emit_move_insn (rtarg, op0);
6468 if (op1 != itarg)
6469 emit_move_insn (itarg, op1);
6470
6471 return original_target;
6472 }
6473
6474 /* ... fall through ... */
6475
6476 case STRING_CST:
6477 temp = output_constant_def (exp, 1);
6478
6479 /* temp contains a constant address.
6480 On RISC machines where a constant address isn't valid,
6481 make some insns to get that address into a register. */
6482 if (modifier != EXPAND_CONST_ADDRESS
6483 && modifier != EXPAND_INITIALIZER
6484 && modifier != EXPAND_SUM
6485 && (! memory_address_p (mode, XEXP (temp, 0))
6486 || flag_force_addr))
6487 return replace_equiv_address (temp,
6488 copy_rtx (XEXP (temp, 0)));
6489 return temp;
6490
6491 case SAVE_EXPR:
6492 {
6493 tree val = TREE_OPERAND (exp, 0);
6494 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6495
6496 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6497 {
6498 /* We can indeed still hit this case, typically via builtin
6499 expanders calling save_expr immediately before expanding
6500 something. Assume this means that we only have to deal
6501 with non-BLKmode values. */
6502 if (GET_MODE (ret) == BLKmode)
6503 abort ();
6504
6505 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6506 DECL_ARTIFICIAL (val) = 1;
6507 TREE_OPERAND (exp, 0) = val;
6508
6509 if (!CONSTANT_P (ret))
6510 ret = copy_to_reg (ret);
6511 SET_DECL_RTL (val, ret);
6512 }
6513
6514 return ret;
6515 }
6516
6517 case UNSAVE_EXPR:
6518 {
6519 rtx temp;
6520 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6521 TREE_OPERAND (exp, 0)
6522 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6523 return temp;
6524 }
6525
6526 case GOTO_EXPR:
6527 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6528 expand_goto (TREE_OPERAND (exp, 0));
6529 else
6530 expand_computed_goto (TREE_OPERAND (exp, 0));
6531 return const0_rtx;
6532
6533 /* These are lowered during gimplification, so we should never ever
6534 see them here. */
6535 case LOOP_EXPR:
6536 case EXIT_EXPR:
6537 abort ();
6538
6539 case LABELED_BLOCK_EXPR:
6540 if (LABELED_BLOCK_BODY (exp))
6541 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6542 /* Should perhaps use expand_label, but this is simpler and safer. */
6543 do_pending_stack_adjust ();
6544 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6545 return const0_rtx;
6546
6547 case EXIT_BLOCK_EXPR:
6548 if (EXIT_BLOCK_RETURN (exp))
6549 sorry ("returned value in block_exit_expr");
6550 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6551 return const0_rtx;
6552
6553 case BIND_EXPR:
6554 {
6555 tree block = BIND_EXPR_BLOCK (exp);
6556 int mark_ends;
6557
6558 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6559 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6560 mark_ends = (block != NULL_TREE);
6561 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6562
6563 /* If VARS have not yet been expanded, expand them now. */
6564 expand_vars (BIND_EXPR_VARS (exp));
6565
6566 /* TARGET was clobbered early in this function. The correct
6567 indicator of whether or not we need the value of this
6568 expression is the IGNORE variable. */
6569 temp = expand_expr (BIND_EXPR_BODY (exp),
6570 ignore ? const0_rtx : target,
6571 tmode, modifier);
6572
6573 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6574
6575 return temp;
6576 }
6577
6578 case CONSTRUCTOR:
6579 /* If we don't need the result, just ensure we evaluate any
6580 subexpressions. */
6581 if (ignore)
6582 {
6583 tree elt;
6584
6585 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6586 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6587
6588 return const0_rtx;
6589 }
6590
6591 /* All elts simple constants => refer to a constant in memory. But
6592 if this is a non-BLKmode mode, let it store a field at a time
6593 since that should make a CONST_INT or CONST_DOUBLE when we
6594 fold. Likewise, if we have a target we can use, it is best to
6595 store directly into the target unless the type is large enough
6596 that memcpy will be used. If we are making an initializer and
6597 all operands are constant, put it in memory as well.
6598
6599 FIXME: Avoid trying to fill vector constructors piecemeal.
6600 Output them with output_constant_def below unless we're sure
6601 they're zeros. This should go away when vector initializers
6602 are treated like VECTOR_CST instead of arrays.
6603 */
6604 else if ((TREE_STATIC (exp)
6605 && ((mode == BLKmode
6606 && ! (target != 0 && safe_from_p (target, exp, 1)))
6607 || TREE_ADDRESSABLE (exp)
6608 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6609 && (! MOVE_BY_PIECES_P
6610 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6611 TYPE_ALIGN (type)))
6612 && ! mostly_zeros_p (exp))))
6613 || ((modifier == EXPAND_INITIALIZER
6614 || modifier == EXPAND_CONST_ADDRESS)
6615 && TREE_CONSTANT (exp)))
6616 {
6617 rtx constructor = output_constant_def (exp, 1);
6618
6619 if (modifier != EXPAND_CONST_ADDRESS
6620 && modifier != EXPAND_INITIALIZER
6621 && modifier != EXPAND_SUM)
6622 constructor = validize_mem (constructor);
6623
6624 return constructor;
6625 }
6626 else
6627 {
6628 /* Handle calls that pass values in multiple non-contiguous
6629 locations. The Irix 6 ABI has examples of this. */
6630 if (target == 0 || ! safe_from_p (target, exp, 1)
6631 || GET_CODE (target) == PARALLEL
6632 || modifier == EXPAND_STACK_PARM)
6633 target
6634 = assign_temp (build_qualified_type (type,
6635 (TYPE_QUALS (type)
6636 | (TREE_READONLY (exp)
6637 * TYPE_QUAL_CONST))),
6638 0, TREE_ADDRESSABLE (exp), 1);
6639
6640 store_constructor (exp, target, 0, int_expr_size (exp));
6641 return target;
6642 }
6643
6644 case INDIRECT_REF:
6645 {
6646 tree exp1 = TREE_OPERAND (exp, 0);
6647
6648 if (modifier != EXPAND_WRITE)
6649 {
6650 tree t;
6651
6652 t = fold_read_from_constant_string (exp);
6653 if (t)
6654 return expand_expr (t, target, tmode, modifier);
6655 }
6656
6657 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6658 op0 = memory_address (mode, op0);
6659 temp = gen_rtx_MEM (mode, op0);
6660 set_mem_attributes (temp, exp, 0);
6661
6662 /* If we are writing to this object and its type is a record with
6663 readonly fields, we must mark it as readonly so it will
6664 conflict with readonly references to those fields. */
6665 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6666 RTX_UNCHANGING_P (temp) = 1;
6667
6668 return temp;
6669 }
6670
6671 case ARRAY_REF:
6672
6673 #ifdef ENABLE_CHECKING
6674 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6675 abort ();
6676 #endif
6677
6678 {
6679 tree array = TREE_OPERAND (exp, 0);
6680 tree low_bound = array_ref_low_bound (exp);
6681 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6682 HOST_WIDE_INT i;
6683
6684 /* Optimize the special-case of a zero lower bound.
6685
6686 We convert the low_bound to sizetype to avoid some problems
6687 with constant folding. (E.g. suppose the lower bound is 1,
6688 and its mode is QI. Without the conversion, (ARRAY
6689 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6690 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6691
6692 if (! integer_zerop (low_bound))
6693 index = size_diffop (index, convert (sizetype, low_bound));
6694
6695 /* Fold an expression like: "foo"[2].
6696 This is not done in fold so it won't happen inside &.
6697 Don't fold if this is for wide characters since it's too
6698 difficult to do correctly and this is a very rare case. */
6699
6700 if (modifier != EXPAND_CONST_ADDRESS
6701 && modifier != EXPAND_INITIALIZER
6702 && modifier != EXPAND_MEMORY)
6703 {
6704 tree t = fold_read_from_constant_string (exp);
6705
6706 if (t)
6707 return expand_expr (t, target, tmode, modifier);
6708 }
6709
6710 /* If this is a constant index into a constant array,
6711 just get the value from the array. Handle both the cases when
6712 we have an explicit constructor and when our operand is a variable
6713 that was declared const. */
6714
6715 if (modifier != EXPAND_CONST_ADDRESS
6716 && modifier != EXPAND_INITIALIZER
6717 && modifier != EXPAND_MEMORY
6718 && TREE_CODE (array) == CONSTRUCTOR
6719 && ! TREE_SIDE_EFFECTS (array)
6720 && TREE_CODE (index) == INTEGER_CST
6721 && 0 > compare_tree_int (index,
6722 list_length (CONSTRUCTOR_ELTS
6723 (TREE_OPERAND (exp, 0)))))
6724 {
6725 tree elem;
6726
6727 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6728 i = TREE_INT_CST_LOW (index);
6729 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6730 ;
6731
6732 if (elem)
6733 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6734 modifier);
6735 }
6736
6737 else if (optimize >= 1
6738 && modifier != EXPAND_CONST_ADDRESS
6739 && modifier != EXPAND_INITIALIZER
6740 && modifier != EXPAND_MEMORY
6741 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6742 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6743 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6744 && targetm.binds_local_p (array))
6745 {
6746 if (TREE_CODE (index) == INTEGER_CST)
6747 {
6748 tree init = DECL_INITIAL (array);
6749
6750 if (TREE_CODE (init) == CONSTRUCTOR)
6751 {
6752 tree elem;
6753
6754 for (elem = CONSTRUCTOR_ELTS (init);
6755 (elem
6756 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6757 elem = TREE_CHAIN (elem))
6758 ;
6759
6760 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6761 return expand_expr (fold (TREE_VALUE (elem)), target,
6762 tmode, modifier);
6763 }
6764 else if (TREE_CODE (init) == STRING_CST
6765 && 0 > compare_tree_int (index,
6766 TREE_STRING_LENGTH (init)))
6767 {
6768 tree type = TREE_TYPE (TREE_TYPE (init));
6769 enum machine_mode mode = TYPE_MODE (type);
6770
6771 if (GET_MODE_CLASS (mode) == MODE_INT
6772 && GET_MODE_SIZE (mode) == 1)
6773 return gen_int_mode (TREE_STRING_POINTER (init)
6774 [TREE_INT_CST_LOW (index)], mode);
6775 }
6776 }
6777 }
6778 }
6779 goto normal_inner_ref;
6780
6781 case COMPONENT_REF:
6782 /* If the operand is a CONSTRUCTOR, we can just extract the
6783 appropriate field if it is present. */
6784 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6785 {
6786 tree elt;
6787
6788 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6789 elt = TREE_CHAIN (elt))
6790 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6791 /* We can normally use the value of the field in the
6792 CONSTRUCTOR. However, if this is a bitfield in
6793 an integral mode that we can fit in a HOST_WIDE_INT,
6794 we must mask only the number of bits in the bitfield,
6795 since this is done implicitly by the constructor. If
6796 the bitfield does not meet either of those conditions,
6797 we can't do this optimization. */
6798 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6799 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6800 == MODE_INT)
6801 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6802 <= HOST_BITS_PER_WIDE_INT))))
6803 {
6804 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6805 && modifier == EXPAND_STACK_PARM)
6806 target = 0;
6807 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6808 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6809 {
6810 HOST_WIDE_INT bitsize
6811 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6812 enum machine_mode imode
6813 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6814
6815 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6816 {
6817 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6818 op0 = expand_and (imode, op0, op1, target);
6819 }
6820 else
6821 {
6822 tree count
6823 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6824 0);
6825
6826 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6827 target, 0);
6828 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6829 target, 0);
6830 }
6831 }
6832
6833 return op0;
6834 }
6835 }
6836 goto normal_inner_ref;
6837
6838 case BIT_FIELD_REF:
6839 case ARRAY_RANGE_REF:
6840 normal_inner_ref:
6841 {
6842 enum machine_mode mode1;
6843 HOST_WIDE_INT bitsize, bitpos;
6844 tree offset;
6845 int volatilep = 0;
6846 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6847 &mode1, &unsignedp, &volatilep);
6848 rtx orig_op0;
6849
6850 /* If we got back the original object, something is wrong. Perhaps
6851 we are evaluating an expression too early. In any event, don't
6852 infinitely recurse. */
6853 if (tem == exp)
6854 abort ();
6855
6856 /* If TEM's type is a union of variable size, pass TARGET to the inner
6857 computation, since it will need a temporary and TARGET is known
6858 to suffice. This occurs in unchecked conversion in Ada. */
6859
6860 orig_op0 = op0
6861 = expand_expr (tem,
6862 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6863 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6864 != INTEGER_CST)
6865 && modifier != EXPAND_STACK_PARM
6866 ? target : NULL_RTX),
6867 VOIDmode,
6868 (modifier == EXPAND_INITIALIZER
6869 || modifier == EXPAND_CONST_ADDRESS
6870 || modifier == EXPAND_STACK_PARM)
6871 ? modifier : EXPAND_NORMAL);
6872
6873 /* If this is a constant, put it into a register if it is a
6874 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6875 if (CONSTANT_P (op0))
6876 {
6877 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6878 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6879 && offset == 0)
6880 op0 = force_reg (mode, op0);
6881 else
6882 op0 = validize_mem (force_const_mem (mode, op0));
6883 }
6884
6885 /* Otherwise, if this object is not in memory and we either have an
6886 offset or a BLKmode result, put it there. This case can't occur in
6887 C, but can in Ada if we have unchecked conversion of an expression
6888 from a scalar type to an array or record type or for an
6889 ARRAY_RANGE_REF whose type is BLKmode. */
6890 else if (!MEM_P (op0)
6891 && (offset != 0
6892 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6893 {
6894 tree nt = build_qualified_type (TREE_TYPE (tem),
6895 (TYPE_QUALS (TREE_TYPE (tem))
6896 | TYPE_QUAL_CONST));
6897 rtx memloc = assign_temp (nt, 1, 1, 1);
6898
6899 emit_move_insn (memloc, op0);
6900 op0 = memloc;
6901 }
6902
6903 if (offset != 0)
6904 {
6905 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6906 EXPAND_SUM);
6907
6908 if (!MEM_P (op0))
6909 abort ();
6910
6911 #ifdef POINTERS_EXTEND_UNSIGNED
6912 if (GET_MODE (offset_rtx) != Pmode)
6913 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6914 #else
6915 if (GET_MODE (offset_rtx) != ptr_mode)
6916 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6917 #endif
6918
6919 if (GET_MODE (op0) == BLKmode
6920 /* A constant address in OP0 can have VOIDmode; we must
6921 not try to call force_reg in that case. */
6922 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6923 && bitsize != 0
6924 && (bitpos % bitsize) == 0
6925 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6926 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6927 {
6928 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6929 bitpos = 0;
6930 }
6931
6932 op0 = offset_address (op0, offset_rtx,
6933 highest_pow2_factor (offset));
6934 }
6935
6936 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6937 record its alignment as BIGGEST_ALIGNMENT. */
6938 if (MEM_P (op0) && bitpos == 0 && offset != 0
6939 && is_aligning_offset (offset, tem))
6940 set_mem_align (op0, BIGGEST_ALIGNMENT);
6941
6942 /* Don't forget about volatility even if this is a bitfield. */
6943 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6944 {
6945 if (op0 == orig_op0)
6946 op0 = copy_rtx (op0);
6947
6948 MEM_VOLATILE_P (op0) = 1;
6949 }
6950
6951 /* The following code doesn't handle CONCAT.
6952 Assume only bitpos == 0 can be used for CONCAT, due to
6953 one-element arrays having the same mode as their element. */
6954 if (GET_CODE (op0) == CONCAT)
6955 {
6956 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6957 abort ();
6958 return op0;
6959 }
6960
6961 /* In cases where an aligned union has an unaligned object
6962 as a field, we might be extracting a BLKmode value from
6963 an integer-mode (e.g., SImode) object. Handle this case
6964 by doing the extract into an object as wide as the field
6965 (which we know to be the width of a basic mode), then
6966 storing into memory, and changing the mode to BLKmode. */
6967 if (mode1 == VOIDmode
6968 || REG_P (op0) || GET_CODE (op0) == SUBREG
6969 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6970 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6971 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6972 && modifier != EXPAND_CONST_ADDRESS
6973 && modifier != EXPAND_INITIALIZER)
6974 /* If the field isn't aligned enough to fetch as a memref,
6975 fetch it as a bit field. */
6976 || (mode1 != BLKmode
6977 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6978 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6979 || (MEM_P (op0)
6980 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6981 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6982 && ((modifier == EXPAND_CONST_ADDRESS
6983 || modifier == EXPAND_INITIALIZER)
6984 ? STRICT_ALIGNMENT
6985 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6986 || (bitpos % BITS_PER_UNIT != 0)))
6987 /* If the type and the field are a constant size and the
6988 size of the type isn't the same size as the bitfield,
6989 we must use bitfield operations. */
6990 || (bitsize >= 0
6991 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6992 == INTEGER_CST)
6993 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6994 bitsize)))
6995 {
6996 enum machine_mode ext_mode = mode;
6997
6998 if (ext_mode == BLKmode
6999 && ! (target != 0 && MEM_P (op0)
7000 && MEM_P (target)
7001 && bitpos % BITS_PER_UNIT == 0))
7002 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7003
7004 if (ext_mode == BLKmode)
7005 {
7006 if (target == 0)
7007 target = assign_temp (type, 0, 1, 1);
7008
7009 if (bitsize == 0)
7010 return target;
7011
7012 /* In this case, BITPOS must start at a byte boundary and
7013 TARGET, if specified, must be a MEM. */
7014 if (!MEM_P (op0)
7015 || (target != 0 && !MEM_P (target))
7016 || bitpos % BITS_PER_UNIT != 0)
7017 abort ();
7018
7019 emit_block_move (target,
7020 adjust_address (op0, VOIDmode,
7021 bitpos / BITS_PER_UNIT),
7022 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7023 / BITS_PER_UNIT),
7024 (modifier == EXPAND_STACK_PARM
7025 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7026
7027 return target;
7028 }
7029
7030 op0 = validize_mem (op0);
7031
7032 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7033 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7034
7035 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7036 (modifier == EXPAND_STACK_PARM
7037 ? NULL_RTX : target),
7038 ext_mode, ext_mode);
7039
7040 /* If the result is a record type and BITSIZE is narrower than
7041 the mode of OP0, an integral mode, and this is a big endian
7042 machine, we must put the field into the high-order bits. */
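/* For instance, on a hypothetical 32-bit big-endian target, a 3-bit
field extracted into an SImode register sits in the low-order bits;
the shift below moves it left by 32 - 3 = 29 so that it occupies the
high-order bits instead. */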
7043 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7044 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7045 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7046 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7047 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7048 - bitsize),
7049 op0, 1);
7050
7051 /* If the result type is BLKmode, store the data into a temporary
7052 of the appropriate type, but with the mode corresponding to the
7053 mode for the data we have (op0's mode). It's tempting to make
7054 this a constant type, since we know it's only being stored once,
7055 but that can cause problems if we are taking the address of this
7056 COMPONENT_REF because the MEM of any reference via that address
7057 will have flags corresponding to the type, which will not
7058 necessarily be constant. */
7059 if (mode == BLKmode)
7060 {
7061 rtx new
7062 = assign_stack_temp_for_type
7063 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7064
7065 emit_move_insn (new, op0);
7066 op0 = copy_rtx (new);
7067 PUT_MODE (op0, BLKmode);
7068 set_mem_attributes (op0, exp, 1);
7069 }
7070
7071 return op0;
7072 }
7073
7074 /* If the result is BLKmode, use that to access the object
7075 now as well. */
7076 if (mode == BLKmode)
7077 mode1 = BLKmode;
7078
7079 /* Get a reference to just this component. */
7080 if (modifier == EXPAND_CONST_ADDRESS
7081 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7082 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7083 else
7084 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7085
7086 if (op0 == orig_op0)
7087 op0 = copy_rtx (op0);
7088
7089 set_mem_attributes (op0, exp, 0);
7090 if (REG_P (XEXP (op0, 0)))
7091 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7092
7093 MEM_VOLATILE_P (op0) |= volatilep;
7094 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7095 || modifier == EXPAND_CONST_ADDRESS
7096 || modifier == EXPAND_INITIALIZER)
7097 return op0;
7098 else if (target == 0)
7099 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7100
7101 convert_move (target, op0, unsignedp);
7102 return target;
7103 }
7104
7105 case OBJ_TYPE_REF:
7106 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7107
7108 case CALL_EXPR:
7109 /* Check for a built-in function. */
7110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7111 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7112 == FUNCTION_DECL)
7113 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7114 {
7115 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7116 == BUILT_IN_FRONTEND)
7117 return lang_hooks.expand_expr (exp, original_target,
7118 tmode, modifier,
7119 alt_rtl);
7120 else
7121 return expand_builtin (exp, target, subtarget, tmode, ignore);
7122 }
7123
7124 return expand_call (exp, target, ignore);
7125
7126 case NON_LVALUE_EXPR:
7127 case NOP_EXPR:
7128 case CONVERT_EXPR:
7129 if (TREE_OPERAND (exp, 0) == error_mark_node)
7130 return const0_rtx;
7131
7132 if (TREE_CODE (type) == UNION_TYPE)
7133 {
7134 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7135
7136 /* If both input and output are BLKmode, this conversion isn't doing
7137 anything except possibly changing memory attributes. */
7138 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7139 {
7140 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7141 modifier);
7142
7143 result = copy_rtx (result);
7144 set_mem_attributes (result, exp, 0);
7145 return result;
7146 }
7147
7148 if (target == 0)
7149 {
7150 if (TYPE_MODE (type) != BLKmode)
7151 target = gen_reg_rtx (TYPE_MODE (type));
7152 else
7153 target = assign_temp (type, 0, 1, 1);
7154 }
7155
7156 if (MEM_P (target))
7157 /* Store data into beginning of memory target. */
7158 store_expr (TREE_OPERAND (exp, 0),
7159 adjust_address (target, TYPE_MODE (valtype), 0),
7160 modifier == EXPAND_STACK_PARM ? 2 : 0);
7161
7162 else if (REG_P (target))
7163 /* Store this field into a union of the proper type. */
7164 store_field (target,
7165 MIN ((int_size_in_bytes (TREE_TYPE
7166 (TREE_OPERAND (exp, 0)))
7167 * BITS_PER_UNIT),
7168 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7169 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7170 VOIDmode, 0, type, 0);
7171 else
7172 abort ();
7173
7174 /* Return the entire union. */
7175 return target;
7176 }
7177
7178 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7179 {
7180 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7181 modifier);
7182
7183 /* If the signedness of the conversion differs and OP0 is
7184 a promoted SUBREG, clear that indication since we now
7185 have to do the proper extension. */
7186 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7187 && GET_CODE (op0) == SUBREG)
7188 SUBREG_PROMOTED_VAR_P (op0) = 0;
7189
7190 return REDUCE_BIT_FIELD (op0);
7191 }
7192
7193 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7194 op0 = REDUCE_BIT_FIELD (op0);
7195 if (GET_MODE (op0) == mode)
7196 return op0;
7197
7198 /* If OP0 is a constant, just convert it into the proper mode. */
7199 if (CONSTANT_P (op0))
7200 {
7201 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7202 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7203
7204 if (modifier == EXPAND_INITIALIZER)
7205 return simplify_gen_subreg (mode, op0, inner_mode,
7206 subreg_lowpart_offset (mode,
7207 inner_mode));
7208 else
7209 return convert_modes (mode, inner_mode, op0,
7210 TYPE_UNSIGNED (inner_type));
7211 }
7212
7213 if (modifier == EXPAND_INITIALIZER)
7214 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7215
7216 if (target == 0)
7217 return
7218 convert_to_mode (mode, op0,
7219 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7220 else
7221 convert_move (target, op0,
7222 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7223 return target;
7224
7225 case VIEW_CONVERT_EXPR:
7226 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7227
7228 /* If the input and output modes are both the same, we are done.
7229 Otherwise, if neither mode is BLKmode and both are integral and within
7230 a word, we can use gen_lowpart. If neither is true, make sure the
7231 operand is in memory and convert the MEM to the new mode. */
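/* For example, an SImode value being viewed as a 4-byte BLKmode record
cannot go through gen_lowpart, so if it is not already in memory it is
spilled to a stack temporary below and that MEM is then accessed in
the new mode. */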
7232 if (TYPE_MODE (type) == GET_MODE (op0))
7233 ;
7234 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7235 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7236 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7237 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7238 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7239 op0 = gen_lowpart (TYPE_MODE (type), op0);
7240 else if (!MEM_P (op0))
7241 {
7242 /* If the operand is not a MEM, force it into memory. Since we
7243 are going to be changing the mode of the MEM, don't call
7244 force_const_mem for constants because we don't allow pool
7245 constants to change mode. */
7246 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7247
7248 if (TREE_ADDRESSABLE (exp))
7249 abort ();
7250
7251 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7252 target
7253 = assign_stack_temp_for_type
7254 (TYPE_MODE (inner_type),
7255 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7256
7257 emit_move_insn (target, op0);
7258 op0 = target;
7259 }
7260
7261 /* At this point, OP0 is in the correct mode. If the output type is such
7262 that the operand is known to be aligned, indicate that it is.
7263 Otherwise, we need only be concerned about alignment for non-BLKmode
7264 results. */
7265 if (MEM_P (op0))
7266 {
7267 op0 = copy_rtx (op0);
7268
7269 if (TYPE_ALIGN_OK (type))
7270 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7271 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7272 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7273 {
7274 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7275 HOST_WIDE_INT temp_size
7276 = MAX (int_size_in_bytes (inner_type),
7277 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7278 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7279 temp_size, 0, type);
7280 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7281
7282 if (TREE_ADDRESSABLE (exp))
7283 abort ();
7284
7285 if (GET_MODE (op0) == BLKmode)
7286 emit_block_move (new_with_op0_mode, op0,
7287 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7288 (modifier == EXPAND_STACK_PARM
7289 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7290 else
7291 emit_move_insn (new_with_op0_mode, op0);
7292
7293 op0 = new;
7294 }
7295
7296 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7297 }
7298
7299 return op0;
7300
7301 case PLUS_EXPR:
7302 this_optab = ! unsignedp && flag_trapv
7303 && (GET_MODE_CLASS (mode) == MODE_INT)
7304 ? addv_optab : add_optab;
7305
7306 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7307 something else, make sure we add the register to the constant and
7308 then to the other thing. This case can occur during strength
7309 reduction and doing it this way will produce better code if the
7310 frame pointer or argument pointer is eliminated.
7311
7312 fold-const.c will ensure that the constant is always in the inner
7313 PLUS_EXPR, so the only case we need to do anything about is if
7314 sp, ap, or fp is our second argument, in which case we must swap
7315 the innermost first argument and our second argument. */
7316
7317 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7318 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7319 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7320 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7321 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7322 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7323 {
7324 tree t = TREE_OPERAND (exp, 1);
7325
7326 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7327 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7328 }
7329
7330 /* If the result is to be ptr_mode and we are adding an integer to
7331 something, we might be forming a constant. So try to use
7332 plus_constant. If it produces a sum and we can't accept it,
7333 use force_operand. This allows P = &ARR[const] to generate
7334 efficient code on machines where a SYMBOL_REF is not a valid
7335 address.
7336
7337 If this is an EXPAND_SUM call, always return the sum. */
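/* For instance, P = &ARR[4] for a global array of 4-byte integers wants
to become (plus (symbol_ref "ARR") (const_int 16)); plus_constant
builds that sum, and force_operand is used only when the caller cannot
accept such a bare sum as an address. */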
7338 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7339 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7340 {
7341 if (modifier == EXPAND_STACK_PARM)
7342 target = 0;
7343 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7345 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7346 {
7347 rtx constant_part;
7348
7349 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7350 EXPAND_SUM);
7351 /* Use immed_double_const to ensure that the constant is
7352 truncated according to the mode of OP1, then sign extended
7353 to a HOST_WIDE_INT. Using the constant directly can result
7354 in non-canonical RTL in a 64x32 cross compile. */
7355 constant_part
7356 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7357 (HOST_WIDE_INT) 0,
7358 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7359 op1 = plus_constant (op1, INTVAL (constant_part));
7360 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7361 op1 = force_operand (op1, target);
7362 return REDUCE_BIT_FIELD (op1);
7363 }
7364
7365 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7367 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7368 {
7369 rtx constant_part;
7370
7371 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7372 (modifier == EXPAND_INITIALIZER
7373 ? EXPAND_INITIALIZER : EXPAND_SUM));
7374 if (! CONSTANT_P (op0))
7375 {
7376 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7377 VOIDmode, modifier);
7378 /* Return a PLUS if modifier says it's OK. */
7379 if (modifier == EXPAND_SUM
7380 || modifier == EXPAND_INITIALIZER)
7381 return simplify_gen_binary (PLUS, mode, op0, op1);
7382 goto binop2;
7383 }
7384 /* Use immed_double_const to ensure that the constant is
7385 truncated according to the mode of OP0, then sign extended
7386 to a HOST_WIDE_INT. Using the constant directly can result
7387 in non-canonical RTL in a 64x32 cross compile. */
7388 constant_part
7389 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7390 (HOST_WIDE_INT) 0,
7391 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7392 op0 = plus_constant (op0, INTVAL (constant_part));
7393 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7394 op0 = force_operand (op0, target);
7395 return REDUCE_BIT_FIELD (op0);
7396 }
7397 }
7398
7399 /* No sense saving up arithmetic to be done
7400 if it's all in the wrong mode to form part of an address.
7401 And force_operand won't know whether to sign-extend or
7402 zero-extend. */
7403 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7404 || mode != ptr_mode)
7405 {
7406 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7407 subtarget, &op0, &op1, 0);
7408 if (op0 == const0_rtx)
7409 return op1;
7410 if (op1 == const0_rtx)
7411 return op0;
7412 goto binop2;
7413 }
7414
7415 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7416 subtarget, &op0, &op1, modifier);
7417 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7418
7419 case MINUS_EXPR:
7420 /* For initializers, we are allowed to return a MINUS of two
7421 symbolic constants. Here we handle all cases when both operands
7422 are constant. */
7423 /* Handle difference of two symbolic constants,
7424 for the sake of an initializer. */
7425 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7426 && really_constant_p (TREE_OPERAND (exp, 0))
7427 && really_constant_p (TREE_OPERAND (exp, 1)))
7428 {
7429 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7430 NULL_RTX, &op0, &op1, modifier);
7431
7432 /* If the last operand is a CONST_INT, use plus_constant of
7433 the negated constant. Else make the MINUS. */
7434 if (GET_CODE (op1) == CONST_INT)
7435 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7436 else
7437 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7438 }
7439
7440 this_optab = ! unsignedp && flag_trapv
7441 && (GET_MODE_CLASS(mode) == MODE_INT)
7442 ? subv_optab : sub_optab;
7443
7444 /* No sense saving up arithmetic to be done
7445 if it's all in the wrong mode to form part of an address.
7446 And force_operand won't know whether to sign-extend or
7447 zero-extend. */
7448 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7449 || mode != ptr_mode)
7450 goto binop;
7451
7452 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7453 subtarget, &op0, &op1, modifier);
7454
7455 /* Convert A - const to A + (-const). */
7456 if (GET_CODE (op1) == CONST_INT)
7457 {
7458 op1 = negate_rtx (mode, op1);
7459 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7460 }
7461
7462 goto binop2;
7463
7464 case MULT_EXPR:
7465 /* If first operand is constant, swap them.
7466 Thus the following special case checks need only
7467 check the second operand. */
7468 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7469 {
7470 tree t1 = TREE_OPERAND (exp, 0);
7471 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7472 TREE_OPERAND (exp, 1) = t1;
7473 }
7474
7475 /* Attempt to return something suitable for generating an
7476 indexed address, for machines that support that. */
7477
7478 if (modifier == EXPAND_SUM && mode == ptr_mode
7479 && host_integerp (TREE_OPERAND (exp, 1), 0))
7480 {
7481 tree exp1 = TREE_OPERAND (exp, 1);
7482
7483 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7484 EXPAND_SUM);
7485
7486 if (!REG_P (op0))
7487 op0 = force_operand (op0, NULL_RTX);
7488 if (!REG_P (op0))
7489 op0 = copy_to_mode_reg (mode, op0);
7490
7491 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7492 gen_int_mode (tree_low_cst (exp1, 0),
7493 TYPE_MODE (TREE_TYPE (exp1)))));
7494 }
7495
7496 if (modifier == EXPAND_STACK_PARM)
7497 target = 0;
7498
7499 /* Check for multiplying things that have been extended
7500 from a narrower type. If this machine supports multiplying
7501 in that narrower type with a result in the desired type,
7502 do it that way, and avoid the explicit type-conversion. */
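/* For example, (int) h1 * (int) h2, where H1 and H2 are 16-bit values,
can use a single widening multiply (e.g. a mulhisi3 pattern) instead
of two extensions followed by a full SImode multiply, on targets that
provide one. */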
7503 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7504 && TREE_CODE (type) == INTEGER_TYPE
7505 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7506 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7507 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7508 && int_fits_type_p (TREE_OPERAND (exp, 1),
7509 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7510 /* Don't use a widening multiply if a shift will do. */
7511 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7512 > HOST_BITS_PER_WIDE_INT)
7513 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7514 ||
7515 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7516 && (TYPE_PRECISION (TREE_TYPE
7517 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7518 == TYPE_PRECISION (TREE_TYPE
7519 (TREE_OPERAND
7520 (TREE_OPERAND (exp, 0), 0))))
7521 /* If both operands are extended, they must either both
7522 be zero-extended or both be sign-extended. */
7523 && (TYPE_UNSIGNED (TREE_TYPE
7524 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7525 == TYPE_UNSIGNED (TREE_TYPE
7526 (TREE_OPERAND
7527 (TREE_OPERAND (exp, 0), 0)))))))
7528 {
7529 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7530 enum machine_mode innermode = TYPE_MODE (op0type);
7531 bool zextend_p = TYPE_UNSIGNED (op0type);
7532 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7533 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7534
7535 if (mode == GET_MODE_WIDER_MODE (innermode))
7536 {
7537 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7538 {
7539 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7540 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7541 TREE_OPERAND (exp, 1),
7542 NULL_RTX, &op0, &op1, 0);
7543 else
7544 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7545 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7546 NULL_RTX, &op0, &op1, 0);
7547 goto binop2;
7548 }
7549 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7550 && innermode == word_mode)
7551 {
7552 rtx htem, hipart;
7553 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7554 NULL_RTX, VOIDmode, 0);
7555 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7556 op1 = convert_modes (innermode, mode,
7557 expand_expr (TREE_OPERAND (exp, 1),
7558 NULL_RTX, VOIDmode, 0),
7559 unsignedp);
7560 else
7561 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7562 NULL_RTX, VOIDmode, 0);
7563 temp = expand_binop (mode, other_optab, op0, op1, target,
7564 unsignedp, OPTAB_LIB_WIDEN);
7565 hipart = gen_highpart (innermode, temp);
7566 htem = expand_mult_highpart_adjust (innermode, hipart,
7567 op0, op1, hipart,
7568 zextend_p);
7569 if (htem != hipart)
7570 emit_move_insn (hipart, htem);
7571 return REDUCE_BIT_FIELD (temp);
7572 }
7573 }
7574 }
7575 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7576 subtarget, &op0, &op1, 0);
7577 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7578
7579 case TRUNC_DIV_EXPR:
7580 case FLOOR_DIV_EXPR:
7581 case CEIL_DIV_EXPR:
7582 case ROUND_DIV_EXPR:
7583 case EXACT_DIV_EXPR:
7584 if (modifier == EXPAND_STACK_PARM)
7585 target = 0;
7586 /* Possible optimization: compute the dividend with EXPAND_SUM
7587 then if the divisor is constant can optimize the case
7588 where some terms of the dividend have coeffs divisible by it. */
7589 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7590 subtarget, &op0, &op1, 0);
7591 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7592
7593 case RDIV_EXPR:
7594 /* Emit a/b as a*(1/b). Later CSE may manage to share the reciprocal,
7595 saving an expensive divide. If not, combine will rebuild the original
7596 computation. */
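/* For example, x/y + z/y becomes x*(1/y) + z*(1/y); CSE can then share
the single reciprocal so only one division is emitted. */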
7597 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7598 && TREE_CODE (type) == REAL_TYPE
7599 && !real_onep (TREE_OPERAND (exp, 0)))
7600 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7601 build (RDIV_EXPR, type,
7602 build_real (type, dconst1),
7603 TREE_OPERAND (exp, 1))),
7604 target, tmode, modifier);
7605 this_optab = sdiv_optab;
7606 goto binop;
7607
7608 case TRUNC_MOD_EXPR:
7609 case FLOOR_MOD_EXPR:
7610 case CEIL_MOD_EXPR:
7611 case ROUND_MOD_EXPR:
7612 if (modifier == EXPAND_STACK_PARM)
7613 target = 0;
7614 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7615 subtarget, &op0, &op1, 0);
7616 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7617
7618 case FIX_ROUND_EXPR:
7619 case FIX_FLOOR_EXPR:
7620 case FIX_CEIL_EXPR:
7621 abort (); /* Not used for C. */
7622
7623 case FIX_TRUNC_EXPR:
7624 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7625 if (target == 0 || modifier == EXPAND_STACK_PARM)
7626 target = gen_reg_rtx (mode);
7627 expand_fix (target, op0, unsignedp);
7628 return target;
7629
7630 case FLOAT_EXPR:
7631 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7632 if (target == 0 || modifier == EXPAND_STACK_PARM)
7633 target = gen_reg_rtx (mode);
7634 /* expand_float can't figure out what to do if FROM has VOIDmode.
7635 So give it the correct mode. With -O, cse will optimize this. */
7636 if (GET_MODE (op0) == VOIDmode)
7637 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7638 op0);
7639 expand_float (target, op0,
7640 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7641 return target;
7642
7643 case NEGATE_EXPR:
7644 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7645 if (modifier == EXPAND_STACK_PARM)
7646 target = 0;
7647 temp = expand_unop (mode,
7648 ! unsignedp && flag_trapv
7649 && (GET_MODE_CLASS(mode) == MODE_INT)
7650 ? negv_optab : neg_optab, op0, target, 0);
7651 if (temp == 0)
7652 abort ();
7653 return REDUCE_BIT_FIELD (temp);
7654
7655 case ABS_EXPR:
7656 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7657 if (modifier == EXPAND_STACK_PARM)
7658 target = 0;
7659
7660 /* ABS_EXPR is not valid for complex arguments. */
7661 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7662 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7663 abort ();
7664
7665 /* Unsigned abs is simply the operand. Testing here means we don't
7666 risk generating incorrect code below. */
7667 if (TYPE_UNSIGNED (type))
7668 return op0;
7669
7670 return expand_abs (mode, op0, target, unsignedp,
7671 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7672
7673 case MAX_EXPR:
7674 case MIN_EXPR:
7675 target = original_target;
7676 if (target == 0
7677 || modifier == EXPAND_STACK_PARM
7678 || (MEM_P (target) && MEM_VOLATILE_P (target))
7679 || GET_MODE (target) != mode
7680 || (REG_P (target)
7681 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7682 target = gen_reg_rtx (mode);
7683 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7684 target, &op0, &op1, 0);
7685
7686 /* First try to do it with a special MIN or MAX instruction.
7687 If that does not win, use a conditional jump to select the proper
7688 value. */
7689 this_optab = (unsignedp
7690 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7691 : (code == MIN_EXPR ? smin_optab : smax_optab));
7692
7693 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7694 OPTAB_WIDEN);
7695 if (temp != 0)
7696 return temp;
7697
7698 /* At this point, a MEM target is no longer useful; we will get better
7699 code without it. */
7700
7701 if (MEM_P (target))
7702 target = gen_reg_rtx (mode);
7703
7704 /* If op1 was placed in target, swap op0 and op1. */
7705 if (target != op0 && target == op1)
7706 {
7707 rtx tem = op0;
7708 op0 = op1;
7709 op1 = tem;
7710 }
7711
7712 if (target != op0)
7713 emit_move_insn (target, op0);
7714
7715 op0 = gen_label_rtx ();
7716
7717 /* If this mode is an integer too wide to compare properly,
7718 compare word by word. Rely on cse to optimize constant cases. */
7719 if (GET_MODE_CLASS (mode) == MODE_INT
7720 && ! can_compare_p (GE, mode, ccp_jump))
7721 {
7722 if (code == MAX_EXPR)
7723 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7724 NULL_RTX, op0);
7725 else
7726 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7727 NULL_RTX, op0);
7728 }
7729 else
7730 {
7731 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7732 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7733 }
7734 emit_move_insn (target, op1);
7735 emit_label (op0);
7736 return target;
7737
7738 case BIT_NOT_EXPR:
7739 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7740 if (modifier == EXPAND_STACK_PARM)
7741 target = 0;
7742 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7743 if (temp == 0)
7744 abort ();
7745 return temp;
7746
7747 /* ??? Can optimize bitwise operations with one arg constant.
7748 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7749 and (a bitwise1 b) bitwise2 b (etc)
7750 but that is probably not worth while. */
7751
7752 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7753 boolean values when we want in all cases to compute both of them. In
7754 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7755 as actual zero-or-1 values and then bitwise anding. In cases where
7756 there cannot be any side effects, better code would be made by
7757 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7758 how to recognize those cases. */
7759
7760 case TRUTH_AND_EXPR:
7761 case BIT_AND_EXPR:
7762 this_optab = and_optab;
7763 goto binop;
7764
7765 case TRUTH_OR_EXPR:
7766 case BIT_IOR_EXPR:
7767 this_optab = ior_optab;
7768 goto binop;
7769
7770 case TRUTH_XOR_EXPR:
7771 case BIT_XOR_EXPR:
7772 this_optab = xor_optab;
7773 goto binop;
7774
7775 case LSHIFT_EXPR:
7776 case RSHIFT_EXPR:
7777 case LROTATE_EXPR:
7778 case RROTATE_EXPR:
7779 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7780 subtarget = 0;
7781 if (modifier == EXPAND_STACK_PARM)
7782 target = 0;
7783 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7784 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7785 unsignedp);
7786
7787 /* Could determine the answer when only additive constants differ. Also,
7788 the addition of one can be handled by changing the condition. */
7789 case LT_EXPR:
7790 case LE_EXPR:
7791 case GT_EXPR:
7792 case GE_EXPR:
7793 case EQ_EXPR:
7794 case NE_EXPR:
7795 case UNORDERED_EXPR:
7796 case ORDERED_EXPR:
7797 case UNLT_EXPR:
7798 case UNLE_EXPR:
7799 case UNGT_EXPR:
7800 case UNGE_EXPR:
7801 case UNEQ_EXPR:
7802 case LTGT_EXPR:
7803 temp = do_store_flag (exp,
7804 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7805 tmode != VOIDmode ? tmode : mode, 0);
7806 if (temp != 0)
7807 return temp;
7808
7809 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7810 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7811 && original_target
7812 && REG_P (original_target)
7813 && (GET_MODE (original_target)
7814 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7815 {
7816 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7817 VOIDmode, 0);
7818
7819 /* If temp is constant, we can just compute the result. */
7820 if (GET_CODE (temp) == CONST_INT)
7821 {
7822 if (INTVAL (temp) != 0)
7823 emit_move_insn (target, const1_rtx);
7824 else
7825 emit_move_insn (target, const0_rtx);
7826
7827 return target;
7828 }
7829
7830 if (temp != original_target)
7831 {
7832 enum machine_mode mode1 = GET_MODE (temp);
7833 if (mode1 == VOIDmode)
7834 mode1 = tmode != VOIDmode ? tmode : mode;
7835
7836 temp = copy_to_mode_reg (mode1, temp);
7837 }
7838
7839 op1 = gen_label_rtx ();
7840 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7841 GET_MODE (temp), unsignedp, op1);
7842 emit_move_insn (temp, const1_rtx);
7843 emit_label (op1);
7844 return temp;
7845 }
7846
7847 /* If no set-flag instruction, must generate a conditional
7848 store into a temporary variable. Drop through
7849 and handle this like && and ||. */
7850
7851 case TRUTH_ANDIF_EXPR:
7852 case TRUTH_ORIF_EXPR:
7853 if (! ignore
7854 && (target == 0
7855 || modifier == EXPAND_STACK_PARM
7856 || ! safe_from_p (target, exp, 1)
7857 /* Make sure we don't have a hard reg (such as the function's return
7858 value) live across basic blocks, if not optimizing. */
7859 || (!optimize && REG_P (target)
7860 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7861 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7862
7863 if (target)
7864 emit_clr_insn (target);
7865
7866 op1 = gen_label_rtx ();
7867 jumpifnot (exp, op1);
7868
7869 if (target)
7870 emit_0_to_1_insn (target);
7871
7872 emit_label (op1);
7873 return ignore ? const0_rtx : target;
7874
7875 case TRUTH_NOT_EXPR:
7876 if (modifier == EXPAND_STACK_PARM)
7877 target = 0;
7878 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7879 /* The parser is careful to generate TRUTH_NOT_EXPR
7880 only with operands that are always zero or one. */
7881 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7882 target, 1, OPTAB_LIB_WIDEN);
7883 if (temp == 0)
7884 abort ();
7885 return temp;
7886
7887 case COMPOUND_EXPR:
7888 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7889 return expand_expr_real (TREE_OPERAND (exp, 1),
7890 (ignore ? const0_rtx : target),
7891 VOIDmode, modifier, alt_rtl);
7892
7893 case STATEMENT_LIST:
7894 {
7895 tree_stmt_iterator iter;
7896
7897 if (!ignore)
7898 abort ();
7899
7900 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7901 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7902 }
7903 return const0_rtx;
7904
7905 case COND_EXPR:
7906 /* If it's void, we don't need to worry about computing a value. */
7907 if (VOID_TYPE_P (TREE_TYPE (exp)))
7908 {
7909 tree pred = TREE_OPERAND (exp, 0);
7910 tree then_ = TREE_OPERAND (exp, 1);
7911 tree else_ = TREE_OPERAND (exp, 2);
7912
7913 if (TREE_CODE (then_) == GOTO_EXPR
7914 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
7915 {
7916 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7917 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7918 }
7919 else if (TREE_CODE (else_) == GOTO_EXPR
7920 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
7921 {
7922 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
7923 return expand_expr (then_, const0_rtx, VOIDmode, 0);
7924 }
7925
7926 /* Just use the 'if' machinery. */
7927 expand_start_cond (pred, 0);
7928 expand_expr (then_, const0_rtx, VOIDmode, 0);
7929
7930 exp = else_;
7931
7932 /* Iterate over 'else if's instead of recursing. */
7933 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
7934 {
7935 expand_start_else ();
7936 if (EXPR_HAS_LOCATION (exp))
7937 {
7938 emit_line_note (EXPR_LOCATION (exp));
7939 record_block_change (TREE_BLOCK (exp));
7940 }
7941 expand_elseif (TREE_OPERAND (exp, 0));
7942 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
7943 }
7944 /* Don't emit the jump and label if there's no 'else' clause. */
7945 if (TREE_SIDE_EFFECTS (exp))
7946 {
7947 expand_start_else ();
7948 expand_expr (exp, const0_rtx, VOIDmode, 0);
7949 }
7950 expand_end_cond ();
7951 return const0_rtx;
7952 }
7953
7954 /* If we would have a "singleton" (see below) were it not for a
7955 conversion in each arm, bring that conversion back out. */
7956 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7957 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7958 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7959 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7960 {
7961 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7962 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7963
7964 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7965 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7966 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7967 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7968 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7969 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7970 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7971 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7972 return expand_expr (build1 (NOP_EXPR, type,
7973 build (COND_EXPR, TREE_TYPE (iftrue),
7974 TREE_OPERAND (exp, 0),
7975 iftrue, iffalse)),
7976 target, tmode, modifier);
7977 }
7978
7979 {
7980 /* Note that COND_EXPRs whose type is a structure or union
7981 are required to be constructed to contain assignments of
7982 a temporary variable, so that we can evaluate them here
7983 for side effect only. If type is void, we must do likewise. */
7984
7985 /* If an arm of the branch requires a cleanup,
7986 only that cleanup is performed. */
7987
7988 tree singleton = 0;
7989 tree binary_op = 0, unary_op = 0;
7990
7991 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7992 convert it to our mode, if necessary. */
7993 if (integer_onep (TREE_OPERAND (exp, 1))
7994 && integer_zerop (TREE_OPERAND (exp, 2))
7995 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7996 {
7997 if (ignore)
7998 {
7999 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8000 modifier);
8001 return const0_rtx;
8002 }
8003
8004 if (modifier == EXPAND_STACK_PARM)
8005 target = 0;
8006 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8007 if (GET_MODE (op0) == mode)
8008 return op0;
8009
8010 if (target == 0)
8011 target = gen_reg_rtx (mode);
8012 convert_move (target, op0, unsignedp);
8013 return target;
8014 }
8015
8016 /* Check for X ? A + B : A. If we have this, we can copy A to the
8017 output and conditionally add B. Similarly for unary operations.
8018 Don't do this if X has side-effects because those side effects
8019 might affect A or B and the "?" operation is a sequence point in
8020 ANSI. (operand_equal_p tests for side effects.) */
8021
8022 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8023 && operand_equal_p (TREE_OPERAND (exp, 2),
8024 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8025 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8026 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8027 && operand_equal_p (TREE_OPERAND (exp, 1),
8028 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8029 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8030 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8031 && operand_equal_p (TREE_OPERAND (exp, 2),
8032 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8033 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8034 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8035 && operand_equal_p (TREE_OPERAND (exp, 1),
8036 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8037 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8038
8039 /* If we are not to produce a result, we have no target. Otherwise,
8040 if a target was specified use it; it will not be used as an
8041 intermediate target unless it is safe. If no target, use a
8042 temporary. */
8043
8044 if (ignore)
8045 temp = 0;
8046 else if (modifier == EXPAND_STACK_PARM)
8047 temp = assign_temp (type, 0, 0, 1);
8048 else if (original_target
8049 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8050 || (singleton && REG_P (original_target)
8051 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8052 && original_target == var_rtx (singleton)))
8053 && GET_MODE (original_target) == mode
8054 #ifdef HAVE_conditional_move
8055 && (! can_conditionally_move_p (mode)
8056 || REG_P (original_target)
8057 || TREE_ADDRESSABLE (type))
8058 #endif
8059 && (!MEM_P (original_target)
8060 || TREE_ADDRESSABLE (type)))
8061 temp = original_target;
8062 else if (TREE_ADDRESSABLE (type))
8063 abort ();
8064 else
8065 temp = assign_temp (type, 0, 0, 1);
8066
8067 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8068 do the test of X as a store-flag operation, do this as
8069 A + ((X != 0) << log C). Similarly for other simple binary
8070 operators. Only do for C == 1 if BRANCH_COST is low. */
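/* For instance, X ? A + 4 : A can then be computed without a branch as
A + ((X != 0) << 2), since 4 is 1 << 2. */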
8071 if (temp && singleton && binary_op
8072 && (TREE_CODE (binary_op) == PLUS_EXPR
8073 || TREE_CODE (binary_op) == MINUS_EXPR
8074 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8075 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8076 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8077 : integer_onep (TREE_OPERAND (binary_op, 1)))
8078 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8079 {
8080 rtx result;
8081 tree cond;
8082 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8083 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8084 ? addv_optab : add_optab)
8085 : TREE_CODE (binary_op) == MINUS_EXPR
8086 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8087 ? subv_optab : sub_optab)
8088 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8089 : xor_optab);
8090
8091 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8092 if (singleton == TREE_OPERAND (exp, 1))
8093 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8094 else
8095 cond = TREE_OPERAND (exp, 0);
8096
8097 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8098 ? temp : NULL_RTX),
8099 mode, BRANCH_COST <= 1);
8100
8101 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8102 result = expand_shift (LSHIFT_EXPR, mode, result,
8103 build_int_2 (tree_log2
8104 (TREE_OPERAND
8105 (binary_op, 1)),
8106 0),
8107 (safe_from_p (temp, singleton, 1)
8108 ? temp : NULL_RTX), 0);
8109
8110 if (result)
8111 {
8112 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8113 return expand_binop (mode, boptab, op1, result, temp,
8114 unsignedp, OPTAB_LIB_WIDEN);
8115 }
8116 }
8117
8118 do_pending_stack_adjust ();
8119 NO_DEFER_POP;
8120 op0 = gen_label_rtx ();
8121
8122 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8123 {
8124 if (temp != 0)
8125 {
8126 /* If the target conflicts with the other operand of the
8127 binary op, we can't use it. Also, we can't use the target
8128 if it is a hard register, because evaluating the condition
8129 might clobber it. */
8130 if ((binary_op
8131 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8132 || (REG_P (temp)
8133 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8134 temp = gen_reg_rtx (mode);
8135 store_expr (singleton, temp,
8136 modifier == EXPAND_STACK_PARM ? 2 : 0);
8137 }
8138 else
8139 expand_expr (singleton,
8140 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8141 if (singleton == TREE_OPERAND (exp, 1))
8142 jumpif (TREE_OPERAND (exp, 0), op0);
8143 else
8144 jumpifnot (TREE_OPERAND (exp, 0), op0);
8145
8146 if (binary_op && temp == 0)
8147 /* Just touch the other operand. */
8148 expand_expr (TREE_OPERAND (binary_op, 1),
8149 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8150 else if (binary_op)
8151 store_expr (build (TREE_CODE (binary_op), type,
8152 make_tree (type, temp),
8153 TREE_OPERAND (binary_op, 1)),
8154 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8155 else
8156 store_expr (build1 (TREE_CODE (unary_op), type,
8157 make_tree (type, temp)),
8158 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8159 op1 = op0;
8160 }
8161 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8162 comparison operator. If we have one of these cases, set the
8163 output to A, branch on A (cse will merge these two references),
8164 then set the output to FOO. */
8165 else if (temp
8166 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8167 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8168 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8169 TREE_OPERAND (exp, 1), 0)
8170 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8171 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8172 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8173 {
8174 if (REG_P (temp)
8175 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8176 temp = gen_reg_rtx (mode);
8177 store_expr (TREE_OPERAND (exp, 1), temp,
8178 modifier == EXPAND_STACK_PARM ? 2 : 0);
8179 jumpif (TREE_OPERAND (exp, 0), op0);
8180
8181 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8182 store_expr (TREE_OPERAND (exp, 2), temp,
8183 modifier == EXPAND_STACK_PARM ? 2 : 0);
8184 else
8185 expand_expr (TREE_OPERAND (exp, 2),
8186 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8187 op1 = op0;
8188 }
8189 else if (temp
8190 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8191 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8192 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8193 TREE_OPERAND (exp, 2), 0)
8194 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8195 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8196 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8197 {
8198 if (REG_P (temp)
8199 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8200 temp = gen_reg_rtx (mode);
8201 store_expr (TREE_OPERAND (exp, 2), temp,
8202 modifier == EXPAND_STACK_PARM ? 2 : 0);
8203 jumpifnot (TREE_OPERAND (exp, 0), op0);
8204
8205 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8206 store_expr (TREE_OPERAND (exp, 1), temp,
8207 modifier == EXPAND_STACK_PARM ? 2 : 0);
8208 else
8209 expand_expr (TREE_OPERAND (exp, 1),
8210 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8211 op1 = op0;
8212 }
8213 else
8214 {
8215 op1 = gen_label_rtx ();
8216 jumpifnot (TREE_OPERAND (exp, 0), op0);
8217
8218 /* One branch of the cond can be void, if it never returns. For
8219 example, A ? throw : E. */
8220 if (temp != 0
8221 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8222 store_expr (TREE_OPERAND (exp, 1), temp,
8223 modifier == EXPAND_STACK_PARM ? 2 : 0);
8224 else
8225 expand_expr (TREE_OPERAND (exp, 1),
8226 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8227 emit_jump_insn (gen_jump (op1));
8228 emit_barrier ();
8229 emit_label (op0);
8230 if (temp != 0
8231 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8232 store_expr (TREE_OPERAND (exp, 2), temp,
8233 modifier == EXPAND_STACK_PARM ? 2 : 0);
8234 else
8235 expand_expr (TREE_OPERAND (exp, 2),
8236 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8237 }
8238
8239 emit_label (op1);
8240 OK_DEFER_POP;
8241
8242 return temp;
8243 }
8244
8245 case INIT_EXPR:
8246 {
8247 tree lhs = TREE_OPERAND (exp, 0);
8248 tree rhs = TREE_OPERAND (exp, 1);
8249
8250 temp = expand_assignment (lhs, rhs, ! ignore);
8251 return temp;
8252 }
8253
8254 case MODIFY_EXPR:
8255 {
8256 /* If lhs is complex, expand calls in rhs before computing it.
8257 That's so we don't compute a pointer and save it over a
8258 call. If lhs is simple, compute it first so we can give it
8259 as a target if the rhs is just a call. This avoids an
8260 extra temp and copy and that prevents a partial-subsumption
8261 which makes bad code. Actually we could treat
8262 component_ref's of vars like vars. */
8263
8264 tree lhs = TREE_OPERAND (exp, 0);
8265 tree rhs = TREE_OPERAND (exp, 1);
8266
8267 temp = 0;
8268
8269 /* Check for |= or &= of a bitfield of size one into another bitfield
8270 of size 1. In this case, (unless we need the result of the
8271 assignment) we can do this more efficiently with a
8272 test followed by an assignment, if necessary.
8273
8274 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8275 things change so we do, this code should be enhanced to
8276 support it. */
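/* For example, with the result unused, S.A |= T.B for two one-bit
fields can be done as "if (T.B) S.A = 1;", avoiding a read-modify-write
of the bitfield S.A. */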
8277 if (ignore
8278 && TREE_CODE (lhs) == COMPONENT_REF
8279 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8280 || TREE_CODE (rhs) == BIT_AND_EXPR)
8281 && TREE_OPERAND (rhs, 0) == lhs
8282 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8283 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8284 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8285 {
8286 rtx label = gen_label_rtx ();
8287
8288 do_jump (TREE_OPERAND (rhs, 1),
8289 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8290 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8291 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8292 (TREE_CODE (rhs) == BIT_IOR_EXPR
8293 ? integer_one_node
8294 : integer_zero_node)),
8295 0);
8296 do_pending_stack_adjust ();
8297 emit_label (label);
8298 return const0_rtx;
8299 }
8300
8301 temp = expand_assignment (lhs, rhs, ! ignore);
8302
8303 return temp;
8304 }
8305
8306 case RETURN_EXPR:
8307 if (!TREE_OPERAND (exp, 0))
8308 expand_null_return ();
8309 else
8310 expand_return (TREE_OPERAND (exp, 0));
8311 return const0_rtx;
8312
8313 case ADDR_EXPR:
8314 if (modifier == EXPAND_STACK_PARM)
8315 target = 0;
8316 /* If we are taking the address of something erroneous, just
8317 return a zero. */
8318 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8319 return const0_rtx;
8320 /* If we are taking the address of a constant and are at the
8321 top level, we have to use output_constant_def since we can't
8322 call force_const_mem at top level. */
8323 else if (cfun == 0
8324 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8325 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8326 == 'c')))
8327 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8328 else
8329 {
8330 /* We make sure to pass const0_rtx down if we came in with
8331 ignore set, to avoid doing the cleanups twice for something. */
8332 op0 = expand_expr (TREE_OPERAND (exp, 0),
8333 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8334 (modifier == EXPAND_INITIALIZER
8335 ? modifier : EXPAND_CONST_ADDRESS));
8336
8337 /* If we are going to ignore the result, OP0 will have been set
8338 to const0_rtx, so just return it. Don't get confused and
8339 think we are taking the address of the constant. */
8340 if (ignore)
8341 return op0;
8342
8343 /* We would like the object in memory. If it is a constant, we can
8344 have it be statically allocated into memory. For a non-constant,
8345 we need to allocate some memory and store the value into it. */
8346
8347 if (CONSTANT_P (op0))
8348 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8349 op0);
8350 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8351 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8352 || GET_CODE (op0) == LO_SUM)
8353 {
8354 /* If this object is in a register, it can't be BLKmode. */
8355 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8356 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8357
8358 if (GET_CODE (op0) == PARALLEL)
8359 /* Handle calls that pass values in multiple
8360 non-contiguous locations. The Irix 6 ABI has examples
8361 of this. */
8362 emit_group_store (memloc, op0, inner_type,
8363 int_size_in_bytes (inner_type));
8364 else
8365 emit_move_insn (memloc, op0);
8366
8367 op0 = memloc;
8368 }
8369
8370 if (!MEM_P (op0))
8371 abort ();
8372
8373 mark_temp_addr_taken (op0);
8374 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8375 {
8376 op0 = XEXP (op0, 0);
8377 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8378 op0 = convert_memory_address (ptr_mode, op0);
8379 return op0;
8380 }
8381
8382 /* If OP0 is not aligned at least as much as the type requires, we
8383 need to make a temporary, copy OP0 to it, and take the address of
8384 the temporary. We want to use the alignment of the type, not of
8385 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8386 the test for BLKmode means that can't happen. The test for
8387 BLKmode is because we never make mis-aligned MEMs with
8388 non-BLKmode.
8389
8390 We don't need to do this at all if the machine doesn't have
8391 strict alignment. */
8392 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8393 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8394 > MEM_ALIGN (op0))
8395 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8396 {
8397 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8398 rtx new;
8399
8400 if (TYPE_ALIGN_OK (inner_type))
8401 abort ();
8402
8403 if (TREE_ADDRESSABLE (inner_type))
8404 {
8405 /* We can't make a bitwise copy of this object, so fail. */
8406 error ("cannot take the address of an unaligned member");
8407 return const0_rtx;
8408 }
8409
8410 new = assign_stack_temp_for_type
8411 (TYPE_MODE (inner_type),
8412 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8413 : int_size_in_bytes (inner_type),
8414 1, build_qualified_type (inner_type,
8415 (TYPE_QUALS (inner_type)
8416 | TYPE_QUAL_CONST)));
8417
8418 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8419 (modifier == EXPAND_STACK_PARM
8420 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8421
8422 op0 = new;
8423 }
8424
8425 op0 = force_operand (XEXP (op0, 0), target);
8426 }
8427
8428 if (flag_force_addr
8429 && !REG_P (op0)
8430 && modifier != EXPAND_CONST_ADDRESS
8431 && modifier != EXPAND_INITIALIZER
8432 && modifier != EXPAND_SUM)
8433 op0 = force_reg (Pmode, op0);
8434
8435 if (REG_P (op0)
8436 && ! REG_USERVAR_P (op0))
8437 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8438
8439 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8440 op0 = convert_memory_address (ptr_mode, op0);
8441
8442 return op0;
8443
8444 case ENTRY_VALUE_EXPR:
8445 abort ();
8446
8447 /* COMPLEX type for Extended Pascal & Fortran */
8448 case COMPLEX_EXPR:
8449 {
8450 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8451 rtx insns;
8452
8453 /* Get the rtx code of the operands. */
8454 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8455 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8456
8457 if (! target)
8458 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8459
8460 start_sequence ();
8461
8462 /* Move the real (op0) and imaginary (op1) parts to their location. */
8463 emit_move_insn (gen_realpart (mode, target), op0);
8464 emit_move_insn (gen_imagpart (mode, target), op1);
8465
8466 insns = get_insns ();
8467 end_sequence ();
8468
8469 /* Complex construction should appear as a single unit. */
8470 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8471 each with a separate pseudo as destination.
8472 It's not correct for flow to treat them as a unit. */
8473 if (GET_CODE (target) != CONCAT)
8474 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8475 else
8476 emit_insn (insns);
8477
8478 return target;
8479 }
8480
8481 case REALPART_EXPR:
8482 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8483 return gen_realpart (mode, op0);
8484
8485 case IMAGPART_EXPR:
8486 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8487 return gen_imagpart (mode, op0);
8488
8489 case CONJ_EXPR:
8490 {
8491 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8492 rtx imag_t;
8493 rtx insns;
8494
8495 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8496
8497 if (! target)
8498 target = gen_reg_rtx (mode);
8499
8500 start_sequence ();
8501
8502 /* Store the realpart and the negated imagpart to target. */
8503 emit_move_insn (gen_realpart (partmode, target),
8504 gen_realpart (partmode, op0));
8505
8506 imag_t = gen_imagpart (partmode, target);
8507 temp = expand_unop (partmode,
8508 ! unsignedp && flag_trapv
8509 && (GET_MODE_CLASS(partmode) == MODE_INT)
8510 ? negv_optab : neg_optab,
8511 gen_imagpart (partmode, op0), imag_t, 0);
8512 if (temp != imag_t)
8513 emit_move_insn (imag_t, temp);
8514
8515 insns = get_insns ();
8516 end_sequence ();
8517
8518 /* Conjugate should appear as a single unit.
8519 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8520 each with a separate pseudo as destination.
8521 It's not correct for flow to treat them as a unit. */
8522 if (GET_CODE (target) != CONCAT)
8523 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8524 else
8525 emit_insn (insns);
8526
8527 return target;
8528 }
8529
8530 case RESX_EXPR:
8531 expand_resx_expr (exp);
8532 return const0_rtx;
8533
8534 case TRY_CATCH_EXPR:
8535 case CATCH_EXPR:
8536 case EH_FILTER_EXPR:
8537 case TRY_FINALLY_EXPR:
8538 /* Lowered by tree-eh.c. */
8539 abort ();
8540
8541 case WITH_CLEANUP_EXPR:
8542 case CLEANUP_POINT_EXPR:
8543 case TARGET_EXPR:
8544 case CASE_LABEL_EXPR:
8545 case VA_ARG_EXPR:
8546 /* Lowered by gimplify.c. */
8547 abort ();
8548
8549 case EXC_PTR_EXPR:
8550 return get_exception_pointer (cfun);
8551
8552 case FILTER_EXPR:
8553 return get_exception_filter (cfun);
8554
8555 case PREINCREMENT_EXPR:
8556 case PREDECREMENT_EXPR:
8557 case POSTINCREMENT_EXPR:
8558 case POSTDECREMENT_EXPR:
8559 case FDESC_EXPR:
8560 /* Function descriptors are not valid except as
8561 initialization constants, and should not be expanded. */
8562 abort ();
8563
8564 case SWITCH_EXPR:
8565 expand_start_case (SWITCH_COND (exp));
8566 /* The switch body is lowered in gimplify.c; we should never have
8567 switches with a non-NULL SWITCH_BODY here. */
8568 if (SWITCH_BODY (exp))
8569 abort ();
8570 if (SWITCH_LABELS (exp))
8571 {
8572 tree vec = SWITCH_LABELS (exp);
8573 size_t i = TREE_VEC_LENGTH (vec);
8574
8575 do
8576 {
8577 tree elt = TREE_VEC_ELT (vec, --i);
8578 add_case_node (CASE_LOW (elt), CASE_HIGH (elt),
8579 CASE_LABEL (elt));
8580 }
8581 while (i);
8582 }
8583 else
8584 abort ();
8585 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
8586 return const0_rtx;
8587
8588 case LABEL_EXPR:
8589 expand_label (TREE_OPERAND (exp, 0));
8590 return const0_rtx;
8591
8592 case ASM_EXPR:
8593 expand_asm_expr (exp);
8594 return const0_rtx;
8595
8596 case WITH_SIZE_EXPR:
8597 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8598 have pulled out the size to use in whatever context it needed. */
8599 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8600 modifier, alt_rtl);
8601
8602 default:
8603 return lang_hooks.expand_expr (exp, original_target, tmode,
8604 modifier, alt_rtl);
8605 }
8606
8607 /* Here to do an ordinary binary operator, generating an instruction
8608 from the optab already placed in `this_optab'. */
8609 binop:
8610 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8611 subtarget, &op0, &op1, 0);
8612 binop2:
8613 if (modifier == EXPAND_STACK_PARM)
8614 target = 0;
8615 temp = expand_binop (mode, this_optab, op0, op1, target,
8616 unsignedp, OPTAB_LIB_WIDEN);
8617 if (temp == 0)
8618 abort ();
8619 return REDUCE_BIT_FIELD (temp);
8620 }
8621 #undef REDUCE_BIT_FIELD
8622 \f
8623 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8624 signedness of TYPE), possibly returning the result in TARGET. */
8625 static rtx
8626 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8627 {
8628 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8629 if (target && GET_MODE (target) != GET_MODE (exp))
8630 target = 0;
8631 if (TYPE_UNSIGNED (type))
8632 {
8633 rtx mask;
8634 if (prec < HOST_BITS_PER_WIDE_INT)
8635 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8636 GET_MODE (exp));
8637 else
8638 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8639 ((unsigned HOST_WIDE_INT) 1
8640 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8641 GET_MODE (exp));
8642 return expand_and (GET_MODE (exp), exp, mask, target);
8643 }
8644 else
8645 {
8646 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8647 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8648 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8649 }
8650 }
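/* Illustration only (not part of the compiler): the two branches above
   mirror the usual C idioms for reducing a value to an N-bit field,
   assuming a 32-bit int and 0 < n < 32:

     unsigned zext (unsigned x, int n)     -- unsigned type: mask off
     {                                        the high bits
       return x & ((1u << n) - 1);
     }

     int sext (int x, int n)               -- signed type: shift up, then
     {                                        arithmetic shift back down
       return (x << (32 - n)) >> (32 - n);
     }

   The literal C version of sext relies on implementation-defined signed
   shifts; the LSHIFT_EXPR/RSHIFT_EXPR pair used above has the intended
   arithmetic-shift semantics at the rtl level.  */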
8651 \f
8652 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
8653 when applied to the address of EXP, produces an address known to be
8654 aligned to more than BIGGEST_ALIGNMENT. */
8655
8656 static int
8657 is_aligning_offset (tree offset, tree exp)
8658 {
8659 /* Strip off any conversions. */
8660 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8661 || TREE_CODE (offset) == NOP_EXPR
8662 || TREE_CODE (offset) == CONVERT_EXPR)
8663 offset = TREE_OPERAND (offset, 0);
8664
8665 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8666 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8667 if (TREE_CODE (offset) != BIT_AND_EXPR
8668 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8669 || compare_tree_int (TREE_OPERAND (offset, 1),
8670 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8671 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8672 return 0;
8673
8674 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8675 It must be NEGATE_EXPR. Then strip any more conversions. */
8676 offset = TREE_OPERAND (offset, 0);
8677 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8678 || TREE_CODE (offset) == NOP_EXPR
8679 || TREE_CODE (offset) == CONVERT_EXPR)
8680 offset = TREE_OPERAND (offset, 0);
8681
8682 if (TREE_CODE (offset) != NEGATE_EXPR)
8683 return 0;
8684
8685 offset = TREE_OPERAND (offset, 0);
8686 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8687 || TREE_CODE (offset) == NOP_EXPR
8688 || TREE_CODE (offset) == CONVERT_EXPR)
8689 offset = TREE_OPERAND (offset, 0);
8690
8691 /* This must now be the address of EXP. */
8692 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8693 }
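/* For illustration only: the shape recognized above is the one produced
   for expressions that round an address up to an alignment boundary,
   e.g. (in plain C, with a hypothetical ALIGN that is a power of 2
   exceeding BIGGEST_ALIGNMENT):

     offset = -(uintptr_t) &exp & (ALIGN - 1);

   i.e. a BIT_AND_EXPR whose mask is one less than a power of 2 and
   whose other operand is the negated address of EXP; &exp + offset is
   then ALIGN-aligned.  */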
8694 \f
8695 /* Return the tree node if ARG corresponds to a string constant, or zero
8696 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8697 in bytes within the string that ARG is accessing. The type of the
8698 offset will be `sizetype'. */
8699
8700 tree
8701 string_constant (tree arg, tree *ptr_offset)
8702 {
8703 STRIP_NOPS (arg);
8704
8705 if (TREE_CODE (arg) == ADDR_EXPR
8706 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8707 {
8708 *ptr_offset = size_zero_node;
8709 return TREE_OPERAND (arg, 0);
8710 }
8711 if (TREE_CODE (arg) == ADDR_EXPR
8712 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
8713 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
8714 {
8715 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
8716 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8717 }
8718 else if (TREE_CODE (arg) == PLUS_EXPR)
8719 {
8720 tree arg0 = TREE_OPERAND (arg, 0);
8721 tree arg1 = TREE_OPERAND (arg, 1);
8722
8723 STRIP_NOPS (arg0);
8724 STRIP_NOPS (arg1);
8725
8726 if (TREE_CODE (arg0) == ADDR_EXPR
8727 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8728 {
8729 *ptr_offset = convert (sizetype, arg1);
8730 return TREE_OPERAND (arg0, 0);
8731 }
8732 else if (TREE_CODE (arg1) == ADDR_EXPR
8733 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8734 {
8735 *ptr_offset = convert (sizetype, arg0);
8736 return TREE_OPERAND (arg1, 0);
8737 }
8738 }
8739
8740 return 0;
8741 }
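/* Illustrative examples of the shapes handled above (the actual trees
   depend on the front end and on prior folding):

     &"hello"        ADDR_EXPR of STRING_CST            -> offset 0
     &"hello"[2]     ADDR_EXPR of ARRAY_REF of the CST  -> offset 2
     "hello" + n     PLUS_EXPR with an &"hello" operand -> offset n

   In each case the STRING_CST node is returned and the byte offset is
   stored through PTR_OFFSET as a sizetype value.  */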
8742 \f
8743 /* Generate code to calculate EXP using a store-flag instruction
8744 and return an rtx for the result. EXP is either a comparison
8745 or a TRUTH_NOT_EXPR whose operand is a comparison.
8746
8747 If TARGET is nonzero, store the result there if convenient.
8748
8749 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8750 cheap.
8751
8752 Return zero if there is no suitable set-flag instruction
8753 available on this machine.
8754
8755 Once expand_expr has been called on the arguments of the comparison,
8756 we are committed to doing the store flag, since it is not safe to
8757 re-evaluate the expression. We emit the store-flag insn by calling
8758 emit_store_flag, but only expand the arguments if we have a reason
8759 to believe that emit_store_flag will be successful. If we think that
8760 it will, but it isn't, we have to simulate the store-flag with a
8761 set/jump/set sequence. */
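/* For example (illustration only), on a target with a set-on-condition
   instruction the comparison in

     int f (int a, int b) { return a < b; }

   can be expanded to a single store-flag insn that writes 0 or 1 into
   the result register, instead of a conditional branch around moves of
   the constants 0 and 1.  The branching form is the fallback used when
   the insn turns out not to be available (see below).  */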
8762
8763 static rtx
8764 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8765 {
8766 enum rtx_code code;
8767 tree arg0, arg1, type;
8768 tree tem;
8769 enum machine_mode operand_mode;
8770 int invert = 0;
8771 int unsignedp;
8772 rtx op0, op1;
8773 enum insn_code icode;
8774 rtx subtarget = target;
8775 rtx result, label;
8776
8777 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8778 result at the end. We can't simply invert the test since it would
8779 have already been inverted if it were valid. This case occurs for
8780 some floating-point comparisons. */
8781
8782 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8783 invert = 1, exp = TREE_OPERAND (exp, 0);
8784
8785 arg0 = TREE_OPERAND (exp, 0);
8786 arg1 = TREE_OPERAND (exp, 1);
8787
8788 /* Don't crash if the comparison was erroneous. */
8789 if (arg0 == error_mark_node || arg1 == error_mark_node)
8790 return const0_rtx;
8791
8792 type = TREE_TYPE (arg0);
8793 operand_mode = TYPE_MODE (type);
8794 unsignedp = TYPE_UNSIGNED (type);
8795
8796 /* We won't bother with BLKmode store-flag operations because it would mean
8797 passing a lot of information to emit_store_flag. */
8798 if (operand_mode == BLKmode)
8799 return 0;
8800
8801 /* We won't bother with store-flag operations involving function pointers
8802 when function pointers must be canonicalized before comparisons. */
8803 #ifdef HAVE_canonicalize_funcptr_for_compare
8804 if (HAVE_canonicalize_funcptr_for_compare
8805 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8806 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8807 == FUNCTION_TYPE))
8808 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8809 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8810 == FUNCTION_TYPE))))
8811 return 0;
8812 #endif
8813
8814 STRIP_NOPS (arg0);
8815 STRIP_NOPS (arg1);
8816
8817 /* Get the rtx comparison code to use. We know that EXP is a comparison
8818 operation of some type. Some comparisons against 1 and -1 can be
8819 converted to comparisons with zero. Do so here so that the tests
8820 below will be aware that we have a comparison with zero. These
8821 tests will not catch constants in the first operand, but constants
8822 are rarely passed as the first operand. */
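   For instance:  A < 1 becomes A <= 0 and A >= 1 becomes A > 0, and for
   signed A, A <= -1 becomes A < 0 and A > -1 becomes A >= 0, as the
   cases below show.  */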
8823
8824 switch (TREE_CODE (exp))
8825 {
8826 case EQ_EXPR:
8827 code = EQ;
8828 break;
8829 case NE_EXPR:
8830 code = NE;
8831 break;
8832 case LT_EXPR:
8833 if (integer_onep (arg1))
8834 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8835 else
8836 code = unsignedp ? LTU : LT;
8837 break;
8838 case LE_EXPR:
8839 if (! unsignedp && integer_all_onesp (arg1))
8840 arg1 = integer_zero_node, code = LT;
8841 else
8842 code = unsignedp ? LEU : LE;
8843 break;
8844 case GT_EXPR:
8845 if (! unsignedp && integer_all_onesp (arg1))
8846 arg1 = integer_zero_node, code = GE;
8847 else
8848 code = unsignedp ? GTU : GT;
8849 break;
8850 case GE_EXPR:
8851 if (integer_onep (arg1))
8852 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8853 else
8854 code = unsignedp ? GEU : GE;
8855 break;
8856
8857 case UNORDERED_EXPR:
8858 code = UNORDERED;
8859 break;
8860 case ORDERED_EXPR:
8861 code = ORDERED;
8862 break;
8863 case UNLT_EXPR:
8864 code = UNLT;
8865 break;
8866 case UNLE_EXPR:
8867 code = UNLE;
8868 break;
8869 case UNGT_EXPR:
8870 code = UNGT;
8871 break;
8872 case UNGE_EXPR:
8873 code = UNGE;
8874 break;
8875 case UNEQ_EXPR:
8876 code = UNEQ;
8877 break;
8878 case LTGT_EXPR:
8879 code = LTGT;
8880 break;
8881
8882 default:
8883 abort ();
8884 }
8885
8886 /* Put a constant second. */
8887 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8888 {
8889 tem = arg0; arg0 = arg1; arg1 = tem;
8890 code = swap_condition (code);
8891 }
8892
8893 /* If this is an equality or inequality test of a single bit, we can
8894 do this by shifting the bit being tested to the low-order bit and
8895 masking the result with the constant 1. If the condition was EQ,
8896 we xor it with 1. This does not require an scc insn and is faster
8897 than an scc insn even if we have it.
8898
8899 The code to make this transformation was moved into fold_single_bit_test,
8900 so we just call into the folder and expand its result. */
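/* In plain C terms (illustration only), for a single-bit mask the
   transformation is

     (x & (1 << k)) != 0   ->   (x >> k) & 1
     (x & (1 << k)) == 0   ->   ((x >> k) & 1) ^ 1

   which needs only a shift and a mask (plus an XOR for the EQ case)
   rather than a set-on-condition instruction.  */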
8901
8902 if ((code == NE || code == EQ)
8903 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8904 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8905 {
8906 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8907 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8908 arg0, arg1, type),
8909 target, VOIDmode, EXPAND_NORMAL);
8910 }
8911
8912 /* Now see if we are likely to be able to do this. Return if not. */
8913 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8914 return 0;
8915
8916 icode = setcc_gen_code[(int) code];
8917 if (icode == CODE_FOR_nothing
8918 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8919 {
8920 /* We can only do this if it is one of the special cases that
8921 can be handled without an scc insn. */
8922 if ((code == LT && integer_zerop (arg1))
8923 || (! only_cheap && code == GE && integer_zerop (arg1)))
8924 ;
8925 else if (BRANCH_COST >= 0
8926 && ! only_cheap && (code == NE || code == EQ)
8927 && TREE_CODE (type) != REAL_TYPE
8928 && ((abs_optab->handlers[(int) operand_mode].insn_code
8929 != CODE_FOR_nothing)
8930 || (ffs_optab->handlers[(int) operand_mode].insn_code
8931 != CODE_FOR_nothing)))
8932 ;
8933 else
8934 return 0;
8935 }
8936
8937 if (! get_subtarget (target)
8938 || GET_MODE (subtarget) != operand_mode)
8939 subtarget = 0;
8940
8941 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8942
8943 if (target == 0)
8944 target = gen_reg_rtx (mode);
8945
8946 result = emit_store_flag (target, code, op0, op1,
8947 operand_mode, unsignedp, 1);
8948
8949 if (result)
8950 {
8951 if (invert)
8952 result = expand_binop (mode, xor_optab, result, const1_rtx,
8953 result, 0, OPTAB_LIB_WIDEN);
8954 return result;
8955 }
8956
8957 /* If this failed, we have to do this with set/compare/jump/set code. */
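/* The fallback emitted below corresponds to (pseudo-C, ignoring the
   INVERT case):

     target = 1;
     if (op0 <code> op1) goto done;
     target = 0;
   done:

   i.e. assume the condition holds, branch over the correction when it
   really does, and otherwise store the other constant.  */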
8958 if (!REG_P (target)
8959 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8960 target = gen_reg_rtx (GET_MODE (target));
8961
8962 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8963 result = compare_from_rtx (op0, op1, code, unsignedp,
8964 operand_mode, NULL_RTX);
8965 if (GET_CODE (result) == CONST_INT)
8966 return (((result == const0_rtx && ! invert)
8967 || (result != const0_rtx && invert))
8968 ? const0_rtx : const1_rtx);
8969
8970 /* The code of RESULT may not match CODE if compare_from_rtx
8971 decided to swap its operands and reverse the original code.
8972
8973 We know that compare_from_rtx returns either a CONST_INT or
8974 a new comparison code, so it is safe to just extract the
8975 code from RESULT. */
8976 code = GET_CODE (result);
8977
8978 label = gen_label_rtx ();
8979 if (bcc_gen_fctn[(int) code] == 0)
8980 abort ();
8981
8982 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8983 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8984 emit_label (label);
8985
8986 return target;
8987 }
8988 \f
8989
8990 /* Stubs in case we haven't got a casesi insn. */
8991 #ifndef HAVE_casesi
8992 # define HAVE_casesi 0
8993 # define gen_casesi(a, b, c, d, e) (0)
8994 # define CODE_FOR_casesi CODE_FOR_nothing
8995 #endif
8996
8997 /* If the machine does not have a case insn that compares the bounds,
8998 this means extra overhead for dispatch tables, which raises the
8999 threshold for using them. */
9000 #ifndef CASE_VALUES_THRESHOLD
9001 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9002 #endif /* CASE_VALUES_THRESHOLD */
9003
9004 unsigned int
9005 case_values_threshold (void)
9006 {
9007 return CASE_VALUES_THRESHOLD;
9008 }
9009
9010 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9011 0 otherwise (i.e. if there is no casesi instruction). */
9012 int
9013 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9014 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9015 {
9016 enum machine_mode index_mode = SImode;
9017 int index_bits = GET_MODE_BITSIZE (index_mode);
9018 rtx op1, op2, index;
9019 enum machine_mode op_mode;
9020
9021 if (! HAVE_casesi)
9022 return 0;
9023
9024 /* Convert the index to SImode. */
9025 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9026 {
9027 enum machine_mode omode = TYPE_MODE (index_type);
9028 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9029
9030 /* We must handle the endpoints in the original mode. */
9031 index_expr = build (MINUS_EXPR, index_type,
9032 index_expr, minval);
9033 minval = integer_zero_node;
9034 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9035 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9036 omode, 1, default_label);
9037 /* Now we can safely truncate. */
9038 index = convert_to_mode (index_mode, index, 0);
9039 }
9040 else
9041 {
9042 if (TYPE_MODE (index_type) != index_mode)
9043 {
9044 index_expr = convert (lang_hooks.types.type_for_size
9045 (index_bits, 0), index_expr);
9046 index_type = TREE_TYPE (index_expr);
9047 }
9048
9049 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9050 }
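/* Illustration of why the bound check in the wide-mode branch above
   must happen before truncating to SImode: with a 64-bit index, a value
   such as 0x100000002 would truncate to 2 and could wrongly select a
   case label; comparing (unsigned) (index - minval) against RANGE in
   the original mode sends such values to DEFAULT_LABEL instead.  */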
9051
9052 do_pending_stack_adjust ();
9053
9054 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9055 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9056 (index, op_mode))
9057 index = copy_to_mode_reg (op_mode, index);
9058
9059 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9060
9061 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9062 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9063 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9064 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9065 (op1, op_mode))
9066 op1 = copy_to_mode_reg (op_mode, op1);
9067
9068 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9069
9070 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9071 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9072 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9073 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9074 (op2, op_mode))
9075 op2 = copy_to_mode_reg (op_mode, op2);
9076
9077 emit_jump_insn (gen_casesi (index, op1, op2,
9078 table_label, default_label));
9079 return 1;
9080 }
9081
9082 /* Attempt to generate a tablejump instruction; same concept. */
9083 #ifndef HAVE_tablejump
9084 #define HAVE_tablejump 0
9085 #define gen_tablejump(x, y) (0)
9086 #endif
9087
9088 /* Subroutine of the next function.
9089
9090 INDEX is the value being switched on, with the lowest value
9091 in the table already subtracted.
9092 MODE is its expected mode (needed if INDEX is constant).
9093 RANGE is the length of the jump table.
9094 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9095
9096 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9097 index value is out of range. */
9098
9099 static void
9100 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9101 rtx default_label)
9102 {
9103 rtx temp, vector;
9104
9105 if (INTVAL (range) > cfun->max_jumptable_ents)
9106 cfun->max_jumptable_ents = INTVAL (range);
9107
9108 /* Do an unsigned comparison (in the proper mode) between the index
9109 expression and the value which represents the length of the range.
9110 Since we just finished subtracting the lower bound of the range
9111 from the index expression, this comparison allows us to simultaneously
9112 check that the original index expression value is both greater than
9113 or equal to the minimum value of the range and less than or equal to
9114 the maximum value of the range. */
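   In C terms this is the familiar trick (illustration only):

     if ((unsigned) (i - lo) > (unsigned) (hi - lo))
       goto default_label;

   where the subtraction of LO has already been done by the caller, so a
   single unsigned comparison against RANGE covers both i < lo and
   i > hi.  */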
9115
9116 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9117 default_label);
9118
9119 /* If index is in range, it must fit in Pmode.
9120 Convert to Pmode so we can index with it. */
9121 if (mode != Pmode)
9122 index = convert_to_mode (Pmode, index, 1);
9123
9124 /* Don't let a MEM slip through, because then the INDEX that comes
9125 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9126 and break_out_memory_refs will go to work on it and mess it up. */
9127 #ifdef PIC_CASE_VECTOR_ADDRESS
9128 if (flag_pic && !REG_P (index))
9129 index = copy_to_mode_reg (Pmode, index);
9130 #endif
9131
9132 /* If flag_force_addr were to affect this address
9133 it could interfere with the tricky assumptions made
9134 about addresses that contain label-refs,
9135 which may be valid only very near the tablejump itself. */
9136 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9137 GET_MODE_SIZE, because this indicates how large insns are. The other
9138 uses should all be Pmode, because they are addresses. This code
9139 could fail if addresses and insns are not the same size. */
9140 index = gen_rtx_PLUS (Pmode,
9141 gen_rtx_MULT (Pmode, index,
9142 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9143 gen_rtx_LABEL_REF (Pmode, table_label));
9144 #ifdef PIC_CASE_VECTOR_ADDRESS
9145 if (flag_pic)
9146 index = PIC_CASE_VECTOR_ADDRESS (index);
9147 else
9148 #endif
9149 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9150 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9151 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9152 RTX_UNCHANGING_P (vector) = 1;
9153 MEM_NOTRAP_P (vector) = 1;
9154 convert_move (temp, vector, 0);
9155
9156 emit_jump_insn (gen_tablejump (temp, table_label));
9157
9158 /* If we are generating PIC code or if the table is PC-relative, the
9159 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9160 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9161 emit_barrier ();
9162 }
9163
9164 int
9165 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9166 rtx table_label, rtx default_label)
9167 {
9168 rtx index;
9169
9170 if (! HAVE_tablejump)
9171 return 0;
9172
9173 index_expr = fold (build (MINUS_EXPR, index_type,
9174 convert (index_type, index_expr),
9175 convert (index_type, minval)));
9176 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9177 do_pending_stack_adjust ();
9178
9179 do_tablejump (index, TYPE_MODE (index_type),
9180 convert_modes (TYPE_MODE (index_type),
9181 TYPE_MODE (TREE_TYPE (range)),
9182 expand_expr (range, NULL_RTX,
9183 VOIDmode, 0),
9184 TYPE_UNSIGNED (TREE_TYPE (range))),
9185 table_label, default_label);
9186 return 1;
9187 }
9188
9189 /* Nonzero if the mode is a valid vector mode for this architecture.
9190 This returns nonzero even if there is no hardware support for the
9191 vector mode, but we can emulate with narrower modes. */
9192
9193 int
9194 vector_mode_valid_p (enum machine_mode mode)
9195 {
9196 enum mode_class class = GET_MODE_CLASS (mode);
9197 enum machine_mode innermode;
9198
9199 /* Doh! What's going on? */
9200 if (class != MODE_VECTOR_INT
9201 && class != MODE_VECTOR_FLOAT)
9202 return 0;
9203
9204 /* Hardware support. Woo hoo! */
9205 if (VECTOR_MODE_SUPPORTED_P (mode))
9206 return 1;
9207
9208 innermode = GET_MODE_INNER (mode);
9209
9210 /* We should probably return 1 if requesting V4DI when we have no DI
9211 but do have V2DI, though that situation is probably very unlikely. */
9212
9213 /* If we have support for the inner mode, we can safely emulate it.
9214 We may not have V2DI, but we can emulate with a pair of DIs. */
9215 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9216 }
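/* For example (illustrative), on a target with DImode moves but no
   V2DImode support, a V2DI copy can be emulated as two DImode moves of
   the low and high halves, so vector_mode_valid_p (V2DImode) still
   returns 1 there.  */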
9217
9218 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9219 static rtx
9220 const_vector_from_tree (tree exp)
9221 {
9222 rtvec v;
9223 int units, i;
9224 tree link, elt;
9225 enum machine_mode inner, mode;
9226
9227 mode = TYPE_MODE (TREE_TYPE (exp));
9228
9229 if (initializer_zerop (exp))
9230 return CONST0_RTX (mode);
9231
9232 units = GET_MODE_NUNITS (mode);
9233 inner = GET_MODE_INNER (mode);
9234
9235 v = rtvec_alloc (units);
9236
9237 link = TREE_VECTOR_CST_ELTS (exp);
9238 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9239 {
9240 elt = TREE_VALUE (link);
9241
9242 if (TREE_CODE (elt) == REAL_CST)
9243 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9244 inner);
9245 else
9246 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9247 TREE_INT_CST_HIGH (elt),
9248 inner);
9249 }
9250
9251 /* Initialize remaining elements to 0. */
9252 for (; i < units; ++i)
9253 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9254
9255 return gen_rtx_raw_CONST_VECTOR (mode, v);
9256 }
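/* Worked example (illustrative): for a V4SImode VECTOR_CST whose element
   list is { 1, 2 }, the first loop above fills the first two rtvec slots
   with the constants 1 and 2, and the trailing loop pads the remaining
   two slots with CONST0_RTX (SImode), yielding
   (const_vector:V4SI [1 2 0 0]).  */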
9257 #include "gt-expr.h"