[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
51
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
54
55 They should if the stack and args grow in opposite directions, but
56 only if we have push insns. */
57
58 #ifdef PUSH_ROUNDING
59
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
65
66 #endif
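/* Example (a sketch, not in the original sources): on a target that has
   push insns and where the stack grows downward but arguments do not
   (STACK_GROWS_DOWNWARD defined, ARGS_GROW_DOWNWARD not defined), the two
   defined() tests above differ, PUSH_ARGS_REVERSED gets defined, and
   arguments are processed from last to first.  */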
67
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
75
76 /* Convert defined/undefined to boolean. */
77 #ifdef TARGET_MEM_FUNCTIONS
78 #undef TARGET_MEM_FUNCTIONS
79 #define TARGET_MEM_FUNCTIONS 1
80 #else
81 #define TARGET_MEM_FUNCTIONS 0
82 #endif
83
84
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
92
93 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
94 tree placeholder_list = 0;
95
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
110 int reverse;
111 };
112
113 /* This structure is used by store_by_pieces to describe the clear to
114 be performed. */
115
116 struct store_by_pieces
117 {
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
125 void *constfundata;
126 int reverse;
127 };
128
129 static rtx enqueue_insn (rtx, rtx);
130 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
131 unsigned int);
132 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
133 struct move_by_pieces *);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
136 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
137 static tree emit_block_move_libcall_fn (int);
138 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
139 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
140 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
141 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
142 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
143 struct store_by_pieces *);
144 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
145 static rtx clear_storage_via_libcall (rtx, rtx);
146 static tree clear_storage_libcall_fn (int);
147 static rtx compress_float_constant (rtx, rtx);
148 static rtx get_subtarget (rtx);
149 static int is_zeros_p (tree);
150 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
151 HOST_WIDE_INT, enum machine_mode,
152 tree, tree, int, int);
153 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
154 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
155 tree, enum machine_mode, int, tree, int);
156 static rtx var_rtx (tree);
157
158 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
160
161 static int is_aligning_offset (tree, tree);
162 static rtx expand_increment (tree, int, int);
163 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
164 enum expand_modifier);
165 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (enum machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
170 static rtx const_vector_from_tree (tree);
171
172 /* Record for each mode whether we can move a register directly to or
173 from an object of that mode in memory. If we can't, we won't try
174 to use that mode directly when accessing a field of that mode. */
175
176 static char direct_load[NUM_MACHINE_MODES];
177 static char direct_store[NUM_MACHINE_MODES];
178
179 /* Record for each mode whether we can float-extend from memory. */
180
181 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
182
183 /* This macro is used to determine whether move_by_pieces should be called
184 to perform a structure copy. */
185 #ifndef MOVE_BY_PIECES_P
186 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
188 #endif
189
190 /* This macro is used to determine whether clear_by_pieces should be
191 called to clear storage. */
192 #ifndef CLEAR_BY_PIECES_P
193 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
195 #endif
196
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memset" storage with byte values other than zero, or
199 to "memcpy" storage when the source is a constant string. */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
202 #endif
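/* Illustrative sketch (not part of the original file): how a caller might
   consult the heuristic before expanding a small fixed-size copy.  DST and
   SRC are hypothetical BLKmode MEMs; 16 and 32 are hypothetical size (bytes)
   and alignment (bits) values.

     if (MOVE_BY_PIECES_P (16, 32))
       move_by_pieces (dst, src, 16, 32, 0);
     else
       emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_NORMAL);
*/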
203
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab[NUM_MACHINE_MODES];
206
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
209
210 /* These arrays record the insn_code of two different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
214
215 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
216 struct file_stack *expr_wfl_stack;
217
218 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
219
220 #ifndef SLOW_UNALIGNED_ACCESS
221 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
222 #endif
223 \f
224 /* This is run once per compilation to set up which modes can be used
225 directly in memory and to initialize the block move optab. */
226
227 void
228 init_expr_once (void)
229 {
230 rtx insn, pat;
231 enum machine_mode mode;
232 int num_clobbers;
233 rtx mem, mem1;
234 rtx reg;
235
236 /* Try indexing by frame ptr and try by stack ptr.
237 It is known that on the Convex the stack ptr isn't a valid index.
238 With luck, one or the other is valid on any machine. */
239 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
240 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
241
242 /* A scratch register we can modify in-place below to avoid
243 useless RTL allocations. */
244 reg = gen_rtx_REG (VOIDmode, -1);
245
246 insn = rtx_alloc (INSN);
247 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
248 PATTERN (insn) = pat;
249
250 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
251 mode = (enum machine_mode) ((int) mode + 1))
252 {
253 int regno;
254
255 direct_load[(int) mode] = direct_store[(int) mode] = 0;
256 PUT_MODE (mem, mode);
257 PUT_MODE (mem1, mode);
258 PUT_MODE (reg, mode);
259
260 /* See if there is some register that can be used in this mode and
261 directly loaded or stored from memory. */
262
263 if (mode != VOIDmode && mode != BLKmode)
264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
265 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
266 regno++)
267 {
268 if (! HARD_REGNO_MODE_OK (regno, mode))
269 continue;
270
271 REGNO (reg) = regno;
272
273 SET_SRC (pat) = mem;
274 SET_DEST (pat) = reg;
275 if (recog (pat, insn, &num_clobbers) >= 0)
276 direct_load[(int) mode] = 1;
277
278 SET_SRC (pat) = mem1;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
282
283 SET_SRC (pat) = reg;
284 SET_DEST (pat) = mem;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_store[(int) mode] = 1;
287
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem1;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
292 }
293 }
294
295 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
296
297 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
298 mode = GET_MODE_WIDER_MODE (mode))
299 {
300 enum machine_mode srcmode;
301 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
302 srcmode = GET_MODE_WIDER_MODE (srcmode))
303 {
304 enum insn_code ic;
305
306 ic = can_extend_p (mode, srcmode, 0);
307 if (ic == CODE_FOR_nothing)
308 continue;
309
310 PUT_MODE (mem, srcmode);
311
312 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
313 float_extend_from_mem[mode][srcmode] = true;
314 }
315 }
316 }
317
318 /* This is run at the start of compiling a function. */
319
320 void
321 init_expr (void)
322 {
323 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
324 }
325
326 /* Small sanity check that the queue is empty at the end of a function. */
327
328 void
329 finish_expr_for_function (void)
330 {
331 if (pending_chain)
332 abort ();
333 }
334 \f
335 /* Manage the queue of increment instructions to be output
336 for POSTINCREMENT_EXPR expressions, etc. */
337
338 /* Queue up to increment (or change) VAR later. BODY says how:
339 BODY should be the same thing you would pass to emit_insn
340 to increment right away. It will go to emit_insn later on.
341
342 The value is a QUEUED expression to be used in place of VAR
343 where you want to guarantee the pre-incrementation value of VAR. */
344
345 static rtx
346 enqueue_insn (rtx var, rtx body)
347 {
348 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
349 body, pending_chain);
350 return pending_chain;
351 }
352
353 /* Use protect_from_queue to convert a QUEUED expression
354 into something that you can put immediately into an instruction.
355 If the queued incrementation has not happened yet,
356 protect_from_queue returns the variable itself.
357 If the incrementation has happened, protect_from_queue returns a temp
358 that contains a copy of the old value of the variable.
359
360 Any time an rtx which might possibly be a QUEUED is to be put
361 into an instruction, it must be passed through protect_from_queue first.
362 QUEUED expressions are not meaningful in instructions.
363
364 Do not pass a value through protect_from_queue and then hold
365 on to it for a while before putting it in an instruction!
366 If the queue is flushed in between, incorrect code will result. */
367
368 rtx
369 protect_from_queue (rtx x, int modify)
370 {
371 RTX_CODE code = GET_CODE (x);
372
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
376 return x;
377 #endif
378
379 if (code != QUEUED)
380 {
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
385 shared. */
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
388 {
389 rtx y = XEXP (x, 0);
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
391
392 if (QUEUED_INSN (y))
393 {
394 rtx temp = gen_reg_rtx (GET_MODE (x));
395
396 emit_insn_before (gen_move_insn (temp, new),
397 QUEUED_INSN (y));
398 return temp;
399 }
400
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
404 }
405
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
408 if (code == MEM)
409 {
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
412 {
413 x = copy_rtx (x);
414 XEXP (x, 0) = tem;
415 }
416 }
417 else if (code == PLUS || code == MULT)
418 {
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
422 {
423 x = copy_rtx (x);
424 XEXP (x, 0) = new0;
425 XEXP (x, 1) = new1;
426 }
427 }
428 return x;
429 }
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
432 emit_queue. */
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
436 use that copy. */
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 QUEUED_INSN (x));
444 return QUEUED_COPY (x);
445 }
446
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
451
452 int
453 queued_subexp_p (rtx x)
454 {
455 enum rtx_code code = GET_CODE (x);
456 switch (code)
457 {
458 case QUEUED:
459 return 1;
460 case MEM:
461 return queued_subexp_p (XEXP (x, 0));
462 case MULT:
463 case PLUS:
464 case MINUS:
465 return (queued_subexp_p (XEXP (x, 0))
466 || queued_subexp_p (XEXP (x, 1)));
467 default:
468 return 0;
469 }
470 }
471
472 /* Perform all the pending incrementations. */
473
474 void
475 emit_queue (void)
476 {
477 rtx p;
478 while ((p = pending_chain))
479 {
480 rtx body = QUEUED_BODY (p);
481
482 switch (GET_CODE (body))
483 {
484 case INSN:
485 case JUMP_INSN:
486 case CALL_INSN:
487 case CODE_LABEL:
488 case BARRIER:
489 case NOTE:
490 QUEUED_INSN (p) = body;
491 emit_insn (body);
492 break;
493
494 #ifdef ENABLE_CHECKING
495 case SEQUENCE:
496 abort ();
497 break;
498 #endif
499
500 default:
501 QUEUED_INSN (p) = emit_insn (body);
502 break;
503 }
504
505 pending_chain = QUEUED_NEXT (p);
506 }
507 }
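/* Illustrative sketch (an assumption, not taken from the sources): the
   protocol expected of expansion code that may see QUEUED rtxs.  OP and
   TARGET are hypothetical operands.

     op = protect_from_queue (op, 0);           make OP usable in an insn
     emit_insn (gen_move_insn (target, op));
     emit_queue ();                             flush pending post-increments
*/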
508 \f
509 /* Copy data from FROM to TO, where the machine modes are not the same.
510 Both modes may be integer, or both may be floating.
511 UNSIGNEDP should be nonzero if FROM is an unsigned type.
512 This causes zero-extension instead of sign-extension. */
513
514 void
515 convert_move (rtx to, rtx from, int unsignedp)
516 {
517 enum machine_mode to_mode = GET_MODE (to);
518 enum machine_mode from_mode = GET_MODE (from);
519 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
520 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
521 enum insn_code code;
522 rtx libcall;
523
524 /* rtx code for making an equivalent value. */
525 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
526 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
527
528 to = protect_from_queue (to, 1);
529 from = protect_from_queue (from, 0);
530
531 if (to_real != from_real)
532 abort ();
533
534 /* If FROM is a SUBREG that indicates that we have already done at least
535 the required extension, strip it. We don't handle such SUBREGs as
536 TO here. */
537
538 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
539 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
540 >= GET_MODE_SIZE (to_mode))
541 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
542 from = gen_lowpart (to_mode, from), from_mode = to_mode;
543
544 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
545 abort ();
546
547 if (to_mode == from_mode
548 || (from_mode == VOIDmode && CONSTANT_P (from)))
549 {
550 emit_move_insn (to, from);
551 return;
552 }
553
554 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
555 {
556 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
557 abort ();
558
559 if (VECTOR_MODE_P (to_mode))
560 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
561 else
562 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
563
564 emit_move_insn (to, from);
565 return;
566 }
567
568 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
569 {
570 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
571 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
572 return;
573 }
574
575 if (to_real)
576 {
577 rtx value, insns;
578 convert_optab tab;
579
580 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
581 tab = sext_optab;
582 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
583 tab = trunc_optab;
584 else
585 abort ();
586
587 /* Try converting directly if the insn is supported. */
588
589 code = tab->handlers[to_mode][from_mode].insn_code;
590 if (code != CODE_FOR_nothing)
591 {
592 emit_unop_insn (code, to, from,
593 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
594 return;
595 }
596
597 /* Otherwise use a libcall. */
598 libcall = tab->handlers[to_mode][from_mode].libfunc;
599
600 if (!libcall)
601 /* This conversion is not implemented yet. */
602 abort ();
603
604 start_sequence ();
605 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
606 1, from, from_mode);
607 insns = get_insns ();
608 end_sequence ();
609 emit_libcall_block (insns, to, value,
610 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
611 from)
612 : gen_rtx_FLOAT_EXTEND (to_mode, from));
613 return;
614 }
615
616 /* Handle pointer conversion. */ /* SPEE 900220. */
617 /* Targets are expected to provide conversion insns between PxImode and
618 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
619 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
620 {
621 enum machine_mode full_mode
622 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
623
624 if (trunc_optab->handlers[to_mode][full_mode].insn_code
625 == CODE_FOR_nothing)
626 abort ();
627
628 if (full_mode != from_mode)
629 from = convert_to_mode (full_mode, from, unsignedp);
630 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
631 to, from, UNKNOWN);
632 return;
633 }
634 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
635 {
636 enum machine_mode full_mode
637 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
638
639 if (sext_optab->handlers[full_mode][from_mode].insn_code
640 == CODE_FOR_nothing)
641 abort ();
642
643 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
644 to, from, UNKNOWN);
645 if (to_mode == full_mode)
646 return;
647
648 /* else proceed to integer conversions below */
649 from_mode = full_mode;
650 }
651
652 /* Now both modes are integers. */
653
654 /* Handle expanding beyond a word. */
655 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
656 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
657 {
658 rtx insns;
659 rtx lowpart;
660 rtx fill_value;
661 rtx lowfrom;
662 int i;
663 enum machine_mode lowpart_mode;
664 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
665
666 /* Try converting directly if the insn is supported. */
667 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
668 != CODE_FOR_nothing)
669 {
670 /* If FROM is a SUBREG, put it into a register. Do this
671 so that we always generate the same set of insns for
672 better cse'ing; if an intermediate assignment occurred,
673 we won't be doing the operation directly on the SUBREG. */
674 if (optimize > 0 && GET_CODE (from) == SUBREG)
675 from = force_reg (from_mode, from);
676 emit_unop_insn (code, to, from, equiv_code);
677 return;
678 }
679 /* Next, try converting via full word. */
680 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
681 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
682 != CODE_FOR_nothing))
683 {
684 if (GET_CODE (to) == REG)
685 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
686 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
687 emit_unop_insn (code, to,
688 gen_lowpart (word_mode, to), equiv_code);
689 return;
690 }
691
692 /* No special multiword conversion insn; do it by hand. */
693 start_sequence ();
694
695 /* Since we will turn this into a no conflict block, we must ensure
696 that the source does not overlap the target. */
697
698 if (reg_overlap_mentioned_p (to, from))
699 from = force_reg (from_mode, from);
700
701 /* Get a copy of FROM widened to a word, if necessary. */
702 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
703 lowpart_mode = word_mode;
704 else
705 lowpart_mode = from_mode;
706
707 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
708
709 lowpart = gen_lowpart (lowpart_mode, to);
710 emit_move_insn (lowpart, lowfrom);
711
712 /* Compute the value to put in each remaining word. */
713 if (unsignedp)
714 fill_value = const0_rtx;
715 else
716 {
717 #ifdef HAVE_slt
718 if (HAVE_slt
719 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
720 && STORE_FLAG_VALUE == -1)
721 {
722 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
723 lowpart_mode, 0);
724 fill_value = gen_reg_rtx (word_mode);
725 emit_insn (gen_slt (fill_value));
726 }
727 else
728 #endif
729 {
730 fill_value
731 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
732 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
733 NULL_RTX, 0);
734 fill_value = convert_to_mode (word_mode, fill_value, 1);
735 }
736 }
737
738 /* Fill the remaining words. */
739 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
740 {
741 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
742 rtx subword = operand_subword (to, index, 1, to_mode);
743
744 if (subword == 0)
745 abort ();
746
747 if (fill_value != subword)
748 emit_move_insn (subword, fill_value);
749 }
750
751 insns = get_insns ();
752 end_sequence ();
753
754 emit_no_conflict_block (insns, to, from, NULL_RTX,
755 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
756 return;
757 }
758
759 /* Truncating multi-word to a word or less. */
760 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
761 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
762 {
763 if (!((GET_CODE (from) == MEM
764 && ! MEM_VOLATILE_P (from)
765 && direct_load[(int) to_mode]
766 && ! mode_dependent_address_p (XEXP (from, 0)))
767 || GET_CODE (from) == REG
768 || GET_CODE (from) == SUBREG))
769 from = force_reg (from_mode, from);
770 convert_move (to, gen_lowpart (word_mode, from), 0);
771 return;
772 }
773
774 /* Now follow all the conversions between integers
775 no more than a word long. */
776
777 /* For truncation, usually we can just refer to FROM in a narrower mode. */
778 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
779 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
780 GET_MODE_BITSIZE (from_mode)))
781 {
782 if (!((GET_CODE (from) == MEM
783 && ! MEM_VOLATILE_P (from)
784 && direct_load[(int) to_mode]
785 && ! mode_dependent_address_p (XEXP (from, 0)))
786 || GET_CODE (from) == REG
787 || GET_CODE (from) == SUBREG))
788 from = force_reg (from_mode, from);
789 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
790 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
791 from = copy_to_reg (from);
792 emit_move_insn (to, gen_lowpart (to_mode, from));
793 return;
794 }
795
796 /* Handle extension. */
797 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
798 {
799 /* Convert directly if that works. */
800 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
801 != CODE_FOR_nothing)
802 {
803 if (flag_force_mem)
804 from = force_not_mem (from);
805
806 emit_unop_insn (code, to, from, equiv_code);
807 return;
808 }
809 else
810 {
811 enum machine_mode intermediate;
812 rtx tmp;
813 tree shift_amount;
814
815 /* Search for a mode to convert via. */
816 for (intermediate = from_mode; intermediate != VOIDmode;
817 intermediate = GET_MODE_WIDER_MODE (intermediate))
818 if (((can_extend_p (to_mode, intermediate, unsignedp)
819 != CODE_FOR_nothing)
820 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
821 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
822 GET_MODE_BITSIZE (intermediate))))
823 && (can_extend_p (intermediate, from_mode, unsignedp)
824 != CODE_FOR_nothing))
825 {
826 convert_move (to, convert_to_mode (intermediate, from,
827 unsignedp), unsignedp);
828 return;
829 }
830
831 /* No suitable intermediate mode.
832 Generate what we need with shifts. */
833 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
834 - GET_MODE_BITSIZE (from_mode), 0);
835 from = gen_lowpart (to_mode, force_reg (from_mode, from));
836 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
837 to, unsignedp);
838 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
839 to, unsignedp);
840 if (tmp != to)
841 emit_move_insn (to, tmp);
842 return;
843 }
844 }
845
846 /* Support special truncate insns for certain modes. */
847 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
848 {
849 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
850 to, from, UNKNOWN);
851 return;
852 }
853
854 /* Handle truncation of volatile memrefs, and so on;
855 the things that couldn't be truncated directly,
856 and for which there was no special instruction.
857
858 ??? Code above formerly short-circuited this, for most integer
859 mode pairs, with a force_reg in from_mode followed by a recursive
860 call to this routine. Appears always to have been wrong. */
861 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
862 {
863 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
864 emit_move_insn (to, temp);
865 return;
866 }
867
868 /* Mode combination is not recognized. */
869 abort ();
870 }
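/* Illustrative sketch (assumption): widening a QImode pseudo to SImode
   with zero extension.

     rtx byte = gen_reg_rtx (QImode);
     rtx word = gen_reg_rtx (SImode);
     convert_move (word, byte, 1);              1 => treat BYTE as unsigned
*/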
871
872 /* Return an rtx for a value that would result
873 from converting X to mode MODE.
874 Both X and MODE may be floating, or both integer.
875 UNSIGNEDP is nonzero if X is an unsigned value.
876 This can be done by referring to a part of X in place
877 or by copying to a new temporary with conversion.
878
879 This function *must not* call protect_from_queue
880 except when putting X into an insn (in which case convert_move does it). */
881
882 rtx
883 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
884 {
885 return convert_modes (mode, VOIDmode, x, unsignedp);
886 }
887
888 /* Return an rtx for a value that would result
889 from converting X from mode OLDMODE to mode MODE.
890 Both modes may be floating, or both integer.
891 UNSIGNEDP is nonzero if X is an unsigned value.
892
893 This can be done by referring to a part of X in place
894 or by copying to a new temporary with conversion.
895
896 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
897
898 This function *must not* call protect_from_queue
899 except when putting X into an insn (in which case convert_move does it). */
900
901 rtx
902 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
903 {
904 rtx temp;
905
906 /* If FROM is a SUBREG that indicates that we have already done at least
907 the required extension, strip it. */
908
909 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
910 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
911 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
912 x = gen_lowpart (mode, x);
913
914 if (GET_MODE (x) != VOIDmode)
915 oldmode = GET_MODE (x);
916
917 if (mode == oldmode)
918 return x;
919
920 /* There is one case that we must handle specially: If we are converting
921 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
922 we are to interpret the constant as unsigned, gen_lowpart will do
923 the wrong thing if the constant appears negative. What we want to do is
924 make the high-order word of the constant zero, not all ones. */
925
926 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
927 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
928 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
929 {
930 HOST_WIDE_INT val = INTVAL (x);
931
932 if (oldmode != VOIDmode
933 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
934 {
935 int width = GET_MODE_BITSIZE (oldmode);
936
937 /* We need to zero extend VAL. */
938 val &= ((HOST_WIDE_INT) 1 << width) - 1;
939 }
940
941 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
942 }
943
944 /* We can do this with a gen_lowpart if both desired and current modes
945 are integer, and this is either a constant integer, a register, or a
946 non-volatile MEM. Except for the constant case where MODE is no
947 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
948
949 if ((GET_CODE (x) == CONST_INT
950 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
951 || (GET_MODE_CLASS (mode) == MODE_INT
952 && GET_MODE_CLASS (oldmode) == MODE_INT
953 && (GET_CODE (x) == CONST_DOUBLE
954 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
955 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
956 && direct_load[(int) mode])
957 || (GET_CODE (x) == REG
958 && (! HARD_REGISTER_P (x)
959 || HARD_REGNO_MODE_OK (REGNO (x), mode))
960 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
961 GET_MODE_BITSIZE (GET_MODE (x)))))))))
962 {
963 /* ?? If we don't know OLDMODE, we have to assume here that
964 X does not need sign- or zero-extension. This may not be
965 the case, but it's the best we can do. */
966 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
967 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
968 {
969 HOST_WIDE_INT val = INTVAL (x);
970 int width = GET_MODE_BITSIZE (oldmode);
971
972 /* We must sign or zero-extend in this case. Start by
973 zero-extending, then sign extend if we need to. */
974 val &= ((HOST_WIDE_INT) 1 << width) - 1;
975 if (! unsignedp
976 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
977 val |= (HOST_WIDE_INT) (-1) << width;
978
979 return gen_int_mode (val, mode);
980 }
981
982 return gen_lowpart (mode, x);
983 }
984
985 /* Converting an integer constant into a vector mode is always equivalent
986 to a subreg operation. */
987 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
988 {
989 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
990 abort ();
991 return simplify_gen_subreg (mode, x, oldmode, 0);
992 }
993
994 temp = gen_reg_rtx (mode);
995 convert_move (temp, x, unsignedp);
996 return temp;
997 }
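/* Worked example (assumption): converting a constant whose old mode is
   known, so the extension choice is well defined.

     rtx x = GEN_INT (-1);
     rtx y = convert_modes (SImode, QImode, x, 1);

   Here OLDMODE is QImode and UNSIGNEDP is 1, so the value is first
   zero-extended and Y is (const_int 255).  */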
998 \f
999 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1000 store efficiently. Due to internal GCC limitations, this is
1001 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1002 for an immediate constant. */
1003
1004 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1005
1006 /* Determine whether the LEN bytes can be moved by using several move
1007 instructions. Return nonzero if a call to move_by_pieces should
1008 succeed. */
1009
1010 int
1011 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1012 unsigned int align ATTRIBUTE_UNUSED)
1013 {
1014 return MOVE_BY_PIECES_P (len, align);
1015 }
1016
1017 /* Generate several move instructions to copy LEN bytes from block FROM to
1018 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1019 and TO through protect_from_queue before calling.
1020
1021 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1022 used to push FROM to the stack.
1023
1024 ALIGN is maximum stack alignment we can assume.
1025
1026 If ENDP is 0 return TO; if ENDP is 1 return the memory at the end, a la
1027 mempcpy; and if ENDP is 2 return the memory at the end minus one byte,
1028 a la stpcpy. */
1029
1030 rtx
1031 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1032 unsigned int align, int endp)
1033 {
1034 struct move_by_pieces data;
1035 rtx to_addr, from_addr = XEXP (from, 0);
1036 unsigned int max_size = MOVE_MAX_PIECES + 1;
1037 enum machine_mode mode = VOIDmode, tmode;
1038 enum insn_code icode;
1039
1040 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1041
1042 data.offset = 0;
1043 data.from_addr = from_addr;
1044 if (to)
1045 {
1046 to_addr = XEXP (to, 0);
1047 data.to = to;
1048 data.autinc_to
1049 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1050 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1051 data.reverse
1052 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1053 }
1054 else
1055 {
1056 to_addr = NULL_RTX;
1057 data.to = NULL_RTX;
1058 data.autinc_to = 1;
1059 #ifdef STACK_GROWS_DOWNWARD
1060 data.reverse = 1;
1061 #else
1062 data.reverse = 0;
1063 #endif
1064 }
1065 data.to_addr = to_addr;
1066 data.from = from;
1067 data.autinc_from
1068 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1069 || GET_CODE (from_addr) == POST_INC
1070 || GET_CODE (from_addr) == POST_DEC);
1071
1072 data.explicit_inc_from = 0;
1073 data.explicit_inc_to = 0;
1074 if (data.reverse) data.offset = len;
1075 data.len = len;
1076
1077 /* If copying requires more than two move insns,
1078 copy addresses to registers (to make displacements shorter)
1079 and use post-increment if available. */
1080 if (!(data.autinc_from && data.autinc_to)
1081 && move_by_pieces_ninsns (len, align) > 2)
1082 {
1083 /* Find the mode of the largest move... */
1084 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1085 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1086 if (GET_MODE_SIZE (tmode) < max_size)
1087 mode = tmode;
1088
1089 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1090 {
1091 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1092 data.autinc_from = 1;
1093 data.explicit_inc_from = -1;
1094 }
1095 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1096 {
1097 data.from_addr = copy_addr_to_reg (from_addr);
1098 data.autinc_from = 1;
1099 data.explicit_inc_from = 1;
1100 }
1101 if (!data.autinc_from && CONSTANT_P (from_addr))
1102 data.from_addr = copy_addr_to_reg (from_addr);
1103 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1104 {
1105 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1106 data.autinc_to = 1;
1107 data.explicit_inc_to = -1;
1108 }
1109 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1110 {
1111 data.to_addr = copy_addr_to_reg (to_addr);
1112 data.autinc_to = 1;
1113 data.explicit_inc_to = 1;
1114 }
1115 if (!data.autinc_to && CONSTANT_P (to_addr))
1116 data.to_addr = copy_addr_to_reg (to_addr);
1117 }
1118
1119 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1120 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1121 align = MOVE_MAX * BITS_PER_UNIT;
1122
1123 /* First move what we can in the largest integer mode, then go to
1124 successively smaller modes. */
1125
1126 while (max_size > 1)
1127 {
1128 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1129 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1130 if (GET_MODE_SIZE (tmode) < max_size)
1131 mode = tmode;
1132
1133 if (mode == VOIDmode)
1134 break;
1135
1136 icode = mov_optab->handlers[(int) mode].insn_code;
1137 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1138 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1139
1140 max_size = GET_MODE_SIZE (mode);
1141 }
1142
1143 /* The code above should have handled everything. */
1144 if (data.len > 0)
1145 abort ();
1146
1147 if (endp)
1148 {
1149 rtx to1;
1150
1151 if (data.reverse)
1152 abort ();
1153 if (data.autinc_to)
1154 {
1155 if (endp == 2)
1156 {
1157 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1158 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1159 else
1160 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1161 -1));
1162 }
1163 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1164 data.offset);
1165 }
1166 else
1167 {
1168 if (endp == 2)
1169 --data.offset;
1170 to1 = adjust_address (data.to, QImode, data.offset);
1171 }
1172 return to1;
1173 }
1174 else
1175 return data.to;
1176 }
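/* Illustrative sketch (assumption): using the ENDP argument to obtain a
   mempcpy-style result, i.e. the memory just past the copied block.

     if (can_move_by_pieces (len, align))
       end = move_by_pieces (dst, src, len, align, 1);
*/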
1177
1178 /* Return number of insns required to move L bytes by pieces.
1179 ALIGN (in bits) is maximum alignment we can assume. */
1180
1181 static unsigned HOST_WIDE_INT
1182 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1183 {
1184 unsigned HOST_WIDE_INT n_insns = 0;
1185 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1186
1187 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1188 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1189 align = MOVE_MAX * BITS_PER_UNIT;
1190
1191 while (max_size > 1)
1192 {
1193 enum machine_mode mode = VOIDmode, tmode;
1194 enum insn_code icode;
1195
1196 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1197 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1198 if (GET_MODE_SIZE (tmode) < max_size)
1199 mode = tmode;
1200
1201 if (mode == VOIDmode)
1202 break;
1203
1204 icode = mov_optab->handlers[(int) mode].insn_code;
1205 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1206 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1207
1208 max_size = GET_MODE_SIZE (mode);
1209 }
1210
1211 if (l)
1212 abort ();
1213 return n_insns;
1214 }
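/* Worked example (assuming MOVE_MAX == 4 and sufficient alignment):
   L == 10 decomposes into two SImode moves, one HImode move and no
   QImode move, so this function returns 3.  */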
1215
1216 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1217 with move instructions for mode MODE. GENFUN is the gen_... function
1218 to make a move insn for that mode. DATA has all the other info. */
1219
1220 static void
1221 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1222 struct move_by_pieces *data)
1223 {
1224 unsigned int size = GET_MODE_SIZE (mode);
1225 rtx to1 = NULL_RTX, from1;
1226
1227 while (data->len >= size)
1228 {
1229 if (data->reverse)
1230 data->offset -= size;
1231
1232 if (data->to)
1233 {
1234 if (data->autinc_to)
1235 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1236 data->offset);
1237 else
1238 to1 = adjust_address (data->to, mode, data->offset);
1239 }
1240
1241 if (data->autinc_from)
1242 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1243 data->offset);
1244 else
1245 from1 = adjust_address (data->from, mode, data->offset);
1246
1247 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1248 emit_insn (gen_add2_insn (data->to_addr,
1249 GEN_INT (-(HOST_WIDE_INT)size)));
1250 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1251 emit_insn (gen_add2_insn (data->from_addr,
1252 GEN_INT (-(HOST_WIDE_INT)size)));
1253
1254 if (data->to)
1255 emit_insn ((*genfun) (to1, from1));
1256 else
1257 {
1258 #ifdef PUSH_ROUNDING
1259 emit_single_push_insn (mode, from1, NULL);
1260 #else
1261 abort ();
1262 #endif
1263 }
1264
1265 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1266 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1267 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1268 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1269
1270 if (! data->reverse)
1271 data->offset += size;
1272
1273 data->len -= size;
1274 }
1275 }
1276 \f
1277 /* Emit code to move a block Y to a block X. This may be done with
1278 string-move instructions, with multiple scalar move instructions,
1279 or with a library call.
1280
1281 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1282 SIZE is an rtx that says how long they are.
1283 ALIGN is the maximum alignment we can assume they have.
1284 METHOD describes what kind of copy this is, and what mechanisms may be used.
1285
1286 Return the address of the new block, if memcpy is called and returns it,
1287 0 otherwise. */
1288
1289 rtx
1290 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1291 {
1292 bool may_use_call;
1293 rtx retval = 0;
1294 unsigned int align;
1295
1296 switch (method)
1297 {
1298 case BLOCK_OP_NORMAL:
1299 may_use_call = true;
1300 break;
1301
1302 case BLOCK_OP_CALL_PARM:
1303 may_use_call = block_move_libcall_safe_for_call_parm ();
1304
1305 /* Make inhibit_defer_pop nonzero around the library call
1306 to force it to pop the arguments right away. */
1307 NO_DEFER_POP;
1308 break;
1309
1310 case BLOCK_OP_NO_LIBCALL:
1311 may_use_call = false;
1312 break;
1313
1314 default:
1315 abort ();
1316 }
1317
1318 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1319
1320 if (GET_MODE (x) != BLKmode)
1321 abort ();
1322 if (GET_MODE (y) != BLKmode)
1323 abort ();
1324
1325 x = protect_from_queue (x, 1);
1326 y = protect_from_queue (y, 0);
1327 size = protect_from_queue (size, 0);
1328
1329 if (GET_CODE (x) != MEM)
1330 abort ();
1331 if (GET_CODE (y) != MEM)
1332 abort ();
1333 if (size == 0)
1334 abort ();
1335
1336 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1337 can be incorrect is coming from __builtin_memcpy. */
1338 if (GET_CODE (size) == CONST_INT)
1339 {
1340 if (INTVAL (size) == 0)
1341 return 0;
1342
1343 x = shallow_copy_rtx (x);
1344 y = shallow_copy_rtx (y);
1345 set_mem_size (x, size);
1346 set_mem_size (y, size);
1347 }
1348
1349 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1350 move_by_pieces (x, y, INTVAL (size), align, 0);
1351 else if (emit_block_move_via_movstr (x, y, size, align))
1352 ;
1353 else if (may_use_call)
1354 retval = emit_block_move_via_libcall (x, y, size);
1355 else
1356 emit_block_move_via_loop (x, y, size, align);
1357
1358 if (method == BLOCK_OP_CALL_PARM)
1359 OK_DEFER_POP;
1360
1361 return retval;
1362 }
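/* Illustrative sketch (assumption): DST, SRC and SIZE_RTX are hypothetical.
   An ordinary aggregate copy may use a memcpy libcall:

     emit_block_move (dst, src, size_rtx, BLOCK_OP_NORMAL);

   A copy emitted while outgoing arguments may already be on the stack passes
   BLOCK_OP_CALL_PARM, so a libcall is used only when
   block_move_libcall_safe_for_call_parm says it cannot clobber them;
   BLOCK_OP_NO_LIBCALL forbids the call entirely and falls back to the
   explicit loop when no movstr pattern applies.  */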
1363
1364 /* A subroutine of emit_block_move. Returns true if calling the
1365 block move libcall will not clobber any parameters which may have
1366 already been placed on the stack. */
1367
1368 static bool
1369 block_move_libcall_safe_for_call_parm (void)
1370 {
1371 /* If arguments are pushed on the stack, then they're safe. */
1372 if (PUSH_ARGS)
1373 return true;
1374
1375 /* If registers go on the stack anyway, any argument is sure to clobber
1376 an outgoing argument. */
1377 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1378 {
1379 tree fn = emit_block_move_libcall_fn (false);
1380 (void) fn;
1381 if (REG_PARM_STACK_SPACE (fn) != 0)
1382 return false;
1383 }
1384 #endif
1385
1386 /* If any argument goes in memory, then it might clobber an outgoing
1387 argument. */
1388 {
1389 CUMULATIVE_ARGS args_so_far;
1390 tree fn, arg;
1391
1392 fn = emit_block_move_libcall_fn (false);
1393 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1394
1395 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1396 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1397 {
1398 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1399 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1400 if (!tmp || !REG_P (tmp))
1401 return false;
1402 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1403 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1404 NULL_TREE, 1))
1405 return false;
1406 #endif
1407 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1408 }
1409 }
1410 return true;
1411 }
1412
1413 /* A subroutine of emit_block_move. Expand a movstr pattern;
1414 return true if successful. */
1415
1416 static bool
1417 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1418 {
1419 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1420 enum machine_mode mode;
1421
1422 /* Since this is a move insn, we don't care about volatility. */
1423 volatile_ok = 1;
1424
1425 /* Try the most limited insn first, because there's no point
1426 including more than one in the machine description unless
1427 the more limited one has some advantage. */
1428
1429 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1430 mode = GET_MODE_WIDER_MODE (mode))
1431 {
1432 enum insn_code code = movstr_optab[(int) mode];
1433 insn_operand_predicate_fn pred;
1434
1435 if (code != CODE_FOR_nothing
1436 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1437 here because if SIZE is less than the mode mask, as it is
1438 returned by the macro, it will definitely be less than the
1439 actual mode mask. */
1440 && ((GET_CODE (size) == CONST_INT
1441 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1442 <= (GET_MODE_MASK (mode) >> 1)))
1443 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1444 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1445 || (*pred) (x, BLKmode))
1446 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1447 || (*pred) (y, BLKmode))
1448 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1449 || (*pred) (opalign, VOIDmode)))
1450 {
1451 rtx op2;
1452 rtx last = get_last_insn ();
1453 rtx pat;
1454
1455 op2 = convert_to_mode (mode, size, 1);
1456 pred = insn_data[(int) code].operand[2].predicate;
1457 if (pred != 0 && ! (*pred) (op2, mode))
1458 op2 = copy_to_mode_reg (mode, op2);
1459
1460 /* ??? When called via emit_block_move_for_call, it'd be
1461 nice if there were some way to inform the backend, so
1462 that it doesn't fail the expansion because it thinks
1463 emitting the libcall would be more efficient. */
1464
1465 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1466 if (pat)
1467 {
1468 emit_insn (pat);
1469 volatile_ok = 0;
1470 return true;
1471 }
1472 else
1473 delete_insns_since (last);
1474 }
1475 }
1476
1477 volatile_ok = 0;
1478 return false;
1479 }
1480
1481 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1482 Return the return value from memcpy, 0 otherwise. */
1483
1484 static rtx
1485 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1486 {
1487 rtx dst_addr, src_addr;
1488 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1489 enum machine_mode size_mode;
1490 rtx retval;
1491
1492 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1493
1494 It is unsafe to save the value generated by protect_from_queue and reuse
1495 it later. Consider what happens if emit_queue is called before the
1496 return value from protect_from_queue is used.
1497
1498 Expansion of the CALL_EXPR below will call emit_queue before we are
1499 finished emitting RTL for argument setup. So if we are not careful we
1500 could get the wrong value for an argument.
1501
1502 To avoid this problem we go ahead and emit code to copy the addresses of
1503 DST and SRC and SIZE into new pseudos. We can then place those new
1504 pseudos into an RTL_EXPR and use them later, even after a call to
1505 emit_queue.
1506
1507 Note this is not strictly needed for library calls since they do not call
1508 emit_queue before loading their arguments. However, we may need to have
1509 library calls call emit_queue in the future since failing to do so could
1510 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1511 arguments in registers. */
1512
1513 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1514 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1515
1516 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1517 src_addr = convert_memory_address (ptr_mode, src_addr);
1518
1519 dst_tree = make_tree (ptr_type_node, dst_addr);
1520 src_tree = make_tree (ptr_type_node, src_addr);
1521
1522 if (TARGET_MEM_FUNCTIONS)
1523 size_mode = TYPE_MODE (sizetype);
1524 else
1525 size_mode = TYPE_MODE (unsigned_type_node);
1526
1527 size = convert_to_mode (size_mode, size, 1);
1528 size = copy_to_mode_reg (size_mode, size);
1529
1530 /* It is incorrect to use the libcall calling conventions to call
1531 memcpy in this context. This could be a user call to memcpy and
1532 the user may wish to examine the return value from memcpy. For
1533 targets where libcalls and normal calls have different conventions
1534 for returning pointers, we could end up generating incorrect code.
1535
1536 For convenience, we generate the call to bcopy this way as well. */
1537
1538 if (TARGET_MEM_FUNCTIONS)
1539 size_tree = make_tree (sizetype, size);
1540 else
1541 size_tree = make_tree (unsigned_type_node, size);
1542
1543 fn = emit_block_move_libcall_fn (true);
1544 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1545 if (TARGET_MEM_FUNCTIONS)
1546 {
1547 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1548 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1549 }
1550 else
1551 {
1552 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1553 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1554 }
1555
1556 /* Now we have to build up the CALL_EXPR itself. */
1557 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1558 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1559 call_expr, arg_list, NULL_TREE);
1560
1561 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1562
1563 /* If we are initializing a readonly value, show the above call clobbered
1564 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1565 the delay slot scheduler might overlook conflicts and take nasty
1566 decisions. */
1567 if (RTX_UNCHANGING_P (dst))
1568 add_function_usage_to
1569 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1570 gen_rtx_CLOBBER (VOIDmode, dst),
1571 NULL_RTX));
1572
1573 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1574 }
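/* In source terms the call built above is equivalent to
   memcpy (dst, src, size) when TARGET_MEM_FUNCTIONS, and to
   bcopy (src, dst, size) otherwise; note the swapped pointer order.  */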
1575
1576 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1577 for the function we use for block copies. The first time FOR_CALL
1578 is true, we call assemble_external. */
1579
1580 static GTY(()) tree block_move_fn;
1581
1582 void
1583 init_block_move_fn (const char *asmspec)
1584 {
1585 if (!block_move_fn)
1586 {
1587 tree args, fn;
1588
1589 if (TARGET_MEM_FUNCTIONS)
1590 {
1591 fn = get_identifier ("memcpy");
1592 args = build_function_type_list (ptr_type_node, ptr_type_node,
1593 const_ptr_type_node, sizetype,
1594 NULL_TREE);
1595 }
1596 else
1597 {
1598 fn = get_identifier ("bcopy");
1599 args = build_function_type_list (void_type_node, const_ptr_type_node,
1600 ptr_type_node, unsigned_type_node,
1601 NULL_TREE);
1602 }
1603
1604 fn = build_decl (FUNCTION_DECL, fn, args);
1605 DECL_EXTERNAL (fn) = 1;
1606 TREE_PUBLIC (fn) = 1;
1607 DECL_ARTIFICIAL (fn) = 1;
1608 TREE_NOTHROW (fn) = 1;
1609
1610 block_move_fn = fn;
1611 }
1612
1613 if (asmspec)
1614 {
1615 SET_DECL_RTL (block_move_fn, NULL_RTX);
1616 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1617 }
1618 }
1619
1620 static tree
1621 emit_block_move_libcall_fn (int for_call)
1622 {
1623 static bool emitted_extern;
1624
1625 if (!block_move_fn)
1626 init_block_move_fn (NULL);
1627
1628 if (for_call && !emitted_extern)
1629 {
1630 emitted_extern = true;
1631 make_decl_rtl (block_move_fn, NULL);
1632 assemble_external (block_move_fn);
1633 }
1634
1635 return block_move_fn;
1636 }
1637
1638 /* A subroutine of emit_block_move. Copy the data via an explicit
1639 loop. This is used only when libcalls are forbidden. */
1640 /* ??? It'd be nice to copy in hunks larger than QImode. */
1641
1642 static void
1643 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1644 unsigned int align ATTRIBUTE_UNUSED)
1645 {
1646 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1647 enum machine_mode iter_mode;
1648
1649 iter_mode = GET_MODE (size);
1650 if (iter_mode == VOIDmode)
1651 iter_mode = word_mode;
1652
1653 top_label = gen_label_rtx ();
1654 cmp_label = gen_label_rtx ();
1655 iter = gen_reg_rtx (iter_mode);
1656
1657 emit_move_insn (iter, const0_rtx);
1658
1659 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1660 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1661 do_pending_stack_adjust ();
1662
1663 emit_note (NOTE_INSN_LOOP_BEG);
1664
1665 emit_jump (cmp_label);
1666 emit_label (top_label);
1667
1668 tmp = convert_modes (Pmode, iter_mode, iter, true);
1669 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1670 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1671 x = change_address (x, QImode, x_addr);
1672 y = change_address (y, QImode, y_addr);
1673
1674 emit_move_insn (x, y);
1675
1676 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1677 true, OPTAB_LIB_WIDEN);
1678 if (tmp != iter)
1679 emit_move_insn (iter, tmp);
1680
1681 emit_note (NOTE_INSN_LOOP_CONT);
1682 emit_label (cmp_label);
1683
1684 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1685 true, top_label);
1686
1687 emit_note (NOTE_INSN_LOOP_END);
1688 }
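/* The RTL emitted above corresponds roughly to this C loop (a sketch;
   as the ??? comment notes, the copy proceeds one byte at a time):

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];
*/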
1689 \f
1690 /* Copy all or part of a value X into registers starting at REGNO.
1691 The number of registers to be filled is NREGS. */
1692
1693 void
1694 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1695 {
1696 int i;
1697 #ifdef HAVE_load_multiple
1698 rtx pat;
1699 rtx last;
1700 #endif
1701
1702 if (nregs == 0)
1703 return;
1704
1705 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1706 x = validize_mem (force_const_mem (mode, x));
1707
1708 /* See if the machine can do this with a load multiple insn. */
1709 #ifdef HAVE_load_multiple
1710 if (HAVE_load_multiple)
1711 {
1712 last = get_last_insn ();
1713 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1714 GEN_INT (nregs));
1715 if (pat)
1716 {
1717 emit_insn (pat);
1718 return;
1719 }
1720 else
1721 delete_insns_since (last);
1722 }
1723 #endif
1724
1725 for (i = 0; i < nregs; i++)
1726 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1727 operand_subword_force (x, i, mode));
1728 }
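/* Worked example (assumptions: 32-bit words, and hard register number 3 is
   hypothetical): move_block_to_reg (3, x, 2, DImode) moves word 0 of the
   DImode value X into hard register 3 and word 1 into hard register 4,
   each move done in word_mode.  */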
1729
1730 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1731 The number of registers to be filled is NREGS. */
1732
1733 void
1734 move_block_from_reg (int regno, rtx x, int nregs)
1735 {
1736 int i;
1737
1738 if (nregs == 0)
1739 return;
1740
1741 /* See if the machine can do this with a store multiple insn. */
1742 #ifdef HAVE_store_multiple
1743 if (HAVE_store_multiple)
1744 {
1745 rtx last = get_last_insn ();
1746 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1747 GEN_INT (nregs));
1748 if (pat)
1749 {
1750 emit_insn (pat);
1751 return;
1752 }
1753 else
1754 delete_insns_since (last);
1755 }
1756 #endif
1757
1758 for (i = 0; i < nregs; i++)
1759 {
1760 rtx tem = operand_subword (x, i, 1, BLKmode);
1761
1762 if (tem == 0)
1763 abort ();
1764
1765 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1766 }
1767 }
1768
1769 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1770 ORIG, where ORIG is a non-consecutive group of registers represented by
1771 a PARALLEL. The clone is identical to the original except in that the
1772 original set of registers is replaced by a new set of pseudo registers.
1773 The new set has the same modes as the original set. */
1774
1775 rtx
1776 gen_group_rtx (rtx orig)
1777 {
1778 int i, length;
1779 rtx *tmps;
1780
1781 if (GET_CODE (orig) != PARALLEL)
1782 abort ();
1783
1784 length = XVECLEN (orig, 0);
1785 tmps = alloca (sizeof (rtx) * length);
1786
1787 /* Skip a NULL entry in the first slot. */
1788 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1789
1790 if (i)
1791 tmps[0] = 0;
1792
1793 for (; i < length; i++)
1794 {
1795 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1796 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1797
1798 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1799 }
1800
1801 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1802 }
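
/* Usage sketch for gen_group_rtx (illustrative only; GROUP is assumed to
   be a PARALLEL such as the one built for a multi-register return value):

     rtx copy = gen_group_rtx (group);

   COPY has the same shape, modes and byte offsets as GROUP, but each hard
   register has been replaced by a fresh pseudo.  */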
1803
1804 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1805 where DST is non-consecutive registers represented by a PARALLEL.
1806 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1807 if not known. */
1808
1809 void
1810 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1811 {
1812 rtx *tmps, src;
1813 int start, i;
1814
1815 if (GET_CODE (dst) != PARALLEL)
1816 abort ();
1817
1818 /* Check for a NULL entry, used to indicate that the parameter goes
1819 both on the stack and in registers. */
1820 if (XEXP (XVECEXP (dst, 0, 0), 0))
1821 start = 0;
1822 else
1823 start = 1;
1824
1825 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1826
1827 /* Process the pieces. */
1828 for (i = start; i < XVECLEN (dst, 0); i++)
1829 {
1830 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1831 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1832 unsigned int bytelen = GET_MODE_SIZE (mode);
1833 int shift = 0;
1834
1835 /* Handle trailing fragments that run over the size of the struct. */
1836 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1837 {
1838 /* Arrange to shift the fragment to where it belongs.
1839 extract_bit_field loads to the lsb of the reg. */
1840 if (
1841 #ifdef BLOCK_REG_PADDING
1842 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1843 == (BYTES_BIG_ENDIAN ? upward : downward)
1844 #else
1845 BYTES_BIG_ENDIAN
1846 #endif
1847 )
1848 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1849 bytelen = ssize - bytepos;
1850 if (bytelen <= 0)
1851 abort ();
1852 }
1853
1854 /* If we won't be loading directly from memory, protect the real source
1855 from strange tricks we might play; but make sure that the source can
1856 be loaded directly into the destination. */
1857 src = orig_src;
1858 if (GET_CODE (orig_src) != MEM
1859 && (!CONSTANT_P (orig_src)
1860 || (GET_MODE (orig_src) != mode
1861 && GET_MODE (orig_src) != VOIDmode)))
1862 {
1863 if (GET_MODE (orig_src) == VOIDmode)
1864 src = gen_reg_rtx (mode);
1865 else
1866 src = gen_reg_rtx (GET_MODE (orig_src));
1867
1868 emit_move_insn (src, orig_src);
1869 }
1870
1871 /* Optimize the access just a bit. */
1872 if (GET_CODE (src) == MEM
1873 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1874 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1875 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1876 && bytelen == GET_MODE_SIZE (mode))
1877 {
1878 tmps[i] = gen_reg_rtx (mode);
1879 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1880 }
1881 else if (GET_CODE (src) == CONCAT)
1882 {
1883 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1884 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1885
1886 if ((bytepos == 0 && bytelen == slen0)
1887 || (bytepos != 0 && bytepos + bytelen <= slen))
1888 {
1889 /* The following assumes that the concatenated objects all
1890 have the same size. In this case, a simple calculation
1891 can be used to determine the object and the bit field
1892 to be extracted. */
1893 tmps[i] = XEXP (src, bytepos / slen0);
1894 if (! CONSTANT_P (tmps[i])
1895 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1896 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1897 (bytepos % slen0) * BITS_PER_UNIT,
1898 1, NULL_RTX, mode, mode, ssize);
1899 }
1900 else if (bytepos == 0)
1901 {
1902 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1903 emit_move_insn (mem, src);
1904 tmps[i] = adjust_address (mem, mode, 0);
1905 }
1906 else
1907 abort ();
1908 }
1909 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1910 SIMD register, which is currently broken. Until we get GCC
1911 to emit proper RTL for these cases, let's dump to memory. */
1912 else if (VECTOR_MODE_P (GET_MODE (dst))
1913 && GET_CODE (src) == REG)
1914 {
1915 int slen = GET_MODE_SIZE (GET_MODE (src));
1916 rtx mem;
1917
1918 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1919 emit_move_insn (mem, src);
1920 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1921 }
1922 else if (CONSTANT_P (src)
1923 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1924 tmps[i] = src;
1925 else
1926 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1927 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1928 mode, mode, ssize);
1929
1930 if (shift)
1931 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1932 tmps[i], 0, OPTAB_WIDEN);
1933 }
1934
1935 emit_queue ();
1936
1937 /* Copy the extracted pieces into the proper (probable) hard regs. */
1938 for (i = start; i < XVECLEN (dst, 0); i++)
1939 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1940 }
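
/* Usage sketch for emit_group_load (illustrative only; the PARALLEL, the
   source MEM and the 16-byte size are hypothetical):

     emit_group_load (dst_parallel, src_mem, type, 16);

   extracts each piece described by DST_PARALLEL from SRC_MEM and copies it
   into the corresponding (probable) hard register.  */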
1941
1942 /* Emit code to move a block SRC to block DST, where SRC and DST are
1943 non-consecutive groups of registers, each represented by a PARALLEL. */
1944
1945 void
1946 emit_group_move (rtx dst, rtx src)
1947 {
1948 int i;
1949
1950 if (GET_CODE (src) != PARALLEL
1951 || GET_CODE (dst) != PARALLEL
1952 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1953 abort ();
1954
1955 /* Skip first entry if NULL. */
1956 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1957 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1958 XEXP (XVECEXP (src, 0, i), 0));
1959 }
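
/* Usage sketch for emit_group_move (illustrative only; both arguments are
   assumed to be PARALLELs of identical shape, e.g. one obtained from
   gen_group_rtx):

     rtx pseudos = gen_group_rtx (hard_group);
     emit_group_move (pseudos, hard_group);

   copies every register of HARD_GROUP into the matching pseudo.  */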
1960
1961 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1962 where SRC is non-consecutive registers represented by a PARALLEL.
1963 SSIZE represents the total size of block ORIG_DST, or -1 if not
1964 known. */
1965
1966 void
1967 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1968 {
1969 rtx *tmps, dst;
1970 int start, i;
1971
1972 if (GET_CODE (src) != PARALLEL)
1973 abort ();
1974
1975 /* Check for a NULL entry, used to indicate that the parameter goes
1976 both on the stack and in registers. */
1977 if (XEXP (XVECEXP (src, 0, 0), 0))
1978 start = 0;
1979 else
1980 start = 1;
1981
1982 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1983
1984 /* Copy the (probable) hard regs into pseudos. */
1985 for (i = start; i < XVECLEN (src, 0); i++)
1986 {
1987 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1988 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1989 emit_move_insn (tmps[i], reg);
1990 }
1991 emit_queue ();
1992
1993 /* If we won't be storing directly into memory, protect the real destination
1994 from strange tricks we might play. */
1995 dst = orig_dst;
1996 if (GET_CODE (dst) == PARALLEL)
1997 {
1998 rtx temp;
1999
2000 /* We can get a PARALLEL dst if there is a conditional expression in
2001 a return statement. In that case, the dst and src are the same,
2002 so no action is necessary. */
2003 if (rtx_equal_p (dst, src))
2004 return;
2005
2006 /* It is unclear if we can ever reach here, but we may as well handle
2007 it. Allocate a temporary, and split this into a store/load to/from
2008 the temporary. */
2009
2010 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2011 emit_group_store (temp, src, type, ssize);
2012 emit_group_load (dst, temp, type, ssize);
2013 return;
2014 }
2015 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2016 {
2017 dst = gen_reg_rtx (GET_MODE (orig_dst));
2018 /* Make life a bit easier for combine. */
2019 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2020 }
2021
2022 /* Process the pieces. */
2023 for (i = start; i < XVECLEN (src, 0); i++)
2024 {
2025 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2026 enum machine_mode mode = GET_MODE (tmps[i]);
2027 unsigned int bytelen = GET_MODE_SIZE (mode);
2028 rtx dest = dst;
2029
2030 /* Handle trailing fragments that run over the size of the struct. */
2031 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2032 {
2033 /* store_bit_field always takes its value from the lsb.
2034 Move the fragment to the lsb if it's not already there. */
2035 if (
2036 #ifdef BLOCK_REG_PADDING
2037 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2038 == (BYTES_BIG_ENDIAN ? upward : downward)
2039 #else
2040 BYTES_BIG_ENDIAN
2041 #endif
2042 )
2043 {
2044 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2045 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2046 tmps[i], 0, OPTAB_WIDEN);
2047 }
2048 bytelen = ssize - bytepos;
2049 }
2050
2051 if (GET_CODE (dst) == CONCAT)
2052 {
2053 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2054 dest = XEXP (dst, 0);
2055 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2056 {
2057 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2058 dest = XEXP (dst, 1);
2059 }
2060 else if (bytepos == 0 && XVECLEN (src, 0))
2061 {
2062 dest = assign_stack_temp (GET_MODE (dest),
2063 GET_MODE_SIZE (GET_MODE (dest)), 0);
2064 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2065 tmps[i]);
2066 dst = dest;
2067 break;
2068 }
2069 else
2070 abort ();
2071 }
2072
2073 /* Optimize the access just a bit. */
2074 if (GET_CODE (dest) == MEM
2075 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2076 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2077 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2078 && bytelen == GET_MODE_SIZE (mode))
2079 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2080 else
2081 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2082 mode, tmps[i], ssize);
2083 }
2084
2085 emit_queue ();
2086
2087 /* Copy from the pseudo into the (probable) hard reg. */
2088 if (orig_dst != dst)
2089 emit_move_insn (orig_dst, dst);
2090 }
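
/* Usage sketch for emit_group_store (illustrative only; the PARALLEL
   return value and the 12-byte destination MEM are hypothetical):

     emit_group_store (result_mem, retval_parallel, type, 12);

   spills each register piece of RETVAL_PARALLEL into RESULT_MEM at the
   byte offset recorded in the PARALLEL.  */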
2091
2092 /* Generate code to copy a BLKmode object of TYPE out of a
2093 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2094 is null, a stack temporary is created. TGTBLK is returned.
2095
2096 The purpose of this routine is to handle functions that return
2097 BLKmode structures in registers. Some machines (the PA for example)
2098 want to return all small structures in registers regardless of the
2099 structure's alignment. */
2100
2101 rtx
2102 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2103 {
2104 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
2106 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2107 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2108
2109 if (tgtblk == 0)
2110 {
2111 tgtblk = assign_temp (build_qualified_type (type,
2112 (TYPE_QUALS (type)
2113 | TYPE_QUAL_CONST)),
2114 0, 1, 1);
2115 preserve_temp_slots (tgtblk);
2116 }
2117
2118 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2119 into a new pseudo which is a full word. */
2120
2121 if (GET_MODE (srcreg) != BLKmode
2122 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2123 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2124
2125 /* If the structure doesn't take up a whole number of words, see whether
2126 SRCREG is padded on the left or on the right. If it's on the left,
2127 set PADDING_CORRECTION to the number of bits to skip.
2128
2129 In most ABIs, the structure will be returned at the least significant end of
2130 the register, which translates to right padding on little-endian
2131 targets and left padding on big-endian targets. The opposite
2132 holds if the structure is returned at the most significant
2133 end of the register. */
2134 if (bytes % UNITS_PER_WORD != 0
2135 && (targetm.calls.return_in_msb (type)
2136 ? !BYTES_BIG_ENDIAN
2137 : BYTES_BIG_ENDIAN))
2138 padding_correction
2139 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2140
2141 /* Copy the structure BITSIZE bits at a time.
2142
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2145 time. */
2146 for (bitpos = 0, xbitpos = padding_correction;
2147 bitpos < bytes * BITS_PER_UNIT;
2148 bitpos += bitsize, xbitpos += bitsize)
2149 {
2150 /* We need a new source operand each time xbitpos is on a
2151 word boundary and when xbitpos == padding_correction
2152 (the first time through). */
2153 if (xbitpos % BITS_PER_WORD == 0
2154 || xbitpos == padding_correction)
2155 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2156 GET_MODE (srcreg));
2157
2158 /* We need a new destination operand each time bitpos is on
2159 a word boundary. */
2160 if (bitpos % BITS_PER_WORD == 0)
2161 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2162
2163 /* Use xbitpos for the source extraction (right justified) and
2164 bitpos for the destination store (left justified). */
2165 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2166 extract_bit_field (src, bitsize,
2167 xbitpos % BITS_PER_WORD, 1,
2168 NULL_RTX, word_mode, word_mode,
2169 BITS_PER_WORD),
2170 BITS_PER_WORD);
2171 }
2172
2173 return tgtblk;
2174 }
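
/* Usage sketch for copy_blkmode_from_reg (illustrative only; TYPE is
   assumed to be a small struct returned in registers):

     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   allocates a stack temporary, copies the register contents into it one
   bit-field at a time, and returns the temporary.  */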
2175
2176 /* Add a USE expression for REG to the (possibly empty) list pointed
2177 to by CALL_FUSAGE. REG must denote a hard register. */
2178
2179 void
2180 use_reg (rtx *call_fusage, rtx reg)
2181 {
2182 if (GET_CODE (reg) != REG
2183 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2184 abort ();
2185
2186 *call_fusage
2187 = gen_rtx_EXPR_LIST (VOIDmode,
2188 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2189 }
2190
2191 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2192 starting at REGNO. All of these registers must be hard registers. */
2193
2194 void
2195 use_regs (rtx *call_fusage, int regno, int nregs)
2196 {
2197 int i;
2198
2199 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2200 abort ();
2201
2202 for (i = 0; i < nregs; i++)
2203 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2204 }
2205
2206 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2207 PARALLEL REGS. This is for calls that pass values in multiple
2208 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209
2210 void
2211 use_group_regs (rtx *call_fusage, rtx regs)
2212 {
2213 int i;
2214
2215 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 {
2217 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2218
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
2222 if (reg != 0 && GET_CODE (reg) == REG)
2223 use_reg (call_fusage, reg);
2224 }
2225 }
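
/* Usage sketch (illustrative only; register number 4 and the count of two
   are hypothetical). A caller building CALL_INSN_FUNCTION_USAGE might do:

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 4, 2);

   which records USEs of hard registers 4 and 5 so that the following call
   insn is known to read them.  */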
2226 \f
2227
2228 /* Determine whether the LEN bytes generated by CONSTFUN can be
2229 stored to memory using several move instructions. CONSTFUNDATA is
2230 a pointer which will be passed as argument in every CONSTFUN call.
2231 ALIGN is maximum alignment we can assume. Return nonzero if a
2232 call to store_by_pieces should succeed. */
2233
2234 int
2235 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2236 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2237 void *constfundata, unsigned int align)
2238 {
2239 unsigned HOST_WIDE_INT max_size, l;
2240 HOST_WIDE_INT offset = 0;
2241 enum machine_mode mode, tmode;
2242 enum insn_code icode;
2243 int reverse;
2244 rtx cst;
2245
2246 if (len == 0)
2247 return 1;
2248
2249 if (! STORE_BY_PIECES_P (len, align))
2250 return 0;
2251
2252 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2253 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2254 align = MOVE_MAX * BITS_PER_UNIT;
2255
2256 /* We would first store what we can in the largest integer mode, then go to
2257 successively smaller modes. */
2258
2259 for (reverse = 0;
2260 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2261 reverse++)
2262 {
2263 l = len;
2264 mode = VOIDmode;
2265 max_size = STORE_MAX_PIECES + 1;
2266 while (max_size > 1)
2267 {
2268 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2269 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2270 if (GET_MODE_SIZE (tmode) < max_size)
2271 mode = tmode;
2272
2273 if (mode == VOIDmode)
2274 break;
2275
2276 icode = mov_optab->handlers[(int) mode].insn_code;
2277 if (icode != CODE_FOR_nothing
2278 && align >= GET_MODE_ALIGNMENT (mode))
2279 {
2280 unsigned int size = GET_MODE_SIZE (mode);
2281
2282 while (l >= size)
2283 {
2284 if (reverse)
2285 offset -= size;
2286
2287 cst = (*constfun) (constfundata, offset, mode);
2288 if (!LEGITIMATE_CONSTANT_P (cst))
2289 return 0;
2290
2291 if (!reverse)
2292 offset += size;
2293
2294 l -= size;
2295 }
2296 }
2297
2298 max_size = GET_MODE_SIZE (mode);
2299 }
2300
2301 /* The code above should have handled everything. */
2302 if (l != 0)
2303 abort ();
2304 }
2305
2306 return 1;
2307 }
2308
2309 /* Generate several move instructions to store LEN bytes generated by
2310 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2311 pointer which will be passed as argument in every CONSTFUN call.
2312 ALIGN is maximum alignment we can assume.
2313 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2314 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2315 stpcpy. */
2316
2317 rtx
2318 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2319 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2320 void *constfundata, unsigned int align, int endp)
2321 {
2322 struct store_by_pieces data;
2323
2324 if (len == 0)
2325 {
2326 if (endp == 2)
2327 abort ();
2328 return to;
2329 }
2330
2331 if (! STORE_BY_PIECES_P (len, align))
2332 abort ();
2333 to = protect_from_queue (to, 1);
2334 data.constfun = constfun;
2335 data.constfundata = constfundata;
2336 data.len = len;
2337 data.to = to;
2338 store_by_pieces_1 (&data, align);
2339 if (endp)
2340 {
2341 rtx to1;
2342
2343 if (data.reverse)
2344 abort ();
2345 if (data.autinc_to)
2346 {
2347 if (endp == 2)
2348 {
2349 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2350 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2351 else
2352 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2353 -1));
2354 }
2355 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2356 data.offset);
2357 }
2358 else
2359 {
2360 if (endp == 2)
2361 --data.offset;
2362 to1 = adjust_address (data.to, QImode, data.offset);
2363 }
2364 return to1;
2365 }
2366 else
2367 return data.to;
2368 }
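
/* Usage sketch for can_store_by_pieces/store_by_pieces (illustrative only;
   the callback below is hypothetical and simply returns a zero of the
   requested mode, much like clear_by_pieces_1 does):

     static rtx
     zero_constfun (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, zero_constfun, NULL, align))
       store_by_pieces (to, len, zero_constfun, NULL, align, 0);

   emits a sequence of mode-sized stores of zero into TO.  */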
2369
2370 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2371 rtx with BLKmode). The caller must pass TO through protect_from_queue
2372 before calling. ALIGN is maximum alignment we can assume. */
2373
2374 static void
2375 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2376 {
2377 struct store_by_pieces data;
2378
2379 if (len == 0)
2380 return;
2381
2382 data.constfun = clear_by_pieces_1;
2383 data.constfundata = NULL;
2384 data.len = len;
2385 data.to = to;
2386 store_by_pieces_1 (&data, align);
2387 }
2388
2389 /* Callback routine for clear_by_pieces.
2390 Return const0_rtx unconditionally. */
2391
2392 static rtx
2393 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2394 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2395 enum machine_mode mode ATTRIBUTE_UNUSED)
2396 {
2397 return const0_rtx;
2398 }
2399
2400 /* Subroutine of clear_by_pieces and store_by_pieces.
2401 Generate several move instructions to store LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
2404
2405 static void
2406 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2407 unsigned int align ATTRIBUTE_UNUSED)
2408 {
2409 rtx to_addr = XEXP (data->to, 0);
2410 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2411 enum machine_mode mode = VOIDmode, tmode;
2412 enum insn_code icode;
2413
2414 data->offset = 0;
2415 data->to_addr = to_addr;
2416 data->autinc_to
2417 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2418 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2419
2420 data->explicit_inc_to = 0;
2421 data->reverse
2422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2423 if (data->reverse)
2424 data->offset = data->len;
2425
2426 /* If storing requires more than two move insns,
2427 copy addresses to registers (to make displacements shorter)
2428 and use post-increment if available. */
2429 if (!data->autinc_to
2430 && move_by_pieces_ninsns (data->len, align) > 2)
2431 {
2432 /* Determine the main mode we'll be using. */
2433 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2434 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2435 if (GET_MODE_SIZE (tmode) < max_size)
2436 mode = tmode;
2437
2438 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2439 {
2440 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2441 data->autinc_to = 1;
2442 data->explicit_inc_to = -1;
2443 }
2444
2445 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2446 && ! data->autinc_to)
2447 {
2448 data->to_addr = copy_addr_to_reg (to_addr);
2449 data->autinc_to = 1;
2450 data->explicit_inc_to = 1;
2451 }
2452
2453 if (!data->autinc_to && CONSTANT_P (to_addr))
2454 data->to_addr = copy_addr_to_reg (to_addr);
2455 }
2456
2457 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2458 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2459 align = MOVE_MAX * BITS_PER_UNIT;
2460
2461 /* First store what we can in the largest integer mode, then go to
2462 successively smaller modes. */
2463
2464 while (max_size > 1)
2465 {
2466 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2467 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2468 if (GET_MODE_SIZE (tmode) < max_size)
2469 mode = tmode;
2470
2471 if (mode == VOIDmode)
2472 break;
2473
2474 icode = mov_optab->handlers[(int) mode].insn_code;
2475 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2476 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2477
2478 max_size = GET_MODE_SIZE (mode);
2479 }
2480
2481 /* The code above should have handled everything. */
2482 if (data->len != 0)
2483 abort ();
2484 }
2485
2486 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2487 with move instructions for mode MODE. GENFUN is the gen_... function
2488 to make a move insn for that mode. DATA has all the other info. */
2489
2490 static void
2491 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2492 struct store_by_pieces *data)
2493 {
2494 unsigned int size = GET_MODE_SIZE (mode);
2495 rtx to1, cst;
2496
2497 while (data->len >= size)
2498 {
2499 if (data->reverse)
2500 data->offset -= size;
2501
2502 if (data->autinc_to)
2503 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2504 data->offset);
2505 else
2506 to1 = adjust_address (data->to, mode, data->offset);
2507
2508 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2509 emit_insn (gen_add2_insn (data->to_addr,
2510 GEN_INT (-(HOST_WIDE_INT) size)));
2511
2512 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2513 emit_insn ((*genfun) (to1, cst));
2514
2515 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2516 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2517
2518 if (! data->reverse)
2519 data->offset += size;
2520
2521 data->len -= size;
2522 }
2523 }
2524 \f
2525 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2526 its length in bytes. */
2527
2528 rtx
2529 clear_storage (rtx object, rtx size)
2530 {
2531 rtx retval = 0;
2532 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2533 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2534
2535 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2536 just move a zero. Otherwise, do this a piece at a time. */
2537 if (GET_MODE (object) != BLKmode
2538 && GET_CODE (size) == CONST_INT
2539 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2540 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2541 else
2542 {
2543 object = protect_from_queue (object, 1);
2544 size = protect_from_queue (size, 0);
2545
2546 if (size == const0_rtx)
2547 ;
2548 else if (GET_CODE (size) == CONST_INT
2549 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2550 clear_by_pieces (object, INTVAL (size), align);
2551 else if (clear_storage_via_clrstr (object, size, align))
2552 ;
2553 else
2554 retval = clear_storage_via_libcall (object, size);
2555 }
2556
2557 return retval;
2558 }
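
/* Usage sketch for clear_storage (illustrative only; OBJECT is assumed to
   be a BLKmode MEM and the 32-byte size is hypothetical):

     clear_storage (object, GEN_INT (32));

   clears the block by pieces, with a clrstr pattern, or with a library
   call to memset/bzero, whichever of the strategies above applies.  */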
2559
2560 /* A subroutine of clear_storage. Expand a clrstr pattern;
2561 return true if successful. */
2562
2563 static bool
2564 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2565 {
2566 /* Try the most limited insn first, because there's no point
2567 including more than one in the machine description unless
2568 the more limited one has some advantage. */
2569
2570 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2571 enum machine_mode mode;
2572
2573 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2574 mode = GET_MODE_WIDER_MODE (mode))
2575 {
2576 enum insn_code code = clrstr_optab[(int) mode];
2577 insn_operand_predicate_fn pred;
2578
2579 if (code != CODE_FOR_nothing
2580 /* We don't need MODE to be narrower than
2581 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2582 the mode mask, as it is returned by the macro, it will
2583 definitely be less than the actual mode mask. */
2584 && ((GET_CODE (size) == CONST_INT
2585 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2586 <= (GET_MODE_MASK (mode) >> 1)))
2587 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2588 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2589 || (*pred) (object, BLKmode))
2590 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2591 || (*pred) (opalign, VOIDmode)))
2592 {
2593 rtx op1;
2594 rtx last = get_last_insn ();
2595 rtx pat;
2596
2597 op1 = convert_to_mode (mode, size, 1);
2598 pred = insn_data[(int) code].operand[1].predicate;
2599 if (pred != 0 && ! (*pred) (op1, mode))
2600 op1 = copy_to_mode_reg (mode, op1);
2601
2602 pat = GEN_FCN ((int) code) (object, op1, opalign);
2603 if (pat)
2604 {
2605 emit_insn (pat);
2606 return true;
2607 }
2608 else
2609 delete_insns_since (last);
2610 }
2611 }
2612
2613 return false;
2614 }
2615
2616 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2617 Return the return value of memset, 0 otherwise. */
2618
2619 static rtx
2620 clear_storage_via_libcall (rtx object, rtx size)
2621 {
2622 tree call_expr, arg_list, fn, object_tree, size_tree;
2623 enum machine_mode size_mode;
2624 rtx retval;
2625
2626 /* OBJECT or SIZE may have been passed through protect_from_queue.
2627
2628 It is unsafe to save the value generated by protect_from_queue
2629 and reuse it later. Consider what happens if emit_queue is
2630 called before the return value from protect_from_queue is used.
2631
2632 Expansion of the CALL_EXPR below will call emit_queue before
2633 we are finished emitting RTL for argument setup. So if we are
2634 not careful we could get the wrong value for an argument.
2635
2636 To avoid this problem we go ahead and emit code to copy OBJECT
2637 and SIZE into new pseudos. We can then place those new pseudos
2638 into an RTL_EXPR and use them later, even after a call to
2639 emit_queue.
2640
2641 Note this is not strictly needed for library calls since they
2642 do not call emit_queue before loading their arguments. However,
2643 we may need to have library calls call emit_queue in the future
2644 since failing to do so could cause problems for targets which
2645 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2646
2647 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2648
2649 if (TARGET_MEM_FUNCTIONS)
2650 size_mode = TYPE_MODE (sizetype);
2651 else
2652 size_mode = TYPE_MODE (unsigned_type_node);
2653 size = convert_to_mode (size_mode, size, 1);
2654 size = copy_to_mode_reg (size_mode, size);
2655
2656 /* It is incorrect to use the libcall calling conventions to call
2657 memset in this context. This could be a user call to memset and
2658 the user may wish to examine the return value from memset. For
2659 targets where libcalls and normal calls have different conventions
2660 for returning pointers, we could end up generating incorrect code.
2661
2662 For convenience, we generate the call to bzero this way as well. */
2663
2664 object_tree = make_tree (ptr_type_node, object);
2665 if (TARGET_MEM_FUNCTIONS)
2666 size_tree = make_tree (sizetype, size);
2667 else
2668 size_tree = make_tree (unsigned_type_node, size);
2669
2670 fn = clear_storage_libcall_fn (true);
2671 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2672 if (TARGET_MEM_FUNCTIONS)
2673 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2674 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2675
2676 /* Now we have to build up the CALL_EXPR itself. */
2677 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2678 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2679 call_expr, arg_list, NULL_TREE);
2680
2681 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2682
2683 /* If we are initializing a readonly value, show the above call
2684 clobbered it. Otherwise, a load from it may erroneously be
2685 hoisted from a loop. */
2686 if (RTX_UNCHANGING_P (object))
2687 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2688
2689 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2690 }
2691
2692 /* A subroutine of clear_storage_via_libcall. Create the tree node
2693 for the function we use for block clears. The first time FOR_CALL
2694 is true, we call assemble_external. */
2695
2696 static GTY(()) tree block_clear_fn;
2697
2698 void
2699 init_block_clear_fn (const char *asmspec)
2700 {
2701 if (!block_clear_fn)
2702 {
2703 tree fn, args;
2704
2705 if (TARGET_MEM_FUNCTIONS)
2706 {
2707 fn = get_identifier ("memset");
2708 args = build_function_type_list (ptr_type_node, ptr_type_node,
2709 integer_type_node, sizetype,
2710 NULL_TREE);
2711 }
2712 else
2713 {
2714 fn = get_identifier ("bzero");
2715 args = build_function_type_list (void_type_node, ptr_type_node,
2716 unsigned_type_node, NULL_TREE);
2717 }
2718
2719 fn = build_decl (FUNCTION_DECL, fn, args);
2720 DECL_EXTERNAL (fn) = 1;
2721 TREE_PUBLIC (fn) = 1;
2722 DECL_ARTIFICIAL (fn) = 1;
2723 TREE_NOTHROW (fn) = 1;
2724
2725 block_clear_fn = fn;
2726 }
2727
2728 if (asmspec)
2729 {
2730 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2731 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2732 }
2733 }
2734
2735 static tree
2736 clear_storage_libcall_fn (int for_call)
2737 {
2738 static bool emitted_extern;
2739
2740 if (!block_clear_fn)
2741 init_block_clear_fn (NULL);
2742
2743 if (for_call && !emitted_extern)
2744 {
2745 emitted_extern = true;
2746 make_decl_rtl (block_clear_fn, NULL);
2747 assemble_external (block_clear_fn);
2748 }
2749
2750 return block_clear_fn;
2751 }
2752 \f
2753 /* Generate code to copy Y into X.
2754 Both Y and X must have the same mode, except that
2755 Y can be a constant with VOIDmode.
2756 This mode cannot be BLKmode; use emit_block_move for that.
2757
2758 Return the last instruction emitted. */
2759
2760 rtx
2761 emit_move_insn (rtx x, rtx y)
2762 {
2763 enum machine_mode mode = GET_MODE (x);
2764 rtx y_cst = NULL_RTX;
2765 rtx last_insn, set;
2766
2767 x = protect_from_queue (x, 1);
2768 y = protect_from_queue (y, 0);
2769
2770 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2771 abort ();
2772
2773 /* Never force constant_p_rtx to memory. */
2774 if (GET_CODE (y) == CONSTANT_P_RTX)
2775 ;
2776 else if (CONSTANT_P (y))
2777 {
2778 if (optimize
2779 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2780 && (last_insn = compress_float_constant (x, y)))
2781 return last_insn;
2782
2783 y_cst = y;
2784
2785 if (!LEGITIMATE_CONSTANT_P (y))
2786 {
2787 y = force_const_mem (mode, y);
2788
2789 /* If the target's cannot_force_const_mem prevented the spill,
2790 assume that the target's move expanders will also take care
2791 of the non-legitimate constant. */
2792 if (!y)
2793 y = y_cst;
2794 }
2795 }
2796
2797 /* If X or Y are memory references, verify that their addresses are valid
2798 for the machine. */
2799 if (GET_CODE (x) == MEM
2800 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2801 && ! push_operand (x, GET_MODE (x)))
2802 || (flag_force_addr
2803 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2804 x = validize_mem (x);
2805
2806 if (GET_CODE (y) == MEM
2807 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2808 || (flag_force_addr
2809 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2810 y = validize_mem (y);
2811
2812 if (mode == BLKmode)
2813 abort ();
2814
2815 last_insn = emit_move_insn_1 (x, y);
2816
2817 if (y_cst && GET_CODE (x) == REG
2818 && (set = single_set (last_insn)) != NULL_RTX
2819 && SET_DEST (set) == x
2820 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2821 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2822
2823 return last_insn;
2824 }
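
/* Usage sketch for emit_move_insn (illustrative only; the pseudo and the
   SImode constant are hypothetical):

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   emits a single SImode move, and may attach a REG_EQUAL note when the
   constant had to be spilled to memory first.  */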
2825
2826 /* Low level part of emit_move_insn.
2827 Called just like emit_move_insn, but assumes X and Y
2828 are basically valid. */
2829
2830 rtx
2831 emit_move_insn_1 (rtx x, rtx y)
2832 {
2833 enum machine_mode mode = GET_MODE (x);
2834 enum machine_mode submode;
2835 enum mode_class class = GET_MODE_CLASS (mode);
2836
2837 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2838 abort ();
2839
2840 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2841 return
2842 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2843
2844 /* Expand complex moves by moving real part and imag part, if possible. */
2845 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2846 && BLKmode != (submode = GET_MODE_INNER (mode))
2847 && (mov_optab->handlers[(int) submode].insn_code
2848 != CODE_FOR_nothing))
2849 {
2850 /* Don't split destination if it is a stack push. */
2851 int stack = push_operand (x, GET_MODE (x));
2852
2853 #ifdef PUSH_ROUNDING
2854 /* In case we output to the stack, but the size is smaller than the
2855 machine can push exactly, we need to use move instructions. */
2856 if (stack
2857 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2858 != GET_MODE_SIZE (submode)))
2859 {
2860 rtx temp;
2861 HOST_WIDE_INT offset1, offset2;
2862
2863 /* Do not use anti_adjust_stack, since we don't want to update
2864 stack_pointer_delta. */
2865 temp = expand_binop (Pmode,
2866 #ifdef STACK_GROWS_DOWNWARD
2867 sub_optab,
2868 #else
2869 add_optab,
2870 #endif
2871 stack_pointer_rtx,
2872 GEN_INT
2873 (PUSH_ROUNDING
2874 (GET_MODE_SIZE (GET_MODE (x)))),
2875 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2876
2877 if (temp != stack_pointer_rtx)
2878 emit_move_insn (stack_pointer_rtx, temp);
2879
2880 #ifdef STACK_GROWS_DOWNWARD
2881 offset1 = 0;
2882 offset2 = GET_MODE_SIZE (submode);
2883 #else
2884 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2885 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2886 + GET_MODE_SIZE (submode));
2887 #endif
2888
2889 emit_move_insn (change_address (x, submode,
2890 gen_rtx_PLUS (Pmode,
2891 stack_pointer_rtx,
2892 GEN_INT (offset1))),
2893 gen_realpart (submode, y));
2894 emit_move_insn (change_address (x, submode,
2895 gen_rtx_PLUS (Pmode,
2896 stack_pointer_rtx,
2897 GEN_INT (offset2))),
2898 gen_imagpart (submode, y));
2899 }
2900 else
2901 #endif
2902 /* If this is a stack, push the highpart first, so it
2903 will be in the argument order.
2904
2905 In that case, change_address is used only to convert
2906 the mode, not to change the address. */
2907 if (stack)
2908 {
2909 /* Note that the real part always precedes the imag part in memory
2910 regardless of machine's endianness. */
2911 #ifdef STACK_GROWS_DOWNWARD
2912 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2913 gen_imagpart (submode, y));
2914 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2915 gen_realpart (submode, y));
2916 #else
2917 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2918 gen_realpart (submode, y));
2919 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2920 gen_imagpart (submode, y));
2921 #endif
2922 }
2923 else
2924 {
2925 rtx realpart_x, realpart_y;
2926 rtx imagpart_x, imagpart_y;
2927
2928 /* If this is a complex value with each part being smaller than a
2929 word, the usual calling sequence will likely pack the pieces into
2930 a single register. Unfortunately, SUBREG of hard registers only
2931 deals in terms of words, so we have a problem converting input
2932 arguments to the CONCAT of two registers that is used elsewhere
2933 for complex values. If this is before reload, we can copy it into
2934 memory and reload. FIXME, we should see about using extract and
2935 insert on integer registers, but complex short and complex char
2936 variables should be rarely used. */
2937 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2938 && (reload_in_progress | reload_completed) == 0)
2939 {
2940 int packed_dest_p
2941 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2942 int packed_src_p
2943 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2944
2945 if (packed_dest_p || packed_src_p)
2946 {
2947 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2948 ? MODE_FLOAT : MODE_INT);
2949
2950 enum machine_mode reg_mode
2951 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2952
2953 if (reg_mode != BLKmode)
2954 {
2955 rtx mem = assign_stack_temp (reg_mode,
2956 GET_MODE_SIZE (mode), 0);
2957 rtx cmem = adjust_address (mem, mode, 0);
2958
2959 cfun->cannot_inline
2960 = N_("function using short complex types cannot be inline");
2961
2962 if (packed_dest_p)
2963 {
2964 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2965
2966 emit_move_insn_1 (cmem, y);
2967 return emit_move_insn_1 (sreg, mem);
2968 }
2969 else
2970 {
2971 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2972
2973 emit_move_insn_1 (mem, sreg);
2974 return emit_move_insn_1 (x, cmem);
2975 }
2976 }
2977 }
2978 }
2979
2980 realpart_x = gen_realpart (submode, x);
2981 realpart_y = gen_realpart (submode, y);
2982 imagpart_x = gen_imagpart (submode, x);
2983 imagpart_y = gen_imagpart (submode, y);
2984
2985 /* Show the output dies here. This is necessary for SUBREGs
2986 of pseudos since we cannot track their lifetimes correctly;
2987 hard regs shouldn't appear here except as return values.
2988 We never want to emit such a clobber after reload. */
2989 if (x != y
2990 && ! (reload_in_progress || reload_completed)
2991 && (GET_CODE (realpart_x) == SUBREG
2992 || GET_CODE (imagpart_x) == SUBREG))
2993 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2994
2995 emit_move_insn (realpart_x, realpart_y);
2996 emit_move_insn (imagpart_x, imagpart_y);
2997 }
2998
2999 return get_last_insn ();
3000 }
3001
3002 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3003 find a mode to do it in. If we have a movcc, use it. Otherwise,
3004 find the MODE_INT mode of the same width. */
3005 else if (GET_MODE_CLASS (mode) == MODE_CC
3006 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3007 {
3008 enum insn_code insn_code;
3009 enum machine_mode tmode = VOIDmode;
3010 rtx x1 = x, y1 = y;
3011
3012 if (mode != CCmode
3013 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3014 tmode = CCmode;
3015 else
3016 for (tmode = QImode; tmode != VOIDmode;
3017 tmode = GET_MODE_WIDER_MODE (tmode))
3018 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3019 break;
3020
3021 if (tmode == VOIDmode)
3022 abort ();
3023
3024 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3025 may call change_address which is not appropriate if we were
3026 called when a reload was in progress. We don't have to worry
3027 about changing the address since the size in bytes is supposed to
3028 be the same. Copy the MEM to change the mode and move any
3029 substitutions from the old MEM to the new one. */
3030
3031 if (reload_in_progress)
3032 {
3033 x = gen_lowpart_common (tmode, x1);
3034 if (x == 0 && GET_CODE (x1) == MEM)
3035 {
3036 x = adjust_address_nv (x1, tmode, 0);
3037 copy_replacements (x1, x);
3038 }
3039
3040 y = gen_lowpart_common (tmode, y1);
3041 if (y == 0 && GET_CODE (y1) == MEM)
3042 {
3043 y = adjust_address_nv (y1, tmode, 0);
3044 copy_replacements (y1, y);
3045 }
3046 }
3047 else
3048 {
3049 x = gen_lowpart (tmode, x);
3050 y = gen_lowpart (tmode, y);
3051 }
3052
3053 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3054 return emit_insn (GEN_FCN (insn_code) (x, y));
3055 }
3056
3057 /* Try using a move pattern for the corresponding integer mode. This is
3058 only safe when simplify_subreg can convert MODE constants into integer
3059 constants. At present, it can only do this reliably if the value
3060 fits within a HOST_WIDE_INT. */
3061 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3062 && (submode = int_mode_for_mode (mode)) != BLKmode
3063 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3064 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3065 (simplify_gen_subreg (submode, x, mode, 0),
3066 simplify_gen_subreg (submode, y, mode, 0)));
3067
3068 /* This will handle any multi-word or full-word mode that lacks a move_insn
3069 pattern. However, you will get better code if you define such patterns,
3070 even if they must turn into multiple assembler instructions. */
3071 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3072 {
3073 rtx last_insn = 0;
3074 rtx seq, inner;
3075 int need_clobber;
3076 int i;
3077
3078 #ifdef PUSH_ROUNDING
3079
3080 /* If X is a push on the stack, do the push now and replace
3081 X with a reference to the stack pointer. */
3082 if (push_operand (x, GET_MODE (x)))
3083 {
3084 rtx temp;
3085 enum rtx_code code;
3086
3087 /* Do not use anti_adjust_stack, since we don't want to update
3088 stack_pointer_delta. */
3089 temp = expand_binop (Pmode,
3090 #ifdef STACK_GROWS_DOWNWARD
3091 sub_optab,
3092 #else
3093 add_optab,
3094 #endif
3095 stack_pointer_rtx,
3096 GEN_INT
3097 (PUSH_ROUNDING
3098 (GET_MODE_SIZE (GET_MODE (x)))),
3099 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3100
3101 if (temp != stack_pointer_rtx)
3102 emit_move_insn (stack_pointer_rtx, temp);
3103
3104 code = GET_CODE (XEXP (x, 0));
3105
3106 /* Just hope that small offsets off SP are OK. */
3107 if (code == POST_INC)
3108 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3109 GEN_INT (-((HOST_WIDE_INT)
3110 GET_MODE_SIZE (GET_MODE (x)))));
3111 else if (code == POST_DEC)
3112 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3113 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3114 else
3115 temp = stack_pointer_rtx;
3116
3117 x = change_address (x, VOIDmode, temp);
3118 }
3119 #endif
3120
3121 /* If we are in reload, see if either operand is a MEM whose address
3122 is scheduled for replacement. */
3123 if (reload_in_progress && GET_CODE (x) == MEM
3124 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3125 x = replace_equiv_address_nv (x, inner);
3126 if (reload_in_progress && GET_CODE (y) == MEM
3127 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3128 y = replace_equiv_address_nv (y, inner);
3129
3130 start_sequence ();
3131
3132 need_clobber = 0;
3133 for (i = 0;
3134 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3135 i++)
3136 {
3137 rtx xpart = operand_subword (x, i, 1, mode);
3138 rtx ypart = operand_subword (y, i, 1, mode);
3139
3140 /* If we can't get a part of Y, put Y into memory if it is a
3141 constant. Otherwise, force it into a register. If we still
3142 can't get a part of Y, abort. */
3143 if (ypart == 0 && CONSTANT_P (y))
3144 {
3145 y = force_const_mem (mode, y);
3146 ypart = operand_subword (y, i, 1, mode);
3147 }
3148 else if (ypart == 0)
3149 ypart = operand_subword_force (y, i, mode);
3150
3151 if (xpart == 0 || ypart == 0)
3152 abort ();
3153
3154 need_clobber |= (GET_CODE (xpart) == SUBREG);
3155
3156 last_insn = emit_move_insn (xpart, ypart);
3157 }
3158
3159 seq = get_insns ();
3160 end_sequence ();
3161
3162 /* Show the output dies here. This is necessary for SUBREGs
3163 of pseudos since we cannot track their lifetimes correctly;
3164 hard regs shouldn't appear here except as return values.
3165 We never want to emit such a clobber after reload. */
3166 if (x != y
3167 && ! (reload_in_progress || reload_completed)
3168 && need_clobber != 0)
3169 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3170
3171 emit_insn (seq);
3172
3173 return last_insn;
3174 }
3175 else
3176 abort ();
3177 }
3178
3179 /* If Y is representable exactly in a narrower mode, and the target can
3180 perform the extension directly from constant or memory, then emit the
3181 move as an extension. */
3182
3183 static rtx
3184 compress_float_constant (rtx x, rtx y)
3185 {
3186 enum machine_mode dstmode = GET_MODE (x);
3187 enum machine_mode orig_srcmode = GET_MODE (y);
3188 enum machine_mode srcmode;
3189 REAL_VALUE_TYPE r;
3190
3191 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3192
3193 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3194 srcmode != orig_srcmode;
3195 srcmode = GET_MODE_WIDER_MODE (srcmode))
3196 {
3197 enum insn_code ic;
3198 rtx trunc_y, last_insn;
3199
3200 /* Skip if the target can't extend this way. */
3201 ic = can_extend_p (dstmode, srcmode, 0);
3202 if (ic == CODE_FOR_nothing)
3203 continue;
3204
3205 /* Skip if the narrowed value isn't exact. */
3206 if (! exact_real_truncate (srcmode, &r))
3207 continue;
3208
3209 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3210
3211 if (LEGITIMATE_CONSTANT_P (trunc_y))
3212 {
3213 /* Skip if the target needs extra instructions to perform
3214 the extension. */
3215 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3216 continue;
3217 }
3218 else if (float_extend_from_mem[dstmode][srcmode])
3219 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3220 else
3221 continue;
3222
3223 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3224 last_insn = get_last_insn ();
3225
3226 if (GET_CODE (x) == REG)
3227 set_unique_reg_note (last_insn, REG_EQUAL, y);
3228
3229 return last_insn;
3230 }
3231
3232 return NULL_RTX;
3233 }
3234 \f
3235 /* Pushing data onto the stack. */
3236
3237 /* Push a block of length SIZE (perhaps variable)
3238 and return an rtx to address the beginning of the block.
3239 Note that it is not possible for the value returned to be a QUEUED.
3240 The value may be virtual_outgoing_args_rtx.
3241
3242 EXTRA is the number of bytes of padding to push in addition to SIZE.
3243 BELOW nonzero means this padding comes at low addresses;
3244 otherwise, the padding comes at high addresses. */
3245
3246 rtx
3247 push_block (rtx size, int extra, int below)
3248 {
3249 rtx temp;
3250
3251 size = convert_modes (Pmode, ptr_mode, size, 1);
3252 if (CONSTANT_P (size))
3253 anti_adjust_stack (plus_constant (size, extra));
3254 else if (GET_CODE (size) == REG && extra == 0)
3255 anti_adjust_stack (size);
3256 else
3257 {
3258 temp = copy_to_mode_reg (Pmode, size);
3259 if (extra != 0)
3260 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3261 temp, 0, OPTAB_LIB_WIDEN);
3262 anti_adjust_stack (temp);
3263 }
3264
3265 #ifndef STACK_GROWS_DOWNWARD
3266 if (0)
3267 #else
3268 if (1)
3269 #endif
3270 {
3271 temp = virtual_outgoing_args_rtx;
3272 if (extra != 0 && below)
3273 temp = plus_constant (temp, extra);
3274 }
3275 else
3276 {
3277 if (GET_CODE (size) == CONST_INT)
3278 temp = plus_constant (virtual_outgoing_args_rtx,
3279 -INTVAL (size) - (below ? 0 : extra));
3280 else if (extra != 0 && !below)
3281 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3282 negate_rtx (Pmode, plus_constant (size, extra)));
3283 else
3284 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3285 negate_rtx (Pmode, size));
3286 }
3287
3288 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3289 }
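
/* Usage sketch for push_block (illustrative only; the 64-byte size and
   zero padding are hypothetical):

     rtx blk_addr = push_block (GEN_INT (64), 0, 0);

   adjusts the stack pointer to allocate 64 bytes and returns an address
   for the beginning of the newly allocated block.  */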
3290
3291 #ifdef PUSH_ROUNDING
3292
3293 /* Emit single push insn. */
3294
3295 static void
3296 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3297 {
3298 rtx dest_addr;
3299 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3300 rtx dest;
3301 enum insn_code icode;
3302 insn_operand_predicate_fn pred;
3303
3304 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3305 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3306 a MEM representing the push operation to the move expander. */
3307 icode = push_optab->handlers[(int) mode].insn_code;
3308 if (icode != CODE_FOR_nothing)
3309 {
3310 if (((pred = insn_data[(int) icode].operand[0].predicate)
3311 && !((*pred) (x, mode))))
3312 x = force_reg (mode, x);
3313 emit_insn (GEN_FCN (icode) (x));
3314 return;
3315 }
3316 if (GET_MODE_SIZE (mode) == rounded_size)
3317 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3318 /* If we are to pad downward, adjust the stack pointer first and
3319 then store X into the stack location using an offset. This is
3320 because emit_move_insn does not know how to pad; it does not have
3321 access to type. */
3322 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3323 {
3324 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3325 HOST_WIDE_INT offset;
3326
3327 emit_move_insn (stack_pointer_rtx,
3328 expand_binop (Pmode,
3329 #ifdef STACK_GROWS_DOWNWARD
3330 sub_optab,
3331 #else
3332 add_optab,
3333 #endif
3334 stack_pointer_rtx,
3335 GEN_INT (rounded_size),
3336 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3337
3338 offset = (HOST_WIDE_INT) padding_size;
3339 #ifdef STACK_GROWS_DOWNWARD
3340 if (STACK_PUSH_CODE == POST_DEC)
3341 /* We have already decremented the stack pointer, so get the
3342 previous value. */
3343 offset += (HOST_WIDE_INT) rounded_size;
3344 #else
3345 if (STACK_PUSH_CODE == POST_INC)
3346 /* We have already incremented the stack pointer, so get the
3347 previous value. */
3348 offset -= (HOST_WIDE_INT) rounded_size;
3349 #endif
3350 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3351 }
3352 else
3353 {
3354 #ifdef STACK_GROWS_DOWNWARD
3355 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3356 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3357 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3358 #else
3359 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3360 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3361 GEN_INT (rounded_size));
3362 #endif
3363 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3364 }
3365
3366 dest = gen_rtx_MEM (mode, dest_addr);
3367
3368 if (type != 0)
3369 {
3370 set_mem_attributes (dest, type, 1);
3371
3372 if (flag_optimize_sibling_calls)
3373 /* Function incoming arguments may overlap with sibling call
3374 outgoing arguments and we cannot allow reordering of reads
3375 from function arguments with stores to outgoing arguments
3376 of sibling calls. */
3377 set_mem_alias_set (dest, 0);
3378 }
3379 emit_move_insn (dest, x);
3380 }
3381 #endif
3382
3383 /* Generate code to push X onto the stack, assuming it has mode MODE and
3384 type TYPE.
3385 MODE is redundant except when X is a CONST_INT (since they don't
3386 carry mode info).
3387 SIZE is an rtx for the size of data to be copied (in bytes),
3388 needed only if X is BLKmode.
3389
3390 ALIGN (in bits) is maximum alignment we can assume.
3391
3392 If PARTIAL and REG are both nonzero, then copy that many of the first
3393 words of X into registers starting with REG, and push the rest of X.
3394 The amount of space pushed is decreased by PARTIAL words,
3395 rounded *down* to a multiple of PARM_BOUNDARY.
3396 REG must be a hard register in this case.
3397 If REG is zero but PARTIAL is not, take all other actions for an
3398 argument partially in registers, but do not actually load any
3399 registers.
3400
3401 EXTRA is the amount in bytes of extra space to leave next to this arg.
3402 This is ignored if an argument block has already been allocated.
3403
3404 On a machine that lacks real push insns, ARGS_ADDR is the address of
3405 the bottom of the argument block for this call. We use indexing off there
3406 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3407 argument block has not been preallocated.
3408
3409 ARGS_SO_FAR is the size of args previously pushed for this call.
3410
3411 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3412 for arguments passed in registers. If nonzero, it will be the number
3413 of bytes required. */
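
/* For instance (illustrative sketch only; every argument below is
   hypothetical, and a target with push insns and no preallocated argument
   block is assumed), pushing a word-sized scalar with no partial registers
   might look like:

     emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
                     PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                     const0_rtx, 0, NULL_RTX);
 */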
3414
3415 void
3416 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3417 unsigned int align, int partial, rtx reg, int extra,
3418 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3419 rtx alignment_pad)
3420 {
3421 rtx xinner;
3422 enum direction stack_direction
3423 #ifdef STACK_GROWS_DOWNWARD
3424 = downward;
3425 #else
3426 = upward;
3427 #endif
3428
3429 /* Decide where to pad the argument: `downward' for below,
3430 `upward' for above, or `none' for don't pad it.
3431 Default is below for small data on big-endian machines; else above. */
3432 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3433
3434 /* Invert direction if stack is post-decrement.
3435 FIXME: why? */
3436 if (STACK_PUSH_CODE == POST_DEC)
3437 if (where_pad != none)
3438 where_pad = (where_pad == downward ? upward : downward);
3439
3440 xinner = x = protect_from_queue (x, 0);
3441
3442 if (mode == BLKmode)
3443 {
3444 /* Copy a block into the stack, entirely or partially. */
3445
3446 rtx temp;
3447 int used = partial * UNITS_PER_WORD;
3448 int offset;
3449 int skip;
3450
3451 if (reg && GET_CODE (reg) == PARALLEL)
3452 {
3453 /* Use the size of the elt to compute offset. */
3454 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3455 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3456 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3457 }
3458 else
3459 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3460
3461 if (size == 0)
3462 abort ();
3463
3464 used -= offset;
3465
3466 /* USED is now the # of bytes we need not copy to the stack
3467 because registers will take care of them. */
3468
3469 if (partial != 0)
3470 xinner = adjust_address (xinner, BLKmode, used);
3471
3472 /* If the partial register-part of the arg counts in its stack size,
3473 skip the part of stack space corresponding to the registers.
3474 Otherwise, start copying to the beginning of the stack space,
3475 by setting SKIP to 0. */
3476 skip = (reg_parm_stack_space == 0) ? 0 : used;
3477
3478 #ifdef PUSH_ROUNDING
3479 /* Do it with several push insns if that doesn't take lots of insns
3480 and if there is no difficulty with push insns that skip bytes
3481 on the stack for alignment purposes. */
3482 if (args_addr == 0
3483 && PUSH_ARGS
3484 && GET_CODE (size) == CONST_INT
3485 && skip == 0
3486 && MEM_ALIGN (xinner) >= align
3487 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3488 /* Here we avoid the case of a structure whose weak alignment
3489 forces many pushes of a small amount of data,
3490 and such small pushes do rounding that causes trouble. */
3491 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3492 || align >= BIGGEST_ALIGNMENT
3493 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3494 == (align / BITS_PER_UNIT)))
3495 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3496 {
3497 /* Push padding now if padding above and stack grows down,
3498 or if padding below and stack grows up.
3499 But if space already allocated, this has already been done. */
3500 if (extra && args_addr == 0
3501 && where_pad != none && where_pad != stack_direction)
3502 anti_adjust_stack (GEN_INT (extra));
3503
3504 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3505 }
3506 else
3507 #endif /* PUSH_ROUNDING */
3508 {
3509 rtx target;
3510
3511 /* Otherwise make space on the stack and copy the data
3512 to the address of that space. */
3513
3514 /* Deduct words put into registers from the size we must copy. */
3515 if (partial != 0)
3516 {
3517 if (GET_CODE (size) == CONST_INT)
3518 size = GEN_INT (INTVAL (size) - used);
3519 else
3520 size = expand_binop (GET_MODE (size), sub_optab, size,
3521 GEN_INT (used), NULL_RTX, 0,
3522 OPTAB_LIB_WIDEN);
3523 }
3524
3525 /* Get the address of the stack space.
3526 In this case, we do not deal with EXTRA separately.
3527 A single stack adjust will do. */
3528 if (! args_addr)
3529 {
3530 temp = push_block (size, extra, where_pad == downward);
3531 extra = 0;
3532 }
3533 else if (GET_CODE (args_so_far) == CONST_INT)
3534 temp = memory_address (BLKmode,
3535 plus_constant (args_addr,
3536 skip + INTVAL (args_so_far)));
3537 else
3538 temp = memory_address (BLKmode,
3539 plus_constant (gen_rtx_PLUS (Pmode,
3540 args_addr,
3541 args_so_far),
3542 skip));
3543
3544 if (!ACCUMULATE_OUTGOING_ARGS)
3545 {
3546 /* If the source is referenced relative to the stack pointer,
3547 copy it to another register to stabilize it. We do not need
3548 to do this if we know that we won't be changing sp. */
3549
3550 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3551 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3552 temp = copy_to_reg (temp);
3553 }
3554
3555 target = gen_rtx_MEM (BLKmode, temp);
3556
3557 if (type != 0)
3558 {
3559 set_mem_attributes (target, type, 1);
3560 /* Function incoming arguments may overlap with sibling call
3561 outgoing arguments and we cannot allow reordering of reads
3562 from function arguments with stores to outgoing arguments
3563 of sibling calls. */
3564 set_mem_alias_set (target, 0);
3565 }
3566
3567 /* ALIGN may well be better aligned than TYPE, e.g. due to
3568 PARM_BOUNDARY. Assume the caller isn't lying. */
3569 set_mem_align (target, align);
3570
3571 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3572 }
3573 }
3574 else if (partial > 0)
3575 {
3576 /* Scalar partly in registers. */
3577
3578 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3579 int i;
3580 int not_stack;
3581 /* # words of start of argument
3582 that we must make space for but need not store. */
3583 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3584 int args_offset = INTVAL (args_so_far);
3585 int skip;
3586
3587 /* Push padding now if padding above and stack grows down,
3588 or if padding below and stack grows up.
3589 But if space already allocated, this has already been done. */
3590 if (extra && args_addr == 0
3591 && where_pad != none && where_pad != stack_direction)
3592 anti_adjust_stack (GEN_INT (extra));
3593
3594 /* If we make space by pushing it, we might as well push
3595 the real data. Otherwise, we can leave OFFSET nonzero
3596 and leave the space uninitialized. */
3597 if (args_addr == 0)
3598 offset = 0;
3599
3600 /* Now NOT_STACK gets the number of words that we don't need to
3601 allocate on the stack. */
3602 not_stack = partial - offset;
3603
3604 /* If the partial register-part of the arg counts in its stack size,
3605 skip the part of stack space corresponding to the registers.
3606 Otherwise, start copying to the beginning of the stack space,
3607 by setting SKIP to 0. */
3608 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3609
3610 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3611 x = validize_mem (force_const_mem (mode, x));
3612
3613 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3614 SUBREGs of such registers are not allowed. */
3615 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3616 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3617 x = copy_to_reg (x);
3618
3619 /* Loop over all the words allocated on the stack for this arg. */
3620 /* We can do it by words, because any scalar bigger than a word
3621 has a size a multiple of a word. */
3622 #ifndef PUSH_ARGS_REVERSED
3623 for (i = not_stack; i < size; i++)
3624 #else
3625 for (i = size - 1; i >= not_stack; i--)
3626 #endif
3627 if (i >= not_stack + offset)
3628 emit_push_insn (operand_subword_force (x, i, mode),
3629 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3630 0, args_addr,
3631 GEN_INT (args_offset + ((i - not_stack + skip)
3632 * UNITS_PER_WORD)),
3633 reg_parm_stack_space, alignment_pad);
3634 }
3635 else
3636 {
3637 rtx addr;
3638 rtx dest;
3639
3640 /* Push padding now if padding above and stack grows down,
3641 or if padding below and stack grows up.
3642 But if space already allocated, this has already been done. */
3643 if (extra && args_addr == 0
3644 && where_pad != none && where_pad != stack_direction)
3645 anti_adjust_stack (GEN_INT (extra));
3646
3647 #ifdef PUSH_ROUNDING
3648 if (args_addr == 0 && PUSH_ARGS)
3649 emit_single_push_insn (mode, x, type);
3650 else
3651 #endif
3652 {
3653 if (GET_CODE (args_so_far) == CONST_INT)
3654 addr
3655 = memory_address (mode,
3656 plus_constant (args_addr,
3657 INTVAL (args_so_far)));
3658 else
3659 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3660 args_so_far));
3661 dest = gen_rtx_MEM (mode, addr);
3662 if (type != 0)
3663 {
3664 set_mem_attributes (dest, type, 1);
3665 /* Function incoming arguments may overlap with sibling call
3666 outgoing arguments and we cannot allow reordering of reads
3667 from function arguments with stores to outgoing arguments
3668 of sibling calls. */
3669 set_mem_alias_set (dest, 0);
3670 }
3671
3672 emit_move_insn (dest, x);
3673 }
3674 }
3675
3676 /* If part should go in registers, copy that part
3677 into the appropriate registers. Do this now, at the end,
3678 since mem-to-mem copies above may do function calls. */
3679 if (partial > 0 && reg != 0)
3680 {
3681 /* Handle calls that pass values in multiple non-contiguous locations.
3682 The Irix 6 ABI has examples of this. */
3683 if (GET_CODE (reg) == PARALLEL)
3684 emit_group_load (reg, x, type, -1);
3685 else
3686 move_block_to_reg (REGNO (reg), x, partial, mode);
3687 }
3688
3689 if (extra && args_addr == 0 && where_pad == stack_direction)
3690 anti_adjust_stack (GEN_INT (extra));
3691
3692 if (alignment_pad && args_addr == 0)
3693 anti_adjust_stack (alignment_pad);
3694 }
3695 \f
3696 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3697 operations. */
3698
3699 static rtx
3700 get_subtarget (rtx x)
3701 {
3702 return ((x == 0
3703 /* Only registers can be subtargets. */
3704 || GET_CODE (x) != REG
3705 /* If the register is readonly, it can't be set more than once. */
3706 || RTX_UNCHANGING_P (x)
3707 /* Don't use hard regs to avoid extending their life. */
3708 || REGNO (x) < FIRST_PSEUDO_REGISTER
3709 /* Avoid subtargets inside loops,
3710 since they hide some invariant expressions. */
3711 || preserve_subexpressions_p ())
3712 ? 0 : x);
3713 }
3714
3715 /* Expand an assignment that stores the value of FROM into TO.
3716 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3717 (This may contain a QUEUED rtx;
3718 if the value is constant, this rtx is a constant.)
3719 Otherwise, the returned value is NULL_RTX. */
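/* For illustration: for an assignment such as "s.f = v", TO is a
   COMPONENT_REF, so the code below uses get_inner_reference to find the
   bit position of F within S and stores V with store_field; a plain
   "x = v" instead falls through to the store_expr call at the end.  */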
3720
3721 rtx
3722 expand_assignment (tree to, tree from, int want_value)
3723 {
3724 rtx to_rtx = 0;
3725 rtx result;
3726
3727 /* Don't crash if the lhs of the assignment was erroneous. */
3728
3729 if (TREE_CODE (to) == ERROR_MARK)
3730 {
3731 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3732 return want_value ? result : NULL_RTX;
3733 }
3734
3735 /* Assignment of a structure component needs special treatment
3736 if the structure component's rtx is not simply a MEM.
3737 Assignment of an array element at a constant index, and assignment of
3738 an array element in an unaligned packed structure field, has the same
3739 problem. */
3740
3741 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3742 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3743 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3744 {
3745 enum machine_mode mode1;
3746 HOST_WIDE_INT bitsize, bitpos;
3747 rtx orig_to_rtx;
3748 tree offset;
3749 int unsignedp;
3750 int volatilep = 0;
3751 tree tem;
3752
3753 push_temp_slots ();
3754 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3755 &unsignedp, &volatilep);
3756
3757 /* If we are going to use store_bit_field and extract_bit_field,
3758 make sure to_rtx will be safe for multiple use. */
3759
3760 if (mode1 == VOIDmode && want_value)
3761 tem = stabilize_reference (tem);
3762
3763 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3764
3765 if (offset != 0)
3766 {
3767 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3768
3769 if (GET_CODE (to_rtx) != MEM)
3770 abort ();
3771
3772 #ifdef POINTERS_EXTEND_UNSIGNED
3773 if (GET_MODE (offset_rtx) != Pmode)
3774 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3775 #else
3776 if (GET_MODE (offset_rtx) != ptr_mode)
3777 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3778 #endif
3779
 3780 	  /* A constant address in TO_RTX can have VOIDmode; we must not try
 3781 	     to call force_reg in that case, so avoid it.  */
3782 if (GET_CODE (to_rtx) == MEM
3783 && GET_MODE (to_rtx) == BLKmode
3784 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3785 && bitsize > 0
3786 && (bitpos % bitsize) == 0
3787 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3788 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3789 {
3790 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3791 bitpos = 0;
3792 }
3793
3794 to_rtx = offset_address (to_rtx, offset_rtx,
3795 highest_pow2_factor_for_type (TREE_TYPE (to),
3796 offset));
3797 }
3798
3799 if (GET_CODE (to_rtx) == MEM)
3800 {
3801 /* If the field is at offset zero, we could have been given the
 3802 	     DECL_RTL of the parent struct.  Don't munge it.  */
3803 to_rtx = shallow_copy_rtx (to_rtx);
3804
3805 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3806 }
3807
3808 /* Deal with volatile and readonly fields. The former is only done
3809 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3810 if (volatilep && GET_CODE (to_rtx) == MEM)
3811 {
3812 if (to_rtx == orig_to_rtx)
3813 to_rtx = copy_rtx (to_rtx);
3814 MEM_VOLATILE_P (to_rtx) = 1;
3815 }
3816
3817 if (TREE_CODE (to) == COMPONENT_REF
3818 && TREE_READONLY (TREE_OPERAND (to, 1))
3819 /* We can't assert that a MEM won't be set more than once
3820 if the component is not addressable because another
3821 non-addressable component may be referenced by the same MEM. */
3822 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3823 {
3824 if (to_rtx == orig_to_rtx)
3825 to_rtx = copy_rtx (to_rtx);
3826 RTX_UNCHANGING_P (to_rtx) = 1;
3827 }
3828
3829 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3830 {
3831 if (to_rtx == orig_to_rtx)
3832 to_rtx = copy_rtx (to_rtx);
3833 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3834 }
3835
3836 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3837 (want_value
3838 /* Spurious cast for HPUX compiler. */
3839 ? ((enum machine_mode)
3840 TYPE_MODE (TREE_TYPE (to)))
3841 : VOIDmode),
3842 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3843
3844 preserve_temp_slots (result);
3845 free_temp_slots ();
3846 pop_temp_slots ();
3847
3848 /* If the value is meaningful, convert RESULT to the proper mode.
3849 Otherwise, return nothing. */
3850 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3851 TYPE_MODE (TREE_TYPE (from)),
3852 result,
3853 TREE_UNSIGNED (TREE_TYPE (to)))
3854 : NULL_RTX);
3855 }
3856
3857 /* If the rhs is a function call and its value is not an aggregate,
3858 call the function before we start to compute the lhs.
3859 This is needed for correct code for cases such as
3860 val = setjmp (buf) on machines where reference to val
3861 requires loading up part of an address in a separate insn.
3862
3863 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
 3864      since it might be a promoted variable where the zero- or sign-extension
3865 needs to be done. Handling this in the normal way is safe because no
3866 computation is done before the call. */
3867 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3868 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3869 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3870 && GET_CODE (DECL_RTL (to)) == REG))
3871 {
3872 rtx value;
3873
3874 push_temp_slots ();
3875 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3876 if (to_rtx == 0)
3877 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3878
3879 /* Handle calls that return values in multiple non-contiguous locations.
3880 The Irix 6 ABI has examples of this. */
3881 if (GET_CODE (to_rtx) == PARALLEL)
3882 emit_group_load (to_rtx, value, TREE_TYPE (from),
3883 int_size_in_bytes (TREE_TYPE (from)));
3884 else if (GET_MODE (to_rtx) == BLKmode)
3885 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3886 else
3887 {
3888 if (POINTER_TYPE_P (TREE_TYPE (to)))
3889 value = convert_memory_address (GET_MODE (to_rtx), value);
3890 emit_move_insn (to_rtx, value);
3891 }
3892 preserve_temp_slots (to_rtx);
3893 free_temp_slots ();
3894 pop_temp_slots ();
3895 return want_value ? to_rtx : NULL_RTX;
3896 }
3897
3898 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3899 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3900
3901 if (to_rtx == 0)
3902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3903
3904 /* Don't move directly into a return register. */
3905 if (TREE_CODE (to) == RESULT_DECL
3906 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3907 {
3908 rtx temp;
3909
3910 push_temp_slots ();
3911 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3912
3913 if (GET_CODE (to_rtx) == PARALLEL)
3914 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3915 int_size_in_bytes (TREE_TYPE (from)));
3916 else
3917 emit_move_insn (to_rtx, temp);
3918
3919 preserve_temp_slots (to_rtx);
3920 free_temp_slots ();
3921 pop_temp_slots ();
3922 return want_value ? to_rtx : NULL_RTX;
3923 }
3924
3925 /* In case we are returning the contents of an object which overlaps
3926 the place the value is being stored, use a safe function when copying
3927 a value through a pointer into a structure value return block. */
3928 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3929 && current_function_returns_struct
3930 && !current_function_returns_pcc_struct)
3931 {
3932 rtx from_rtx, size;
3933
3934 push_temp_slots ();
3935 size = expr_size (from);
3936 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3937
3938 if (TARGET_MEM_FUNCTIONS)
3939 emit_library_call (memmove_libfunc, LCT_NORMAL,
3940 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3941 XEXP (from_rtx, 0), Pmode,
3942 convert_to_mode (TYPE_MODE (sizetype),
3943 size, TREE_UNSIGNED (sizetype)),
3944 TYPE_MODE (sizetype));
3945 else
3946 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3947 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3948 XEXP (to_rtx, 0), Pmode,
3949 convert_to_mode (TYPE_MODE (integer_type_node),
3950 size,
3951 TREE_UNSIGNED (integer_type_node)),
3952 TYPE_MODE (integer_type_node));
3953
3954 preserve_temp_slots (to_rtx);
3955 free_temp_slots ();
3956 pop_temp_slots ();
3957 return want_value ? to_rtx : NULL_RTX;
3958 }
3959
3960 /* Compute FROM and store the value in the rtx we got. */
3961
3962 push_temp_slots ();
3963 result = store_expr (from, to_rtx, want_value);
3964 preserve_temp_slots (result);
3965 free_temp_slots ();
3966 pop_temp_slots ();
3967 return want_value ? result : NULL_RTX;
3968 }
3969
3970 /* Generate code for computing expression EXP,
3971 and storing the value into TARGET.
3972 TARGET may contain a QUEUED rtx.
3973
3974 If WANT_VALUE & 1 is nonzero, return a copy of the value
3975 not in TARGET, so that we can be sure to use the proper
3976 value in a containing expression even if TARGET has something
3977 else stored in it. If possible, we copy the value through a pseudo
3978 and return that pseudo. Or, if the value is constant, we try to
3979 return the constant. In some cases, we return a pseudo
3980 copied *from* TARGET.
3981
3982 If the mode is BLKmode then we may return TARGET itself.
 3983    It turns out that in BLKmode it doesn't cause a problem,
 3984    because C has no operators that could combine two different
3985 assignments into the same BLKmode object with different values
3986 with no sequence point. Will other languages need this to
3987 be more thorough?
3988
3989 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3990 to catch quickly any cases where the caller uses the value
3991 and fails to set WANT_VALUE.
3992
3993 If WANT_VALUE & 2 is set, this is a store into a call param on the
3994 stack, and block moves may need to be treated specially. */
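/* Usage sketch (illustrative): a caller that needs the stored value back,
   e.g. for a chained assignment, passes WANT_VALUE & 1 and gets a pseudo
   or a constant in return; a caller storing a call argument on the stack
   passes WANT_VALUE & 2, which makes the block moves below use
   BLOCK_OP_CALL_PARM and expands subexpressions with EXPAND_STACK_PARM.  */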
3995
3996 rtx
3997 store_expr (tree exp, rtx target, int want_value)
3998 {
3999 rtx temp;
4000 rtx alt_rtl = NULL_RTX;
4001 int dont_return_target = 0;
4002 int dont_store_target = 0;
4003
4004 if (VOID_TYPE_P (TREE_TYPE (exp)))
4005 {
4006 /* C++ can generate ?: expressions with a throw expression in one
4007 branch and an rvalue in the other. Here, we resolve attempts to
4008 store the throw expression's nonexistent result. */
4009 if (want_value)
4010 abort ();
4011 expand_expr (exp, const0_rtx, VOIDmode, 0);
4012 return NULL_RTX;
4013 }
4014 if (TREE_CODE (exp) == COMPOUND_EXPR)
4015 {
4016 /* Perform first part of compound expression, then assign from second
4017 part. */
4018 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4019 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4020 emit_queue ();
4021 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4022 }
4023 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4024 {
 4025       /* For a conditional expression, get a safe form of the target.  Then
4026 test the condition, doing the appropriate assignment on either
4027 side. This avoids the creation of unnecessary temporaries.
4028 For non-BLKmode, it is more efficient not to do this. */
4029
4030 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4031
4032 emit_queue ();
4033 target = protect_from_queue (target, 1);
4034
4035 do_pending_stack_adjust ();
4036 NO_DEFER_POP;
4037 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4038 start_cleanup_deferral ();
4039 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4040 end_cleanup_deferral ();
4041 emit_queue ();
4042 emit_jump_insn (gen_jump (lab2));
4043 emit_barrier ();
4044 emit_label (lab1);
4045 start_cleanup_deferral ();
4046 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4047 end_cleanup_deferral ();
4048 emit_queue ();
4049 emit_label (lab2);
4050 OK_DEFER_POP;
4051
4052 return want_value & 1 ? target : NULL_RTX;
4053 }
4054 else if (queued_subexp_p (target))
4055 /* If target contains a postincrement, let's not risk
4056 using it as the place to generate the rhs. */
4057 {
4058 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4059 {
4060 /* Expand EXP into a new pseudo. */
4061 temp = gen_reg_rtx (GET_MODE (target));
4062 temp = expand_expr (exp, temp, GET_MODE (target),
4063 (want_value & 2
4064 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4065 }
4066 else
4067 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4068 (want_value & 2
4069 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4070
4071 /* If target is volatile, ANSI requires accessing the value
4072 *from* the target, if it is accessed. So make that happen.
4073 In no case return the target itself. */
4074 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4075 dont_return_target = 1;
4076 }
4077 else if ((want_value & 1) != 0
4078 && GET_CODE (target) == MEM
4079 && ! MEM_VOLATILE_P (target)
4080 && GET_MODE (target) != BLKmode)
4081 /* If target is in memory and caller wants value in a register instead,
4082 arrange that. Pass TARGET as target for expand_expr so that,
4083 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4084 We know expand_expr will not use the target in that case.
4085 Don't do this if TARGET is volatile because we are supposed
4086 to write it and then read it. */
4087 {
4088 temp = expand_expr (exp, target, GET_MODE (target),
4089 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4090 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4091 {
4092 /* If TEMP is already in the desired TARGET, only copy it from
4093 memory and don't store it there again. */
4094 if (temp == target
4095 || (rtx_equal_p (temp, target)
4096 && ! side_effects_p (temp) && ! side_effects_p (target)))
4097 dont_store_target = 1;
4098 temp = copy_to_reg (temp);
4099 }
4100 dont_return_target = 1;
4101 }
4102 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4103 /* If this is a scalar in a register that is stored in a wider mode
4104 than the declared mode, compute the result into its declared mode
4105 and then convert to the wider mode. Our value is the computed
4106 expression. */
4107 {
4108 rtx inner_target = 0;
4109
4110 /* If we don't want a value, we can do the conversion inside EXP,
4111 which will often result in some optimizations. Do the conversion
4112 in two steps: first change the signedness, if needed, then
4113 the extend. But don't do this if the type of EXP is a subtype
4114 of something else since then the conversion might involve
4115 more than just converting modes. */
4116 if ((want_value & 1) == 0
4117 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4118 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4119 {
4120 if (TREE_UNSIGNED (TREE_TYPE (exp))
4121 != SUBREG_PROMOTED_UNSIGNED_P (target))
4122 exp = convert
4123 ((*lang_hooks.types.signed_or_unsigned_type)
4124 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4125
4126 exp = convert ((*lang_hooks.types.type_for_mode)
4127 (GET_MODE (SUBREG_REG (target)),
4128 SUBREG_PROMOTED_UNSIGNED_P (target)),
4129 exp);
4130
4131 inner_target = SUBREG_REG (target);
4132 }
4133
4134 temp = expand_expr (exp, inner_target, VOIDmode,
4135 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4136
4137 /* If TEMP is a MEM and we want a result value, make the access
4138 now so it gets done only once. Strictly speaking, this is
4139 only necessary if the MEM is volatile, or if the address
4140 overlaps TARGET. But not performing the load twice also
4141 reduces the amount of rtl we generate and then have to CSE. */
4142 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4143 temp = copy_to_reg (temp);
4144
4145 /* If TEMP is a VOIDmode constant, use convert_modes to make
4146 sure that we properly convert it. */
4147 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4148 {
4149 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4150 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4151 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4152 GET_MODE (target), temp,
4153 SUBREG_PROMOTED_UNSIGNED_P (target));
4154 }
4155
4156 convert_move (SUBREG_REG (target), temp,
4157 SUBREG_PROMOTED_UNSIGNED_P (target));
4158
4159 /* If we promoted a constant, change the mode back down to match
4160 target. Otherwise, the caller might get confused by a result whose
4161 mode is larger than expected. */
4162
4163 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4164 {
4165 if (GET_MODE (temp) != VOIDmode)
4166 {
4167 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4168 SUBREG_PROMOTED_VAR_P (temp) = 1;
4169 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4170 SUBREG_PROMOTED_UNSIGNED_P (target));
4171 }
4172 else
4173 temp = convert_modes (GET_MODE (target),
4174 GET_MODE (SUBREG_REG (target)),
4175 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4176 }
4177
4178 return want_value & 1 ? temp : NULL_RTX;
4179 }
4180 else
4181 {
4182 temp = expand_expr_real (exp, target, GET_MODE (target),
4183 (want_value & 2
4184 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4185 &alt_rtl);
4186 /* Return TARGET if it's a specified hardware register.
4187 If TARGET is a volatile mem ref, either return TARGET
4188 or return a reg copied *from* TARGET; ANSI requires this.
4189
4190 Otherwise, if TEMP is not TARGET, return TEMP
4191 if it is constant (for efficiency),
4192 or if we really want the correct value. */
4193 if (!(target && GET_CODE (target) == REG
4194 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4195 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4196 && ! rtx_equal_p (temp, target)
4197 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4198 dont_return_target = 1;
4199 }
4200
4201 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4202 the same as that of TARGET, adjust the constant. This is needed, for
4203 example, in case it is a CONST_DOUBLE and we want only a word-sized
4204 value. */
4205 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4206 && TREE_CODE (exp) != ERROR_MARK
4207 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4208 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4209 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4210
4211 /* If value was not generated in the target, store it there.
4212 Convert the value to TARGET's type first if necessary.
4213 If TEMP and TARGET compare equal according to rtx_equal_p, but
4214 one or both of them are volatile memory refs, we have to distinguish
4215 two cases:
4216 - expand_expr has used TARGET. In this case, we must not generate
4217 another copy. This can be detected by TARGET being equal according
4218 to == .
4219 - expand_expr has not used TARGET - that means that the source just
4220 happens to have the same RTX form. Since temp will have been created
4221 by expand_expr, it will compare unequal according to == .
4222 We must generate a copy in this case, to reach the correct number
4223 of volatile memory references. */
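  /* For illustration: if expand_expr handed back TARGET itself, TEMP and
     TARGET are pointer-equal and no extra copy is wanted; if EXP merely
     expanded to a distinct rtx of the same form (e.g. another volatile
     reference to the same address), the move below is emitted so that
     both volatile accesses really occur.  */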
4224
4225 if ((! rtx_equal_p (temp, target)
4226 || (temp != target && (side_effects_p (temp)
4227 || side_effects_p (target))))
4228 && TREE_CODE (exp) != ERROR_MARK
4229 && ! dont_store_target
4230 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
 4231 	 but TARGET is not a valid memory reference, TEMP will differ
4232 from TARGET although it is really the same location. */
4233 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4234 /* If there's nothing to copy, don't bother. Don't call expr_size
 4235 	 unless necessary, because some front ends' (e.g. C++) expr_size hook
4236 aborts on objects that are not supposed to be bit-copied or
4237 bit-initialized. */
4238 && expr_size (exp) != const0_rtx)
4239 {
4240 target = protect_from_queue (target, 1);
4241 if (GET_MODE (temp) != GET_MODE (target)
4242 && GET_MODE (temp) != VOIDmode)
4243 {
4244 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4245 if (dont_return_target)
4246 {
4247 /* In this case, we will return TEMP,
4248 so make sure it has the proper mode.
4249 But don't forget to store the value into TARGET. */
4250 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4251 emit_move_insn (target, temp);
4252 }
4253 else
4254 convert_move (target, temp, unsignedp);
4255 }
4256
4257 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4258 {
4259 /* Handle copying a string constant into an array. The string
4260 constant may be shorter than the array. So copy just the string's
4261 actual length, and clear the rest. First get the size of the data
4262 type of the string, which is actually the size of the target. */
4263 rtx size = expr_size (exp);
4264
4265 if (GET_CODE (size) == CONST_INT
4266 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4267 emit_block_move (target, temp, size,
4268 (want_value & 2
4269 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4270 else
4271 {
4272 /* Compute the size of the data to copy from the string. */
4273 tree copy_size
4274 = size_binop (MIN_EXPR,
4275 make_tree (sizetype, size),
4276 size_int (TREE_STRING_LENGTH (exp)));
4277 rtx copy_size_rtx
4278 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4279 (want_value & 2
4280 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4281 rtx label = 0;
4282
4283 /* Copy that much. */
4284 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4285 TREE_UNSIGNED (sizetype));
4286 emit_block_move (target, temp, copy_size_rtx,
4287 (want_value & 2
4288 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4289
4290 /* Figure out how much is left in TARGET that we have to clear.
4291 Do all calculations in ptr_mode. */
4292 if (GET_CODE (copy_size_rtx) == CONST_INT)
4293 {
4294 size = plus_constant (size, -INTVAL (copy_size_rtx));
4295 target = adjust_address (target, BLKmode,
4296 INTVAL (copy_size_rtx));
4297 }
4298 else
4299 {
4300 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4301 copy_size_rtx, NULL_RTX, 0,
4302 OPTAB_LIB_WIDEN);
4303
4304 #ifdef POINTERS_EXTEND_UNSIGNED
4305 if (GET_MODE (copy_size_rtx) != Pmode)
4306 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4307 TREE_UNSIGNED (sizetype));
4308 #endif
4309
4310 target = offset_address (target, copy_size_rtx,
4311 highest_pow2_factor (copy_size));
4312 label = gen_label_rtx ();
4313 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4314 GET_MODE (size), 0, label);
4315 }
4316
4317 if (size != const0_rtx)
4318 clear_storage (target, size);
4319
4320 if (label)
4321 emit_label (label);
4322 }
4323 }
4324 /* Handle calls that return values in multiple non-contiguous locations.
4325 The Irix 6 ABI has examples of this. */
4326 else if (GET_CODE (target) == PARALLEL)
4327 emit_group_load (target, temp, TREE_TYPE (exp),
4328 int_size_in_bytes (TREE_TYPE (exp)));
4329 else if (GET_MODE (temp) == BLKmode)
4330 emit_block_move (target, temp, expr_size (exp),
4331 (want_value & 2
4332 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4333 else
4334 emit_move_insn (target, temp);
4335 }
4336
4337 /* If we don't want a value, return NULL_RTX. */
4338 if ((want_value & 1) == 0)
4339 return NULL_RTX;
4340
4341 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4342 ??? The latter test doesn't seem to make sense. */
4343 else if (dont_return_target && GET_CODE (temp) != MEM)
4344 return temp;
4345
4346 /* Return TARGET itself if it is a hard register. */
4347 else if ((want_value & 1) != 0
4348 && GET_MODE (target) != BLKmode
4349 && ! (GET_CODE (target) == REG
4350 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4351 return copy_to_reg (target);
4352
4353 else
4354 return target;
4355 }
4356 \f
4357 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4358
4359 static int
4360 is_zeros_p (tree exp)
4361 {
4362 tree elt;
4363
4364 switch (TREE_CODE (exp))
4365 {
4366 case CONVERT_EXPR:
4367 case NOP_EXPR:
4368 case NON_LVALUE_EXPR:
4369 case VIEW_CONVERT_EXPR:
4370 return is_zeros_p (TREE_OPERAND (exp, 0));
4371
4372 case INTEGER_CST:
4373 return integer_zerop (exp);
4374
4375 case COMPLEX_CST:
4376 return
4377 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4378
4379 case REAL_CST:
4380 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4381
4382 case VECTOR_CST:
4383 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4384 elt = TREE_CHAIN (elt))
4385 if (!is_zeros_p (TREE_VALUE (elt)))
4386 return 0;
4387
4388 return 1;
4389
4390 case CONSTRUCTOR:
4391 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4392 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4393 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4394 if (! is_zeros_p (TREE_VALUE (elt)))
4395 return 0;
4396
4397 return 1;
4398
4399 default:
4400 return 0;
4401 }
4402 }
4403
4404 /* Return 1 if EXP contains mostly (3/4) zeros. */
4405
4406 int
4407 mostly_zeros_p (tree exp)
4408 {
4409 if (TREE_CODE (exp) == CONSTRUCTOR)
4410 {
4411 int elts = 0, zeros = 0;
4412 tree elt = CONSTRUCTOR_ELTS (exp);
4413 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4414 {
4415 /* If there are no ranges of true bits, it is all zero. */
4416 return elt == NULL_TREE;
4417 }
4418 for (; elt; elt = TREE_CHAIN (elt))
4419 {
4420 /* We do not handle the case where the index is a RANGE_EXPR,
4421 so the statistic will be somewhat inaccurate.
4422 We do make a more accurate count in store_constructor itself,
 4423 	 so, since this function is only used for nested array elements,
4424 this should be close enough. */
4425 if (mostly_zeros_p (TREE_VALUE (elt)))
4426 zeros++;
4427 elts++;
4428 }
4429
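      /* Worked example (illustrative): for "{ 0, 0, 0, 5 }" we have
	 zeros == 3 and elts == 4, so 4 * 3 >= 3 * 4 holds and we return 1;
	 for "{ 0, 0, 5, 5 }" the test 4 * 2 >= 3 * 4 fails.  */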
4430 return 4 * zeros >= 3 * elts;
4431 }
4432
4433 return is_zeros_p (exp);
4434 }
4435 \f
4436 /* Helper function for store_constructor.
4437 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4438 TYPE is the type of the CONSTRUCTOR, not the element type.
4439 CLEARED is as for store_constructor.
4440 ALIAS_SET is the alias set to use for any stores.
4441
4442 This provides a recursive shortcut back to store_constructor when it isn't
4443 necessary to go through store_field. This is so that we can pass through
4444 the cleared field to let store_constructor know that we may not have to
4445 clear a substructure if the outer structure has already been cleared. */
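/* For illustration: when a field's initializer is itself a CONSTRUCTOR,
   e.g. the inner braces of "{ { 0, 0 }, x }", recursing into
   store_constructor with CLEARED set lets it skip the all-zero inner
   elements rather than storing them again.  */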
4446
4447 static void
4448 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4449 HOST_WIDE_INT bitpos, enum machine_mode mode,
4450 tree exp, tree type, int cleared, int alias_set)
4451 {
4452 if (TREE_CODE (exp) == CONSTRUCTOR
4453 && bitpos % BITS_PER_UNIT == 0
4454 /* If we have a nonzero bitpos for a register target, then we just
4455 let store_field do the bitfield handling. This is unlikely to
 4456 	 generate unnecessary clear instructions anyway.  */
4457 && (bitpos == 0 || GET_CODE (target) == MEM))
4458 {
4459 if (GET_CODE (target) == MEM)
4460 target
4461 = adjust_address (target,
4462 GET_MODE (target) == BLKmode
4463 || 0 != (bitpos
4464 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4465 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4466
4467
4468 /* Update the alias set, if required. */
4469 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4470 && MEM_ALIAS_SET (target) != 0)
4471 {
4472 target = copy_rtx (target);
4473 set_mem_alias_set (target, alias_set);
4474 }
4475
4476 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4477 }
4478 else
4479 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4480 alias_set);
4481 }
4482
4483 /* Store the value of constructor EXP into the rtx TARGET.
4484 TARGET is either a REG or a MEM; we know it cannot conflict, since
4485 safe_from_p has been called.
4486 CLEARED is true if TARGET is known to have been zero'd.
4487 SIZE is the number of bytes of TARGET we are allowed to modify: this
4488 may not be the same as the size of EXP if we are assigning to a field
4489 which has been packed to exclude padding bits. */
4490
4491 static void
4492 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4493 {
4494 tree type = TREE_TYPE (exp);
4495 #ifdef WORD_REGISTER_OPERATIONS
4496 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4497 #endif
4498
4499 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4500 || TREE_CODE (type) == QUAL_UNION_TYPE)
4501 {
4502 tree elt;
4503
4504 /* If size is zero or the target is already cleared, do nothing. */
4505 if (size == 0 || cleared)
4506 cleared = 1;
4507 /* We either clear the aggregate or indicate the value is dead. */
4508 else if ((TREE_CODE (type) == UNION_TYPE
4509 || TREE_CODE (type) == QUAL_UNION_TYPE)
4510 && ! CONSTRUCTOR_ELTS (exp))
4511 /* If the constructor is empty, clear the union. */
4512 {
4513 clear_storage (target, expr_size (exp));
4514 cleared = 1;
4515 }
4516
4517 /* If we are building a static constructor into a register,
4518 set the initial value as zero so we can fold the value into
4519 a constant. But if more than one register is involved,
4520 this probably loses. */
4521 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4522 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4523 {
4524 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4525 cleared = 1;
4526 }
4527
4528 /* If the constructor has fewer fields than the structure
4529 or if we are initializing the structure to mostly zeros,
4530 clear the whole structure first. Don't do this if TARGET is a
4531 register whose mode size isn't equal to SIZE since clear_storage
4532 can't handle this case. */
4533 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4534 || mostly_zeros_p (exp))
4535 && (GET_CODE (target) != REG
4536 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4537 == size)))
4538 {
4539 rtx xtarget = target;
4540
4541 if (readonly_fields_p (type))
4542 {
4543 xtarget = copy_rtx (xtarget);
4544 RTX_UNCHANGING_P (xtarget) = 1;
4545 }
4546
4547 clear_storage (xtarget, GEN_INT (size));
4548 cleared = 1;
4549 }
4550
4551 if (! cleared)
4552 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4553
4554 /* Store each element of the constructor into
4555 the corresponding field of TARGET. */
4556
4557 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4558 {
4559 tree field = TREE_PURPOSE (elt);
4560 tree value = TREE_VALUE (elt);
4561 enum machine_mode mode;
4562 HOST_WIDE_INT bitsize;
4563 HOST_WIDE_INT bitpos = 0;
4564 tree offset;
4565 rtx to_rtx = target;
4566
4567 /* Just ignore missing fields.
4568 We cleared the whole structure, above,
4569 if any fields are missing. */
4570 if (field == 0)
4571 continue;
4572
4573 if (cleared && is_zeros_p (value))
4574 continue;
4575
4576 if (host_integerp (DECL_SIZE (field), 1))
4577 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4578 else
4579 bitsize = -1;
4580
4581 mode = DECL_MODE (field);
4582 if (DECL_BIT_FIELD (field))
4583 mode = VOIDmode;
4584
4585 offset = DECL_FIELD_OFFSET (field);
4586 if (host_integerp (offset, 0)
4587 && host_integerp (bit_position (field), 0))
4588 {
4589 bitpos = int_bit_position (field);
4590 offset = 0;
4591 }
4592 else
4593 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4594
4595 if (offset)
4596 {
4597 rtx offset_rtx;
4598
4599 if (CONTAINS_PLACEHOLDER_P (offset))
4600 offset = build (WITH_RECORD_EXPR, sizetype,
4601 offset, make_tree (TREE_TYPE (exp), target));
4602
4603 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4604 if (GET_CODE (to_rtx) != MEM)
4605 abort ();
4606
4607 #ifdef POINTERS_EXTEND_UNSIGNED
4608 if (GET_MODE (offset_rtx) != Pmode)
4609 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4610 #else
4611 if (GET_MODE (offset_rtx) != ptr_mode)
4612 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4613 #endif
4614
4615 to_rtx = offset_address (to_rtx, offset_rtx,
4616 highest_pow2_factor (offset));
4617 }
4618
4619 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4620 on the MEM might lead to scheduling the clearing after the
4621 store. */
4622 if (TREE_READONLY (field) && !cleared)
4623 {
4624 if (GET_CODE (to_rtx) == MEM)
4625 to_rtx = copy_rtx (to_rtx);
4626
4627 RTX_UNCHANGING_P (to_rtx) = 1;
4628 }
4629
4630 #ifdef WORD_REGISTER_OPERATIONS
4631 /* If this initializes a field that is smaller than a word, at the
4632 start of a word, try to widen it to a full word.
4633 This special case allows us to output C++ member function
4634 initializations in a form that the optimizers can understand. */
4635 if (GET_CODE (target) == REG
4636 && bitsize < BITS_PER_WORD
4637 && bitpos % BITS_PER_WORD == 0
4638 && GET_MODE_CLASS (mode) == MODE_INT
4639 && TREE_CODE (value) == INTEGER_CST
4640 && exp_size >= 0
4641 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4642 {
4643 tree type = TREE_TYPE (value);
4644
4645 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4646 {
4647 type = (*lang_hooks.types.type_for_size)
4648 (BITS_PER_WORD, TREE_UNSIGNED (type));
4649 value = convert (type, value);
4650 }
4651
4652 if (BYTES_BIG_ENDIAN)
4653 value
4654 = fold (build (LSHIFT_EXPR, type, value,
4655 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4656 bitsize = BITS_PER_WORD;
4657 mode = word_mode;
4658 }
4659 #endif
4660
4661 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4662 && DECL_NONADDRESSABLE_P (field))
4663 {
4664 to_rtx = copy_rtx (to_rtx);
4665 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4666 }
4667
4668 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4669 value, type, cleared,
4670 get_alias_set (TREE_TYPE (field)));
4671 }
4672 }
4673 else if (TREE_CODE (type) == ARRAY_TYPE
4674 || TREE_CODE (type) == VECTOR_TYPE)
4675 {
4676 tree elt;
4677 int i;
4678 int need_to_clear;
4679 tree domain = TYPE_DOMAIN (type);
4680 tree elttype = TREE_TYPE (type);
4681 int const_bounds_p;
4682 HOST_WIDE_INT minelt = 0;
4683 HOST_WIDE_INT maxelt = 0;
4684 int icode = 0;
4685 rtx *vector = NULL;
4686 int elt_size = 0;
4687 unsigned n_elts = 0;
4688
4689 /* Vectors are like arrays, but the domain is stored via an array
4690 type indirectly. */
4691 if (TREE_CODE (type) == VECTOR_TYPE)
4692 {
4693 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4694 the same field as TYPE_DOMAIN, we are not guaranteed that
4695 it always will. */
4696 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4697 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4698 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4699 {
4700 enum machine_mode mode = GET_MODE (target);
4701
4702 icode = (int) vec_init_optab->handlers[mode].insn_code;
4703 if (icode != CODE_FOR_nothing)
4704 {
4705 unsigned int i;
4706
4707 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4708 n_elts = (GET_MODE_SIZE (mode) / elt_size);
 4709 		  vector = alloca (n_elts * sizeof (rtx));
4710 for (i = 0; i < n_elts; i++)
4711 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4712 }
4713 }
4714 }
4715
4716 const_bounds_p = (TYPE_MIN_VALUE (domain)
4717 && TYPE_MAX_VALUE (domain)
4718 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4719 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4720
4721 /* If we have constant bounds for the range of the type, get them. */
4722 if (const_bounds_p)
4723 {
4724 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4725 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4726 }
4727
4728 /* If the constructor has fewer elements than the array,
4729 clear the whole array first. Similarly if this is
4730 static constructor of a non-BLKmode object. */
4731 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4732 need_to_clear = 1;
4733 else
4734 {
4735 HOST_WIDE_INT count = 0, zero_count = 0;
4736 need_to_clear = ! const_bounds_p;
4737
4738 /* This loop is a more accurate version of the loop in
4739 mostly_zeros_p (it handles RANGE_EXPR in an index).
4740 It is also needed to check for missing elements. */
4741 for (elt = CONSTRUCTOR_ELTS (exp);
4742 elt != NULL_TREE && ! need_to_clear;
4743 elt = TREE_CHAIN (elt))
4744 {
4745 tree index = TREE_PURPOSE (elt);
4746 HOST_WIDE_INT this_node_count;
4747
4748 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4749 {
4750 tree lo_index = TREE_OPERAND (index, 0);
4751 tree hi_index = TREE_OPERAND (index, 1);
4752
4753 if (! host_integerp (lo_index, 1)
4754 || ! host_integerp (hi_index, 1))
4755 {
4756 need_to_clear = 1;
4757 break;
4758 }
4759
4760 this_node_count = (tree_low_cst (hi_index, 1)
4761 - tree_low_cst (lo_index, 1) + 1);
4762 }
4763 else
4764 this_node_count = 1;
4765
4766 count += this_node_count;
4767 if (mostly_zeros_p (TREE_VALUE (elt)))
4768 zero_count += this_node_count;
4769 }
4770
4771 /* Clear the entire array first if there are any missing elements,
4772 or if the incidence of zero elements is >= 75%. */
4773 if (! need_to_clear
4774 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4775 need_to_clear = 1;
4776 }
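      /* Worked example (illustrative): an initializer supplying 8 elements
	 for a 10-element array gives count == 8, which is less than
	 maxelt - minelt + 1 == 10, so the whole array is cleared first and
	 only the explicitly given elements are stored below.  */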
4777
4778 if (need_to_clear && size > 0 && !vector)
4779 {
4780 if (! cleared)
4781 {
4782 if (REG_P (target))
4783 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4784 else
4785 clear_storage (target, GEN_INT (size));
4786 }
4787 cleared = 1;
4788 }
4789 else if (REG_P (target))
4790 /* Inform later passes that the old value is dead. */
4791 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4792
4793 /* Store each element of the constructor into
4794 the corresponding element of TARGET, determined
4795 by counting the elements. */
4796 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4797 elt;
4798 elt = TREE_CHAIN (elt), i++)
4799 {
4800 enum machine_mode mode;
4801 HOST_WIDE_INT bitsize;
4802 HOST_WIDE_INT bitpos;
4803 int unsignedp;
4804 tree value = TREE_VALUE (elt);
4805 tree index = TREE_PURPOSE (elt);
4806 rtx xtarget = target;
4807
4808 if (cleared && is_zeros_p (value))
4809 continue;
4810
4811 unsignedp = TREE_UNSIGNED (elttype);
4812 mode = TYPE_MODE (elttype);
4813 if (mode == BLKmode)
4814 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4815 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4816 : -1);
4817 else
4818 bitsize = GET_MODE_BITSIZE (mode);
4819
4820 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4821 {
4822 tree lo_index = TREE_OPERAND (index, 0);
4823 tree hi_index = TREE_OPERAND (index, 1);
4824 rtx index_r, pos_rtx, loop_end;
4825 struct nesting *loop;
4826 HOST_WIDE_INT lo, hi, count;
4827 tree position;
4828
4829 if (vector)
4830 abort ();
4831
4832 /* If the range is constant and "small", unroll the loop. */
4833 if (const_bounds_p
4834 && host_integerp (lo_index, 0)
4835 && host_integerp (hi_index, 0)
4836 && (lo = tree_low_cst (lo_index, 0),
4837 hi = tree_low_cst (hi_index, 0),
4838 count = hi - lo + 1,
4839 (GET_CODE (target) != MEM
4840 || count <= 2
4841 || (host_integerp (TYPE_SIZE (elttype), 1)
4842 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4843 <= 40 * 8)))))
4844 {
4845 lo -= minelt; hi -= minelt;
4846 for (; lo <= hi; lo++)
4847 {
4848 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4849
4850 if (GET_CODE (target) == MEM
4851 && !MEM_KEEP_ALIAS_SET_P (target)
4852 && TREE_CODE (type) == ARRAY_TYPE
4853 && TYPE_NONALIASED_COMPONENT (type))
4854 {
4855 target = copy_rtx (target);
4856 MEM_KEEP_ALIAS_SET_P (target) = 1;
4857 }
4858
4859 store_constructor_field
4860 (target, bitsize, bitpos, mode, value, type, cleared,
4861 get_alias_set (elttype));
4862 }
4863 }
4864 else
4865 {
4866 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4867 loop_end = gen_label_rtx ();
4868
4869 unsignedp = TREE_UNSIGNED (domain);
4870
4871 index = build_decl (VAR_DECL, NULL_TREE, domain);
4872
4873 index_r
4874 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4875 &unsignedp, 0));
4876 SET_DECL_RTL (index, index_r);
4877 if (TREE_CODE (value) == SAVE_EXPR
4878 && SAVE_EXPR_RTL (value) == 0)
4879 {
4880 /* Make sure value gets expanded once before the
4881 loop. */
4882 expand_expr (value, const0_rtx, VOIDmode, 0);
4883 emit_queue ();
4884 }
4885 store_expr (lo_index, index_r, 0);
4886 loop = expand_start_loop (0);
4887
4888 /* Assign value to element index. */
4889 position
4890 = convert (ssizetype,
4891 fold (build (MINUS_EXPR, TREE_TYPE (index),
4892 index, TYPE_MIN_VALUE (domain))));
4893 position = size_binop (MULT_EXPR, position,
4894 convert (ssizetype,
4895 TYPE_SIZE_UNIT (elttype)));
4896
4897 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4898 xtarget = offset_address (target, pos_rtx,
4899 highest_pow2_factor (position));
4900 xtarget = adjust_address (xtarget, mode, 0);
4901 if (TREE_CODE (value) == CONSTRUCTOR)
4902 store_constructor (value, xtarget, cleared,
4903 bitsize / BITS_PER_UNIT);
4904 else
4905 store_expr (value, xtarget, 0);
4906
4907 expand_exit_loop_if_false (loop,
4908 build (LT_EXPR, integer_type_node,
4909 index, hi_index));
4910
4911 expand_increment (build (PREINCREMENT_EXPR,
4912 TREE_TYPE (index),
4913 index, integer_one_node), 0, 0);
4914 expand_end_loop ();
4915 emit_label (loop_end);
4916 }
4917 }
4918 else if ((index != 0 && ! host_integerp (index, 0))
4919 || ! host_integerp (TYPE_SIZE (elttype), 1))
4920 {
4921 tree position;
4922
4923 if (vector)
4924 abort ();
4925
4926 if (index == 0)
 4927 		index = ssize_int (i);
4928
4929 if (minelt)
4930 index = convert (ssizetype,
4931 fold (build (MINUS_EXPR, index,
 4932 				 fold (build (MINUS_EXPR, TREE_TYPE (index),
 4933 					      index, TYPE_MIN_VALUE (domain))));
4934 position = size_binop (MULT_EXPR, index,
4935 convert (ssizetype,
4936 TYPE_SIZE_UNIT (elttype)));
4937 xtarget = offset_address (target,
4938 expand_expr (position, 0, VOIDmode, 0),
4939 highest_pow2_factor (position));
4940 xtarget = adjust_address (xtarget, mode, 0);
4941 store_expr (value, xtarget, 0);
4942 }
4943 else if (vector)
4944 {
4945 int pos;
4946
4947 if (index != 0)
4948 pos = tree_low_cst (index, 0) - minelt;
4949 else
4950 pos = i;
4951 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4952 }
4953 else
4954 {
4955 if (index != 0)
4956 bitpos = ((tree_low_cst (index, 0) - minelt)
4957 * tree_low_cst (TYPE_SIZE (elttype), 1));
4958 else
4959 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4960
4961 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4962 && TREE_CODE (type) == ARRAY_TYPE
4963 && TYPE_NONALIASED_COMPONENT (type))
4964 {
4965 target = copy_rtx (target);
4966 MEM_KEEP_ALIAS_SET_P (target) = 1;
4967 }
4968 store_constructor_field (target, bitsize, bitpos, mode, value,
4969 type, cleared, get_alias_set (elttype));
4970 }
4971 }
4972 if (vector)
4973 {
4974 emit_insn (GEN_FCN (icode) (target,
4975 gen_rtx_PARALLEL (GET_MODE (target),
4976 gen_rtvec_v (n_elts, vector))));
4977 }
4978 }
4979
4980 /* Set constructor assignments. */
4981 else if (TREE_CODE (type) == SET_TYPE)
4982 {
4983 tree elt = CONSTRUCTOR_ELTS (exp);
4984 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4985 tree domain = TYPE_DOMAIN (type);
4986 tree domain_min, domain_max, bitlength;
4987
4988 /* The default implementation strategy is to extract the constant
4989 parts of the constructor, use that to initialize the target,
4990 and then "or" in whatever non-constant ranges we need in addition.
4991
4992 If a large set is all zero or all ones, it is
4993 probably better to set it using memset (if available) or bzero.
4994 Also, if a large set has just a single range, it may also be
 4995       better to first clear the set (using bzero/memset), and then set
 4996       the bits we want.  */
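      /* Worked example (illustrative): with set_word_size == 8 and the
	 constant part of the constructor setting bits 1 and 3, the packing
	 loop below accumulates word == (1 << 1) | (1 << 3) == 0x0a on a
	 little-endian target (the BYTES_BIG_ENDIAN branch mirrors the bit
	 positions within the word) before emitting the move.  */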
4997
4998 /* Check for all zeros. */
4999 if (elt == NULL_TREE && size > 0)
5000 {
5001 if (!cleared)
5002 clear_storage (target, GEN_INT (size));
5003 return;
5004 }
5005
5006 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5007 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5008 bitlength = size_binop (PLUS_EXPR,
5009 size_diffop (domain_max, domain_min),
5010 ssize_int (1));
5011
5012 nbits = tree_low_cst (bitlength, 1);
5013
5014 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5015 are "complicated" (more than one range), initialize (the
5016 constant parts) by copying from a constant. */
5017 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5018 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5019 {
5020 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5021 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5022 char *bit_buffer = alloca (nbits);
5023 HOST_WIDE_INT word = 0;
5024 unsigned int bit_pos = 0;
5025 unsigned int ibit = 0;
5026 unsigned int offset = 0; /* In bytes from beginning of set. */
5027
5028 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5029 for (;;)
5030 {
5031 if (bit_buffer[ibit])
5032 {
5033 if (BYTES_BIG_ENDIAN)
5034 word |= (1 << (set_word_size - 1 - bit_pos));
5035 else
5036 word |= 1 << bit_pos;
5037 }
5038
5039 bit_pos++; ibit++;
5040 if (bit_pos >= set_word_size || ibit == nbits)
5041 {
5042 if (word != 0 || ! cleared)
5043 {
5044 rtx datum = GEN_INT (word);
5045 rtx to_rtx;
5046
5047 /* The assumption here is that it is safe to use
5048 XEXP if the set is multi-word, but not if
5049 it's single-word. */
5050 if (GET_CODE (target) == MEM)
5051 to_rtx = adjust_address (target, mode, offset);
5052 else if (offset == 0)
5053 to_rtx = target;
5054 else
5055 abort ();
5056 emit_move_insn (to_rtx, datum);
5057 }
5058
5059 if (ibit == nbits)
5060 break;
5061 word = 0;
5062 bit_pos = 0;
5063 offset += set_word_size / BITS_PER_UNIT;
5064 }
5065 }
5066 }
5067 else if (!cleared)
5068 /* Don't bother clearing storage if the set is all ones. */
5069 if (TREE_CHAIN (elt) != NULL_TREE
5070 || (TREE_PURPOSE (elt) == NULL_TREE
5071 ? nbits != 1
5072 : ( ! host_integerp (TREE_VALUE (elt), 0)
5073 || ! host_integerp (TREE_PURPOSE (elt), 0)
5074 || (tree_low_cst (TREE_VALUE (elt), 0)
5075 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5076 != (HOST_WIDE_INT) nbits))))
5077 clear_storage (target, expr_size (exp));
5078
5079 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5080 {
5081 /* Start of range of element or NULL. */
5082 tree startbit = TREE_PURPOSE (elt);
5083 /* End of range of element, or element value. */
5084 tree endbit = TREE_VALUE (elt);
5085 HOST_WIDE_INT startb, endb;
5086 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5087
5088 bitlength_rtx = expand_expr (bitlength,
5089 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5090
5091 /* Handle non-range tuple element like [ expr ]. */
5092 if (startbit == NULL_TREE)
5093 {
5094 startbit = save_expr (endbit);
5095 endbit = startbit;
5096 }
5097
5098 startbit = convert (sizetype, startbit);
5099 endbit = convert (sizetype, endbit);
5100 if (! integer_zerop (domain_min))
5101 {
5102 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5103 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5104 }
5105 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5106 EXPAND_CONST_ADDRESS);
5107 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5108 EXPAND_CONST_ADDRESS);
5109
5110 if (REG_P (target))
5111 {
5112 targetx
5113 = assign_temp
5114 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5115 (GET_MODE (target), 0),
5116 TYPE_QUAL_CONST)),
5117 0, 1, 1);
5118 emit_move_insn (targetx, target);
5119 }
5120
5121 else if (GET_CODE (target) == MEM)
5122 targetx = target;
5123 else
5124 abort ();
5125
5126 /* Optimization: If startbit and endbit are constants divisible
5127 by BITS_PER_UNIT, call memset instead. */
5128 if (TARGET_MEM_FUNCTIONS
5129 && TREE_CODE (startbit) == INTEGER_CST
5130 && TREE_CODE (endbit) == INTEGER_CST
5131 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5132 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5133 {
5134 emit_library_call (memset_libfunc, LCT_NORMAL,
5135 VOIDmode, 3,
5136 plus_constant (XEXP (targetx, 0),
5137 startb / BITS_PER_UNIT),
5138 Pmode,
5139 constm1_rtx, TYPE_MODE (integer_type_node),
5140 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5141 TYPE_MODE (sizetype));
5142 }
5143 else
5144 emit_library_call (setbits_libfunc, LCT_NORMAL,
5145 VOIDmode, 4, XEXP (targetx, 0),
5146 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5147 startbit_rtx, TYPE_MODE (sizetype),
5148 endbit_rtx, TYPE_MODE (sizetype));
5149
5150 if (REG_P (target))
5151 emit_move_insn (target, targetx);
5152 }
5153 }
5154
5155 else
5156 abort ();
5157 }
5158
5159 /* Store the value of EXP (an expression tree)
5160 into a subfield of TARGET which has mode MODE and occupies
5161 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5162 If MODE is VOIDmode, it means that we are storing into a bit-field.
5163
5164 If VALUE_MODE is VOIDmode, return nothing in particular.
5165 UNSIGNEDP is not used in this case.
5166
5167 Otherwise, return an rtx for the value stored. This rtx
5168 has mode VALUE_MODE if that is convenient to do.
5169 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5170
5171 TYPE is the type of the underlying object,
5172
5173 ALIAS_SET is the alias set for the destination. This value will
5174 (in general) be different from that for TARGET, since TARGET is a
5175 reference to the containing structure. */
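/* For illustration: storing into a 3-bit bit-field that starts 5 bits into
   its containing unit arrives here with MODE == VOIDmode, BITSIZE == 3 and
   BITPOS == 5, which routes the value through store_bit_field below rather
   than through an ordinary memory reference.  */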
5176
5177 static rtx
5178 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5179 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5180 int unsignedp, tree type, int alias_set)
5181 {
5182 HOST_WIDE_INT width_mask = 0;
5183
5184 if (TREE_CODE (exp) == ERROR_MARK)
5185 return const0_rtx;
5186
5187 /* If we have nothing to store, do nothing unless the expression has
5188 side-effects. */
5189 if (bitsize == 0)
5190 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5191 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5192 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5193
5194 /* If we are storing into an unaligned field of an aligned union that is
5195 in a register, we may have the mode of TARGET being an integer mode but
5196 MODE == BLKmode. In that case, get an aligned object whose size and
5197 alignment are the same as TARGET and store TARGET into it (we can avoid
5198 the store if the field being stored is the entire width of TARGET). Then
5199 call ourselves recursively to store the field into a BLKmode version of
5200 that object. Finally, load from the object into TARGET. This is not
5201 very efficient in general, but should only be slightly more expensive
5202 than the otherwise-required unaligned accesses. Perhaps this can be
5203 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5204 twice, once with emit_move_insn and once via store_field. */
5205
5206 if (mode == BLKmode
5207 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5208 {
5209 rtx object = assign_temp (type, 0, 1, 1);
5210 rtx blk_object = adjust_address (object, BLKmode, 0);
5211
5212 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5213 emit_move_insn (object, target);
5214
5215 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5216 alias_set);
5217
5218 emit_move_insn (target, object);
5219
5220 /* We want to return the BLKmode version of the data. */
5221 return blk_object;
5222 }
5223
5224 if (GET_CODE (target) == CONCAT)
5225 {
5226 /* We're storing into a struct containing a single __complex. */
5227
5228 if (bitpos != 0)
5229 abort ();
5230 return store_expr (exp, target, 0);
5231 }
5232
5233 /* If the structure is in a register or if the component
5234 is a bit field, we cannot use addressing to access it.
5235 Use bit-field techniques or SUBREG to store in it. */
5236
5237 if (mode == VOIDmode
5238 || (mode != BLKmode && ! direct_store[(int) mode]
5239 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5240 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5241 || GET_CODE (target) == REG
5242 || GET_CODE (target) == SUBREG
5243 /* If the field isn't aligned enough to store as an ordinary memref,
5244 store it as a bit field. */
5245 || (mode != BLKmode
5246 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5247 || bitpos % GET_MODE_ALIGNMENT (mode))
5248 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5249 || (bitpos % BITS_PER_UNIT != 0)))
5250 /* If the RHS and field are a constant size and the size of the
5251 RHS isn't the same size as the bitfield, we must use bitfield
5252 operations. */
5253 || (bitsize >= 0
5254 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5255 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5256 {
5257 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5258
5259 /* If BITSIZE is narrower than the size of the type of EXP
5260 we will be narrowing TEMP. Normally, what's wanted are the
5261 low-order bits. However, if EXP's type is a record and this is
5262 a big-endian machine, we want the upper BITSIZE bits.
5263 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5264 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5265 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5266 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5267 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5268 - bitsize),
5269 NULL_RTX, 1);
5270
5271 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5272 MODE. */
5273 if (mode != VOIDmode && mode != BLKmode
5274 && mode != TYPE_MODE (TREE_TYPE (exp)))
5275 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5276
5277 /* If the modes of TARGET and TEMP are both BLKmode, both
5278 must be in memory and BITPOS must be aligned on a byte
5279 boundary. If so, we simply do a block copy. */
5280 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5281 {
5282 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5283 || bitpos % BITS_PER_UNIT != 0)
5284 abort ();
5285
5286 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5287 emit_block_move (target, temp,
5288 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5289 / BITS_PER_UNIT),
5290 BLOCK_OP_NORMAL);
5291
5292 return value_mode == VOIDmode ? const0_rtx : target;
5293 }
5294
5295 /* Store the value in the bitfield. */
5296 store_bit_field (target, bitsize, bitpos, mode, temp,
5297 int_size_in_bytes (type));
5298
5299 if (value_mode != VOIDmode)
5300 {
5301 /* The caller wants an rtx for the value.
5302 If possible, avoid refetching from the bitfield itself. */
5303 if (width_mask != 0
5304 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5305 {
5306 tree count;
5307 enum machine_mode tmode;
5308
5309 tmode = GET_MODE (temp);
5310 if (tmode == VOIDmode)
5311 tmode = value_mode;
5312
5313 if (unsignedp)
5314 return expand_and (tmode, temp,
5315 gen_int_mode (width_mask, tmode),
5316 NULL_RTX);
5317
5318 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5319 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5320 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5321 }
5322
5323 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5324 NULL_RTX, value_mode, VOIDmode,
5325 int_size_in_bytes (type));
5326 }
5327 return const0_rtx;
5328 }
5329 else
5330 {
5331 rtx addr = XEXP (target, 0);
5332 rtx to_rtx = target;
5333
5334 /* If a value is wanted, it must be the lhs;
5335 so make the address stable for multiple use. */
5336
5337 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5338 && ! CONSTANT_ADDRESS_P (addr)
5339 /* A frame-pointer reference is already stable. */
5340 && ! (GET_CODE (addr) == PLUS
5341 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5342 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5343 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5344 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5345
5346 /* Now build a reference to just the desired component. */
5347
5348 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5349
5350 if (to_rtx == target)
5351 to_rtx = copy_rtx (to_rtx);
5352
5353 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5354 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5355 set_mem_alias_set (to_rtx, alias_set);
5356
5357 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5358 }
5359 }
5360 \f
5361 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5362 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5363 codes and find the ultimate containing object, which we return.
5364
5365 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5366 bit position, and *PUNSIGNEDP to the signedness of the field.
5367 If the position of the field is variable, we store a tree
5368 giving the variable offset (in units) in *POFFSET.
5369 This offset is in addition to the bit position.
5370 If the position is not variable, we store 0 in *POFFSET.
5371
5372 If any of the extraction expressions is volatile,
5373 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5374
5375 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5376 is a mode that can be used to access the field. In that case, *PBITSIZE
5377 is redundant.
5378
5379 If the field describes a variable-sized object, *PMODE is set to
5380 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5381 this case, but the address of the object can be found. */
5382
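/* Illustrative sketch only: for a reference such as `s.f' (assuming a
   variable `s' of RECORD_TYPE with a field `f'), a typical caller does

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                     &mode1, &unsignedp, &volatilep);

   and gets back the VAR_DECL for `s' in TEM, with BITPOS and BITSIZE
   describing where `f' lies within it; OFFSET is zero here because
   the position of the field is constant.  */
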
5383 tree
5384 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5385 HOST_WIDE_INT *pbitpos, tree *poffset,
5386 enum machine_mode *pmode, int *punsignedp,
5387 int *pvolatilep)
5388 {
5389 tree size_tree = 0;
5390 enum machine_mode mode = VOIDmode;
5391 tree offset = size_zero_node;
5392 tree bit_offset = bitsize_zero_node;
5393 tree placeholder_ptr = 0;
5394 tree tem;
5395
5396 /* First get the mode, signedness, and size. We do this from just the
5397 outermost expression. */
5398 if (TREE_CODE (exp) == COMPONENT_REF)
5399 {
5400 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5401 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5402 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5403
5404 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5405 }
5406 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5407 {
5408 size_tree = TREE_OPERAND (exp, 1);
5409 *punsignedp = TREE_UNSIGNED (exp);
5410 }
5411 else
5412 {
5413 mode = TYPE_MODE (TREE_TYPE (exp));
5414 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5415
5416 if (mode == BLKmode)
5417 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5418 else
5419 *pbitsize = GET_MODE_BITSIZE (mode);
5420 }
5421
5422 if (size_tree != 0)
5423 {
5424 if (! host_integerp (size_tree, 1))
5425 mode = BLKmode, *pbitsize = -1;
5426 else
5427 *pbitsize = tree_low_cst (size_tree, 1);
5428 }
5429
5430 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5431 and find the ultimate containing object. */
5432 while (1)
5433 {
5434 if (TREE_CODE (exp) == BIT_FIELD_REF)
5435 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5436 else if (TREE_CODE (exp) == COMPONENT_REF)
5437 {
5438 tree field = TREE_OPERAND (exp, 1);
5439 tree this_offset = DECL_FIELD_OFFSET (field);
5440
5441 /* If this field hasn't been filled in yet, don't go
5442 past it. This should only happen when folding expressions
5443 made during type construction. */
5444 if (this_offset == 0)
5445 break;
5446 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5447 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5448
5449 offset = size_binop (PLUS_EXPR, offset, this_offset);
5450 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5451 DECL_FIELD_BIT_OFFSET (field));
5452
5453 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5454 }
5455
5456 else if (TREE_CODE (exp) == ARRAY_REF
5457 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5458 {
5459 tree index = TREE_OPERAND (exp, 1);
5460 tree array = TREE_OPERAND (exp, 0);
5461 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5462 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5463 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5464
5465 /* We assume all arrays have sizes that are a multiple of a byte.
5466 First subtract the lower bound, if any, in the type of the
5467 index, then convert to sizetype and multiply by the size of the
5468 array element. */
5469 if (low_bound != 0 && ! integer_zerop (low_bound))
5470 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5471 index, low_bound));
5472
5473 /* If the index has a self-referential type, pass it to a
5474 WITH_RECORD_EXPR; if the component size does, pass our
5475 component to one. */
5476 if (CONTAINS_PLACEHOLDER_P (index))
5477 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5478 if (CONTAINS_PLACEHOLDER_P (unit_size))
5479 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5480
5481 offset = size_binop (PLUS_EXPR, offset,
5482 size_binop (MULT_EXPR,
5483 convert (sizetype, index),
5484 unit_size));
5485 }
5486
5487 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5488 {
5489 tree new = find_placeholder (exp, &placeholder_ptr);
5490
5491 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5492 We might have been called from tree optimization where we
5493 haven't set up an object yet. */
5494 if (new == 0)
5495 break;
5496 else
5497 exp = new;
5498
5499 continue;
5500 }
5501
5502 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5503 conversions that don't change the mode, and all view conversions
5504 except those that need to "step up" the alignment. */
5505 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5506 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5507 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5508 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5509 && STRICT_ALIGNMENT
5510 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5511 < BIGGEST_ALIGNMENT)
5512 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5513 || TYPE_ALIGN_OK (TREE_TYPE
5514 (TREE_OPERAND (exp, 0))))))
5515 && ! ((TREE_CODE (exp) == NOP_EXPR
5516 || TREE_CODE (exp) == CONVERT_EXPR)
5517 && (TYPE_MODE (TREE_TYPE (exp))
5518 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5519 break;
5520
5521 /* If any reference in the chain is volatile, the effect is volatile. */
5522 if (TREE_THIS_VOLATILE (exp))
5523 *pvolatilep = 1;
5524
5525 exp = TREE_OPERAND (exp, 0);
5526 }
5527
5528 /* If OFFSET is constant, see if we can return the whole thing as a
5529 constant bit position. Otherwise, split it up. */
5530 if (host_integerp (offset, 0)
5531 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5532 bitsize_unit_node))
5533 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5534 && host_integerp (tem, 0))
5535 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5536 else
5537 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5538
5539 *pmode = mode;
5540 return exp;
5541 }
5542
5543 /* Return 1 if T is an expression that get_inner_reference handles. */
5544
5545 int
5546 handled_component_p (tree t)
5547 {
5548 switch (TREE_CODE (t))
5549 {
5550 case BIT_FIELD_REF:
5551 case COMPONENT_REF:
5552 case ARRAY_REF:
5553 case ARRAY_RANGE_REF:
5554 case NON_LVALUE_EXPR:
5555 case VIEW_CONVERT_EXPR:
5556 return 1;
5557
5558 /* ??? Sure they are handled, but get_inner_reference may return
5559 a different PBITSIZE, depending upon whether the expression is
5560 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5561 case NOP_EXPR:
5562 case CONVERT_EXPR:
5563 return (TYPE_MODE (TREE_TYPE (t))
5564 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5565
5566 default:
5567 return 0;
5568 }
5569 }
5570 \f
5571 /* Given an rtx VALUE that may contain additions and multiplications, return
5572 an equivalent value that just refers to a register, memory, or constant.
5573 This is done by generating instructions to perform the arithmetic and
5574 returning a pseudo-register containing the value.
5575
5576 The returned value may be a REG, SUBREG, MEM or constant. */
5577
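/* Illustrative sketch only: given a pseudo register in SOME_REG (an
   assumed name), the address arithmetic

     rtx addr = gen_rtx_PLUS (Pmode, some_reg, GEN_INT (8));
     rtx val = force_operand (addr, NULL_RTX);

   emits an add insn and leaves in VAL a pseudo register holding the
   computed sum, suitable for direct use as an operand.  */
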
5578 rtx
5579 force_operand (rtx value, rtx target)
5580 {
5581 rtx op1, op2;
5582 /* Use subtarget as the target for operand 0 of a binary operation. */
5583 rtx subtarget = get_subtarget (target);
5584 enum rtx_code code = GET_CODE (value);
5585
5586 /* Check for a PIC address load. */
5587 if ((code == PLUS || code == MINUS)
5588 && XEXP (value, 0) == pic_offset_table_rtx
5589 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5590 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5591 || GET_CODE (XEXP (value, 1)) == CONST))
5592 {
5593 if (!subtarget)
5594 subtarget = gen_reg_rtx (GET_MODE (value));
5595 emit_move_insn (subtarget, value);
5596 return subtarget;
5597 }
5598
5599 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5600 {
5601 if (!target)
5602 target = gen_reg_rtx (GET_MODE (value));
5603 convert_move (target, force_operand (XEXP (value, 0), NULL),
5604 code == ZERO_EXTEND);
5605 return target;
5606 }
5607
5608 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5609 {
5610 op2 = XEXP (value, 1);
5611 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5612 subtarget = 0;
5613 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5614 {
5615 code = PLUS;
5616 op2 = negate_rtx (GET_MODE (value), op2);
5617 }
5618
5619 /* Check for an addition with OP2 a constant integer and our first
5620 operand a PLUS of a virtual register and something else. In that
5621 case, we want to emit the sum of the virtual register and the
5622 constant first and then add the other value. This allows virtual
5623 register instantiation to simply modify the constant rather than
5624 creating another one around this addition. */
5625 if (code == PLUS && GET_CODE (op2) == CONST_INT
5626 && GET_CODE (XEXP (value, 0)) == PLUS
5627 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5628 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5629 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5630 {
5631 rtx temp = expand_simple_binop (GET_MODE (value), code,
5632 XEXP (XEXP (value, 0), 0), op2,
5633 subtarget, 0, OPTAB_LIB_WIDEN);
5634 return expand_simple_binop (GET_MODE (value), code, temp,
5635 force_operand (XEXP (XEXP (value,
5636 0), 1), 0),
5637 target, 0, OPTAB_LIB_WIDEN);
5638 }
5639
5640 op1 = force_operand (XEXP (value, 0), subtarget);
5641 op2 = force_operand (op2, NULL_RTX);
5642 switch (code)
5643 {
5644 case MULT:
5645 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5646 case DIV:
5647 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5648 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5649 target, 1, OPTAB_LIB_WIDEN);
5650 else
5651 return expand_divmod (0,
5652 FLOAT_MODE_P (GET_MODE (value))
5653 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5654 GET_MODE (value), op1, op2, target, 0);
5655 break;
5656 case MOD:
5657 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5658 target, 0);
5659 break;
5660 case UDIV:
5661 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5662 target, 1);
5663 break;
5664 case UMOD:
5665 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5666 target, 1);
5667 break;
5668 case ASHIFTRT:
5669 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5670 target, 0, OPTAB_LIB_WIDEN);
5671 break;
5672 default:
5673 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5674 target, 1, OPTAB_LIB_WIDEN);
5675 }
5676 }
5677 if (GET_RTX_CLASS (code) == '1')
5678 {
5679 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5680 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5681 }
5682
5683 #ifdef INSN_SCHEDULING
5684 /* On machines that have insn scheduling, we want all memory references to be
5685 explicit, so we need to deal with such paradoxical SUBREGs. */
5686 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5687 && (GET_MODE_SIZE (GET_MODE (value))
5688 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5689 value
5690 = simplify_gen_subreg (GET_MODE (value),
5691 force_reg (GET_MODE (SUBREG_REG (value)),
5692 force_operand (SUBREG_REG (value),
5693 NULL_RTX)),
5694 GET_MODE (SUBREG_REG (value)),
5695 SUBREG_BYTE (value));
5696 #endif
5697
5698 return value;
5699 }
5700 \f
5701 /* Subroutine of expand_expr: return nonzero iff there is no way that
5702 EXP can reference X, which is being modified. TOP_P is nonzero if this
5703 call is going to be used to determine whether we need a temporary
5704 for EXP, as opposed to a recursive call to this function.
5705
5706 It is always safe for this routine to return zero since it merely
5707 searches for optimization opportunities. */
5708
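/* Illustrative sketch only: the typical caller pattern, as used when
   expanding the operands of a binary expression, is

     if (target != 0 && ! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. give up on a suggested TARGET rather than risk clobbering a
   value that EXP1 still needs to read.  */
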
5709 int
5710 safe_from_p (rtx x, tree exp, int top_p)
5711 {
5712 rtx exp_rtl = 0;
5713 int i, nops;
5714 static tree save_expr_list;
5715
5716 if (x == 0
5717 /* If EXP has varying size, we MUST use a target since we currently
5718 have no way of allocating temporaries of variable size
5719 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5720 So we assume here that something at a higher level has prevented a
5721 clash. This is somewhat bogus, but the best we can do. Only
5722 do this when X is BLKmode and when we are at the top level. */
5723 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5724 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5725 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5726 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5727 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5728 != INTEGER_CST)
5729 && GET_MODE (x) == BLKmode)
5730 /* If X is in the outgoing argument area, it is always safe. */
5731 || (GET_CODE (x) == MEM
5732 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5733 || (GET_CODE (XEXP (x, 0)) == PLUS
5734 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5735 return 1;
5736
5737 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5738 find the underlying pseudo. */
5739 if (GET_CODE (x) == SUBREG)
5740 {
5741 x = SUBREG_REG (x);
5742 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5743 return 0;
5744 }
5745
5746 /* A SAVE_EXPR might appear many times in the expression passed to the
5747 top-level safe_from_p call, and if it has a complex subexpression,
5748 examining it multiple times could result in a combinatorial explosion.
5749 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5750 with optimization took about 28 minutes to compile -- even though it was
5751 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5752 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5753 we have processed. Note that the only test of top_p was above. */
5754
5755 if (top_p)
5756 {
5757 int rtn;
5758 tree t;
5759
5760 save_expr_list = 0;
5761
5762 rtn = safe_from_p (x, exp, 0);
5763
5764 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5765 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5766
5767 return rtn;
5768 }
5769
5770 /* Now look at our tree code and possibly recurse. */
5771 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5772 {
5773 case 'd':
5774 exp_rtl = DECL_RTL_IF_SET (exp);
5775 break;
5776
5777 case 'c':
5778 return 1;
5779
5780 case 'x':
5781 if (TREE_CODE (exp) == TREE_LIST)
5782 {
5783 while (1)
5784 {
5785 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5786 return 0;
5787 exp = TREE_CHAIN (exp);
5788 if (!exp)
5789 return 1;
5790 if (TREE_CODE (exp) != TREE_LIST)
5791 return safe_from_p (x, exp, 0);
5792 }
5793 }
5794 else if (TREE_CODE (exp) == ERROR_MARK)
5795 return 1; /* An already-visited SAVE_EXPR? */
5796 else
5797 return 0;
5798
5799 case '2':
5800 case '<':
5801 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5802 return 0;
5803 /* Fall through. */
5804
5805 case '1':
5806 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5807
5808 case 'e':
5809 case 'r':
5810 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5811 the expression. If it is set, we conflict iff we are that rtx or
5812 both are in memory. Otherwise, we check all operands of the
5813 expression recursively. */
5814
5815 switch (TREE_CODE (exp))
5816 {
5817 case ADDR_EXPR:
5818 /* If the operand is static or we are static, we can't conflict.
5819 Likewise if we don't conflict with the operand at all. */
5820 if (staticp (TREE_OPERAND (exp, 0))
5821 || TREE_STATIC (exp)
5822 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5823 return 1;
5824
5825 /* Otherwise, the only way this can conflict is if we are taking
5826 the address of a DECL whose address is part of X, which is
5827 very rare. */
5828 exp = TREE_OPERAND (exp, 0);
5829 if (DECL_P (exp))
5830 {
5831 if (!DECL_RTL_SET_P (exp)
5832 || GET_CODE (DECL_RTL (exp)) != MEM)
5833 return 0;
5834 else
5835 exp_rtl = XEXP (DECL_RTL (exp), 0);
5836 }
5837 break;
5838
5839 case INDIRECT_REF:
5840 if (GET_CODE (x) == MEM
5841 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5842 get_alias_set (exp)))
5843 return 0;
5844 break;
5845
5846 case CALL_EXPR:
5847 /* Assume that the call will clobber all hard registers and
5848 all of memory. */
5849 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5850 || GET_CODE (x) == MEM)
5851 return 0;
5852 break;
5853
5854 case RTL_EXPR:
5855 /* If a sequence exists, we would have to scan every instruction
5856 in the sequence to see if it was safe. This is probably not
5857 worthwhile. */
5858 if (RTL_EXPR_SEQUENCE (exp))
5859 return 0;
5860
5861 exp_rtl = RTL_EXPR_RTL (exp);
5862 break;
5863
5864 case WITH_CLEANUP_EXPR:
5865 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5866 break;
5867
5868 case CLEANUP_POINT_EXPR:
5869 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5870
5871 case SAVE_EXPR:
5872 exp_rtl = SAVE_EXPR_RTL (exp);
5873 if (exp_rtl)
5874 break;
5875
5876 /* If we've already scanned this, don't do it again. Otherwise,
5877 show we've scanned it and record it for clearing the flag if we're
5878 going on. */
5879 if (TREE_PRIVATE (exp))
5880 return 1;
5881
5882 TREE_PRIVATE (exp) = 1;
5883 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5884 {
5885 TREE_PRIVATE (exp) = 0;
5886 return 0;
5887 }
5888
5889 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5890 return 1;
5891
5892 case BIND_EXPR:
5893 /* The only operand we look at is operand 1. The rest aren't
5894 part of the expression. */
5895 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5896
5897 default:
5898 break;
5899 }
5900
5901 /* If we have an rtx, we do not need to scan our operands. */
5902 if (exp_rtl)
5903 break;
5904
5905 nops = first_rtl_op (TREE_CODE (exp));
5906 for (i = 0; i < nops; i++)
5907 if (TREE_OPERAND (exp, i) != 0
5908 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5909 return 0;
5910
5911 /* If this is a language-specific tree code, it may require
5912 special handling. */
5913 if ((unsigned int) TREE_CODE (exp)
5914 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5915 && !(*lang_hooks.safe_from_p) (x, exp))
5916 return 0;
5917 }
5918
5919 /* If we have an rtl, find any enclosed object. Then see if we conflict
5920 with it. */
5921 if (exp_rtl)
5922 {
5923 if (GET_CODE (exp_rtl) == SUBREG)
5924 {
5925 exp_rtl = SUBREG_REG (exp_rtl);
5926 if (GET_CODE (exp_rtl) == REG
5927 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5928 return 0;
5929 }
5930
5931 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5932 are memory and they conflict. */
5933 return ! (rtx_equal_p (x, exp_rtl)
5934 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5935 && true_dependence (exp_rtl, VOIDmode, x,
5936 rtx_addr_varies_p)));
5937 }
5938
5939 /* If we reach here, it is safe. */
5940 return 1;
5941 }
5942
5943 /* Subroutine of expand_expr: return rtx if EXP is a
5944 variable or parameter; else return 0. */
5945
5946 static rtx
5947 var_rtx (tree exp)
5948 {
5949 STRIP_NOPS (exp);
5950 switch (TREE_CODE (exp))
5951 {
5952 case PARM_DECL:
5953 case VAR_DECL:
5954 return DECL_RTL (exp);
5955 default:
5956 return 0;
5957 }
5958 }
5959 \f
5960 /* Return the highest power of two that EXP is known to be a multiple of.
5961 This is used in updating alignment of MEMs in array references. */
5962
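/* Illustrative example only: for an expression such as I * 4 + 8, the
   MULT_EXPR contributes 1 * 4 = 4, the INTEGER_CST contributes 8, and
   the PLUS_EXPR takes MIN (4, 8), so the result is 4: the value is
   known to be a multiple of 4 but not necessarily of 8.  */
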
5963 static unsigned HOST_WIDE_INT
5964 highest_pow2_factor (tree exp)
5965 {
5966 unsigned HOST_WIDE_INT c0, c1;
5967
5968 switch (TREE_CODE (exp))
5969 {
5970 case INTEGER_CST:
5971 /* We can find the lowest bit that's a one. If the low
5972 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5973 We need to handle this case since we can find it in a COND_EXPR,
5974 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5975 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5976 later ICE. */
5977 if (TREE_CONSTANT_OVERFLOW (exp))
5978 return BIGGEST_ALIGNMENT;
5979 else
5980 {
5981 /* Note: tree_low_cst is intentionally not used here,
5982 we don't care about the upper bits. */
5983 c0 = TREE_INT_CST_LOW (exp);
5984 c0 &= -c0;
5985 return c0 ? c0 : BIGGEST_ALIGNMENT;
5986 }
5987 break;
5988
5989 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5990 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5991 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5992 return MIN (c0, c1);
5993
5994 case MULT_EXPR:
5995 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5996 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5997 return c0 * c1;
5998
5999 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6000 case CEIL_DIV_EXPR:
6001 if (integer_pow2p (TREE_OPERAND (exp, 1))
6002 && host_integerp (TREE_OPERAND (exp, 1), 1))
6003 {
6004 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6005 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6006 return MAX (1, c0 / c1);
6007 }
6008 break;
6009
6010 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6011 case SAVE_EXPR: case WITH_RECORD_EXPR:
6012 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6013
6014 case COMPOUND_EXPR:
6015 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6016
6017 case COND_EXPR:
6018 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6019 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6020 return MIN (c0, c1);
6021
6022 default:
6023 break;
6024 }
6025
6026 return 1;
6027 }
6028
6029 /* Similar, except that it is known that the expression must be a multiple
6030 of the alignment of TYPE. */
6031
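/* Illustrative example only: if TYPE is aligned to 32 bits (4 bytes)
   and EXP is I * 2, highest_pow2_factor gives 2 and the result is
   MAX (2, 4) = 4.  */
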
6032 static unsigned HOST_WIDE_INT
6033 highest_pow2_factor_for_type (tree type, tree exp)
6034 {
6035 unsigned HOST_WIDE_INT type_align, factor;
6036
6037 factor = highest_pow2_factor (exp);
6038 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6039 return MAX (factor, type_align);
6040 }
6041 \f
6042 /* Return an object on the placeholder list that matches EXP, a
6043 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6044 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6045 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6046 points to a location that initially holds a starting point in the
6047 placeholder list (zero meaning the start of the list); on success, a
6048 pointer to the list entry at which the object was found is stored there.
6049
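/* Illustrative sketch only: get_inner_reference above uses this when
   it reaches a PLACEHOLDER_EXPR, roughly as

     tree placeholder_ptr = 0;
     tree obj = find_placeholder (exp, &placeholder_ptr);

   OBJ is then either the object to substitute for EXP, or 0 if
   nothing on the placeholder list matches.  */
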
6050 tree
6051 find_placeholder (tree exp, tree *plist)
6052 {
6053 tree type = TREE_TYPE (exp);
6054 tree placeholder_expr;
6055
6056 for (placeholder_expr
6057 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6058 placeholder_expr != 0;
6059 placeholder_expr = TREE_CHAIN (placeholder_expr))
6060 {
6061 tree need_type = TYPE_MAIN_VARIANT (type);
6062 tree elt;
6063
6064 /* Find the outermost reference that is of the type we want. If none,
6065 see if any object has a type that is a pointer to the type we
6066 want. */
6067 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6068 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6069 || TREE_CODE (elt) == COND_EXPR)
6070 ? TREE_OPERAND (elt, 1)
6071 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6072 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6073 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6074 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6075 ? TREE_OPERAND (elt, 0) : 0))
6076 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6077 {
6078 if (plist)
6079 *plist = placeholder_expr;
6080 return elt;
6081 }
6082
6083 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6084 elt
6085 = ((TREE_CODE (elt) == COMPOUND_EXPR
6086 || TREE_CODE (elt) == COND_EXPR)
6087 ? TREE_OPERAND (elt, 1)
6088 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6089 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6090 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6091 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6092 ? TREE_OPERAND (elt, 0) : 0))
6093 if (POINTER_TYPE_P (TREE_TYPE (elt))
6094 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6095 == need_type))
6096 {
6097 if (plist)
6098 *plist = placeholder_expr;
6099 return build1 (INDIRECT_REF, need_type, elt);
6100 }
6101 }
6102
6103 return 0;
6104 }
6105
6106 /* Subroutine of expand_expr. Expand the two operands of a binary
6107 expressions EXP0 and EXP1, placing the results in OP0 and OP1.
6108 The value may be stored in TARGET if TARGET is nonzero. The
6109 MODIFIER argument is as documented by expand_expr. */
6110
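/* Illustrative sketch only: a binary-operator case in expand_expr
   might use this as

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   leaving the expanded operands in OP0 and OP1 with any required
   evaluation-order copies already made.  */
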
6111 static void
6112 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6113 enum expand_modifier modifier)
6114 {
6115 if (! safe_from_p (target, exp1, 1))
6116 target = 0;
6117 if (operand_equal_p (exp0, exp1, 0))
6118 {
6119 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6120 *op1 = copy_rtx (*op0);
6121 }
6122 else
6123 {
6124 /* If we need to preserve evaluation order, copy exp0 into its own
6125 temporary variable so that it can't be clobbered by exp1. */
6126 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6127 exp0 = save_expr (exp0);
6128 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6129 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6130 }
6131 }
6132
6133 \f
6134 /* expand_expr: generate code for computing expression EXP.
6135 An rtx for the computed value is returned. The value is never null.
6136 In the case of a void EXP, const0_rtx is returned.
6137
6138 The value may be stored in TARGET if TARGET is nonzero.
6139 TARGET is just a suggestion; callers must assume that
6140 the rtx returned may not be the same as TARGET.
6141
6142 If TARGET is CONST0_RTX, it means that the value will be ignored.
6143
6144 If TMODE is not VOIDmode, it suggests generating the
6145 result in mode TMODE. But this is done only when convenient.
6146 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6147 TMODE is just a suggestion; callers must assume that
6148 the rtx returned may not have mode TMODE.
6149
6150 Note that TARGET may have neither TMODE nor MODE. In that case, it
6151 probably will not be used.
6152
6153 If MODIFIER is EXPAND_SUM then when EXP is an addition
6154 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6155 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6156 products as above, or REG or MEM, or constant.
6157 Ordinarily in such cases we would output mul or add instructions
6158 and then return a pseudo reg containing the sum.
6159
6160 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6161 it also marks a label as absolutely required (it can't be dead).
6162 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6163 This is used for outputting expressions used in initializers.
6164
6165 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6166 with a constant address even if that address is not normally legitimate.
6167 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6168
6169 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6170 a call parameter. Such targets require special care as we haven't yet
6171 marked TARGET so that it's safe from being trashed by libcalls. We
6172 don't want to use TARGET for anything but the final result;
6173 intermediate values must go elsewhere. Additionally, calls to
6174 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6175
6176 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6177 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6178 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6179 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6180 recursively. */
6181
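/* Illustrative sketch only: most callers reach this through the
   expand_expr wrapper rather than calling expand_expr_real directly,
   e.g.

     rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                            VOIDmode, EXPAND_NORMAL);

   which leaves the choice of target and mode entirely to this
   function.  */
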
6182 rtx
6183 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6184 enum expand_modifier modifier, rtx *alt_rtl)
6185 {
6186 rtx op0, op1, temp;
6187 tree type = TREE_TYPE (exp);
6188 int unsignedp = TREE_UNSIGNED (type);
6189 enum machine_mode mode;
6190 enum tree_code code = TREE_CODE (exp);
6191 optab this_optab;
6192 rtx subtarget, original_target;
6193 int ignore;
6194 tree context;
6195
6196 /* Handle ERROR_MARK before anybody tries to access its type. */
6197 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6198 {
6199 op0 = CONST0_RTX (tmode);
6200 if (op0 != 0)
6201 return op0;
6202 return const0_rtx;
6203 }
6204
6205 mode = TYPE_MODE (type);
6206 /* Use subtarget as the target for operand 0 of a binary operation. */
6207 subtarget = get_subtarget (target);
6208 original_target = target;
6209 ignore = (target == const0_rtx
6210 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6211 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6212 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6213 && TREE_CODE (type) == VOID_TYPE));
6214
6215 /* If we are going to ignore this result, we need only do something
6216 if there is a side-effect somewhere in the expression. If there
6217 is, short-circuit the most common cases here. Note that we must
6218 not call expand_expr with anything but const0_rtx in case this
6219 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6220
6221 if (ignore)
6222 {
6223 if (! TREE_SIDE_EFFECTS (exp))
6224 return const0_rtx;
6225
6226 /* Ensure we reference a volatile object even if value is ignored, but
6227 don't do this if all we are doing is taking its address. */
6228 if (TREE_THIS_VOLATILE (exp)
6229 && TREE_CODE (exp) != FUNCTION_DECL
6230 && mode != VOIDmode && mode != BLKmode
6231 && modifier != EXPAND_CONST_ADDRESS)
6232 {
6233 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6234 if (GET_CODE (temp) == MEM)
6235 temp = copy_to_reg (temp);
6236 return const0_rtx;
6237 }
6238
6239 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6240 || code == INDIRECT_REF || code == BUFFER_REF)
6241 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6242 modifier);
6243
6244 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6245 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6246 {
6247 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6248 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6249 return const0_rtx;
6250 }
6251 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6252 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6253 /* If the second operand has no side effects, just evaluate
6254 the first. */
6255 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6256 modifier);
6257 else if (code == BIT_FIELD_REF)
6258 {
6259 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6260 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6261 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6262 return const0_rtx;
6263 }
6264
6265 target = 0;
6266 }
6267
6268 /* If we will do cse, generate all results into pseudo registers
6269 since 1) that allows cse to find more things
6270 and 2) otherwise cse could produce an insn the machine
6271 cannot support. An exception is a CONSTRUCTOR into a multi-word
6272 MEM: that's much more likely to be most efficient into the MEM.
6273 Another is a CALL_EXPR which must return in memory. */
6274
6275 if (! cse_not_expected && mode != BLKmode && target
6276 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6277 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6278 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6279 target = 0;
6280
6281 switch (code)
6282 {
6283 case LABEL_DECL:
6284 {
6285 tree function = decl_function_context (exp);
6286 /* Labels in containing functions, or labels used from initializers,
6287 must be forced. */
6288 if (modifier == EXPAND_INITIALIZER
6289 || (function != current_function_decl
6290 && function != inline_function_decl
6291 && function != 0))
6292 temp = force_label_rtx (exp);
6293 else
6294 temp = label_rtx (exp);
6295
6296 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6297 if (function != current_function_decl
6298 && function != inline_function_decl && function != 0)
6299 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6300 return temp;
6301 }
6302
6303 case PARM_DECL:
6304 if (!DECL_RTL_SET_P (exp))
6305 {
6306 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6307 return CONST0_RTX (mode);
6308 }
6309
6310 /* ... fall through ... */
6311
6312 case VAR_DECL:
6313 /* If a static var's type was incomplete when the decl was written,
6314 but the type is complete now, lay out the decl now. */
6315 if (DECL_SIZE (exp) == 0
6316 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6317 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6318 layout_decl (exp, 0);
6319
6320 /* ... fall through ... */
6321
6322 case FUNCTION_DECL:
6323 case RESULT_DECL:
6324 if (DECL_RTL (exp) == 0)
6325 abort ();
6326
6327 /* Ensure the variable is marked as used even if it doesn't go through
6328 a parser. If it hasn't been used yet, write out an external
6329 definition. */
6330 if (! TREE_USED (exp))
6331 {
6332 assemble_external (exp);
6333 TREE_USED (exp) = 1;
6334 }
6335
6336 /* Show we haven't gotten RTL for this yet. */
6337 temp = 0;
6338
6339 /* Handle variables inherited from containing functions. */
6340 context = decl_function_context (exp);
6341
6342 /* We treat inline_function_decl as an alias for the current function
6343 because that is the inline function whose vars, types, etc.
6344 are being merged into the current function.
6345 See expand_inline_function. */
6346
6347 if (context != 0 && context != current_function_decl
6348 && context != inline_function_decl
6349 /* If var is static, we don't need a static chain to access it. */
6350 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6351 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6352 {
6353 rtx addr;
6354
6355 /* Mark as non-local and addressable. */
6356 DECL_NONLOCAL (exp) = 1;
6357 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6358 abort ();
6359 (*lang_hooks.mark_addressable) (exp);
6360 if (GET_CODE (DECL_RTL (exp)) != MEM)
6361 abort ();
6362 addr = XEXP (DECL_RTL (exp), 0);
6363 if (GET_CODE (addr) == MEM)
6364 addr
6365 = replace_equiv_address (addr,
6366 fix_lexical_addr (XEXP (addr, 0), exp));
6367 else
6368 addr = fix_lexical_addr (addr, exp);
6369
6370 temp = replace_equiv_address (DECL_RTL (exp), addr);
6371 }
6372
6373 /* This is the case of an array whose size is to be determined
6374 from its initializer, while the initializer is still being parsed.
6375 See expand_decl. */
6376
6377 else if (GET_CODE (DECL_RTL (exp)) == MEM
6378 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6379 temp = validize_mem (DECL_RTL (exp));
6380
6381 /* If DECL_RTL is memory, we are in the normal case; if either
6382 the address is not valid, or it is not a register and -fforce-addr
6383 is specified, get the address into a register. */
6384
6385 else if (GET_CODE (DECL_RTL (exp)) == MEM
6386 && modifier != EXPAND_CONST_ADDRESS
6387 && modifier != EXPAND_SUM
6388 && modifier != EXPAND_INITIALIZER
6389 && (! memory_address_p (DECL_MODE (exp),
6390 XEXP (DECL_RTL (exp), 0))
6391 || (flag_force_addr
6392 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6393 {
6394 if (alt_rtl)
6395 *alt_rtl = DECL_RTL (exp);
6396 temp = replace_equiv_address (DECL_RTL (exp),
6397 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6398 }
6399
6400 /* If we got something, return it. But first, set the alignment
6401 if the address is a register. */
6402 if (temp != 0)
6403 {
6404 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6405 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6406
6407 return temp;
6408 }
6409
6410 /* If the mode of DECL_RTL does not match that of the decl, it
6411 must be a promoted value. We return a SUBREG of the wanted mode,
6412 but mark it so that we know that it was already extended. */
6413
6414 if (GET_CODE (DECL_RTL (exp)) == REG
6415 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6416 {
6417 /* Get the signedness used for this variable. Ensure we get the
6418 same mode we got when the variable was declared. */
6419 if (GET_MODE (DECL_RTL (exp))
6420 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6421 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6422 abort ();
6423
6424 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6425 SUBREG_PROMOTED_VAR_P (temp) = 1;
6426 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6427 return temp;
6428 }
6429
6430 return DECL_RTL (exp);
6431
6432 case INTEGER_CST:
6433 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6434 TREE_INT_CST_HIGH (exp), mode);
6435
6436 /* ??? If overflow is set, fold will have done an incomplete job,
6437 which can result in (plus xx (const_int 0)), which can get
6438 simplified by validate_replace_rtx during virtual register
6439 instantiation, which can result in unrecognizable insns.
6440 Avoid this by forcing all overflows into registers. */
6441 if (TREE_CONSTANT_OVERFLOW (exp)
6442 && modifier != EXPAND_INITIALIZER)
6443 temp = force_reg (mode, temp);
6444
6445 return temp;
6446
6447 case VECTOR_CST:
6448 return const_vector_from_tree (exp);
6449
6450 case CONST_DECL:
6451 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6452
6453 case REAL_CST:
6454 /* If optimized, generate immediate CONST_DOUBLE
6455 which will be turned into memory by reload if necessary.
6456
6457 We used to force a register so that loop.c could see it. But
6458 this does not allow gen_* patterns to perform optimizations with
6459 the constants. It also produces two insns in cases like "x = 1.0;".
6460 On most machines, floating-point constants are not permitted in
6461 many insns, so we'd end up copying it to a register in any case.
6462
6463 Now, we do the copying in expand_binop, if appropriate. */
6464 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6465 TYPE_MODE (TREE_TYPE (exp)));
6466
6467 case COMPLEX_CST:
6468 /* Handle evaluating a complex constant in a CONCAT target. */
6469 if (original_target && GET_CODE (original_target) == CONCAT)
6470 {
6471 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6472 rtx rtarg, itarg;
6473
6474 rtarg = XEXP (original_target, 0);
6475 itarg = XEXP (original_target, 1);
6476
6477 /* Move the real and imaginary parts separately. */
6478 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6479 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6480
6481 if (op0 != rtarg)
6482 emit_move_insn (rtarg, op0);
6483 if (op1 != itarg)
6484 emit_move_insn (itarg, op1);
6485
6486 return original_target;
6487 }
6488
6489 /* ... fall through ... */
6490
6491 case STRING_CST:
6492 temp = output_constant_def (exp, 1);
6493
6494 /* temp contains a constant address.
6495 On RISC machines where a constant address isn't valid,
6496 make some insns to get that address into a register. */
6497 if (modifier != EXPAND_CONST_ADDRESS
6498 && modifier != EXPAND_INITIALIZER
6499 && modifier != EXPAND_SUM
6500 && (! memory_address_p (mode, XEXP (temp, 0))
6501 || flag_force_addr))
6502 return replace_equiv_address (temp,
6503 copy_rtx (XEXP (temp, 0)));
6504 return temp;
6505
6506 case EXPR_WITH_FILE_LOCATION:
6507 {
6508 rtx to_return;
6509 struct file_stack fs;
6510
6511 fs.location = input_location;
6512 fs.next = expr_wfl_stack;
6513 input_filename = EXPR_WFL_FILENAME (exp);
6514 input_line = EXPR_WFL_LINENO (exp);
6515 expr_wfl_stack = &fs;
6516 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6517 emit_line_note (input_location);
6518 /* Possibly avoid switching back and forth here. */
6519 to_return = expand_expr (EXPR_WFL_NODE (exp),
6520 (ignore ? const0_rtx : target),
6521 tmode, modifier);
6522 if (expr_wfl_stack != &fs)
6523 abort ();
6524 input_location = fs.location;
6525 expr_wfl_stack = fs.next;
6526 return to_return;
6527 }
6528
6529 case SAVE_EXPR:
6530 context = decl_function_context (exp);
6531
6532 /* If this SAVE_EXPR was at global context, assume we are an
6533 initialization function and move it into our context. */
6534 if (context == 0)
6535 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6536
6537 /* We treat inline_function_decl as an alias for the current function
6538 because that is the inline function whose vars, types, etc.
6539 are being merged into the current function.
6540 See expand_inline_function. */
6541 if (context == current_function_decl || context == inline_function_decl)
6542 context = 0;
6543
6544 /* If this is non-local, handle it. */
6545 if (context)
6546 {
6547 /* The following call just exists to abort if the context is
6548 not of a containing function. */
6549 find_function_data (context);
6550
6551 temp = SAVE_EXPR_RTL (exp);
6552 if (temp && GET_CODE (temp) == REG)
6553 {
6554 put_var_into_stack (exp, /*rescan=*/true);
6555 temp = SAVE_EXPR_RTL (exp);
6556 }
6557 if (temp == 0 || GET_CODE (temp) != MEM)
6558 abort ();
6559 return
6560 replace_equiv_address (temp,
6561 fix_lexical_addr (XEXP (temp, 0), exp));
6562 }
6563 if (SAVE_EXPR_RTL (exp) == 0)
6564 {
6565 if (mode == VOIDmode)
6566 temp = const0_rtx;
6567 else
6568 temp = assign_temp (build_qualified_type (type,
6569 (TYPE_QUALS (type)
6570 | TYPE_QUAL_CONST)),
6571 3, 0, 0);
6572
6573 SAVE_EXPR_RTL (exp) = temp;
6574 if (!optimize && GET_CODE (temp) == REG)
6575 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6576 save_expr_regs);
6577
6578 /* If the mode of TEMP does not match that of the expression, it
6579 must be a promoted value. We pass store_expr a SUBREG of the
6580 wanted mode but mark it so that we know that it was already
6581 extended. */
6582
6583 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6584 {
6585 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6586 promote_mode (type, mode, &unsignedp, 0);
6587 SUBREG_PROMOTED_VAR_P (temp) = 1;
6588 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6589 }
6590
6591 if (temp == const0_rtx)
6592 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6593 else
6594 store_expr (TREE_OPERAND (exp, 0), temp,
6595 modifier == EXPAND_STACK_PARM ? 2 : 0);
6596
6597 TREE_USED (exp) = 1;
6598 }
6599
6600 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6601 must be a promoted value. We return a SUBREG of the wanted mode,
6602 but mark it so that we know that it was already extended. */
6603
6604 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6605 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6606 {
6607 /* Compute the signedness and make the proper SUBREG. */
6608 promote_mode (type, mode, &unsignedp, 0);
6609 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6610 SUBREG_PROMOTED_VAR_P (temp) = 1;
6611 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6612 return temp;
6613 }
6614
6615 return SAVE_EXPR_RTL (exp);
6616
6617 case UNSAVE_EXPR:
6618 {
6619 rtx temp;
6620 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6621 TREE_OPERAND (exp, 0)
6622 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6623 return temp;
6624 }
6625
6626 case PLACEHOLDER_EXPR:
6627 {
6628 tree old_list = placeholder_list;
6629 tree placeholder_expr = 0;
6630
6631 exp = find_placeholder (exp, &placeholder_expr);
6632 if (exp == 0)
6633 abort ();
6634
6635 placeholder_list = TREE_CHAIN (placeholder_expr);
6636 temp = expand_expr (exp, original_target, tmode, modifier);
6637 placeholder_list = old_list;
6638 return temp;
6639 }
6640
6641 case WITH_RECORD_EXPR:
6642 /* Put the object on the placeholder list, expand our first operand,
6643 and pop the list. */
6644 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6645 placeholder_list);
6646 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6647 modifier);
6648 placeholder_list = TREE_CHAIN (placeholder_list);
6649 return target;
6650
6651 case GOTO_EXPR:
6652 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6653 expand_goto (TREE_OPERAND (exp, 0));
6654 else
6655 expand_computed_goto (TREE_OPERAND (exp, 0));
6656 return const0_rtx;
6657
6658 case EXIT_EXPR:
6659 expand_exit_loop_if_false (NULL,
6660 invert_truthvalue (TREE_OPERAND (exp, 0)));
6661 return const0_rtx;
6662
6663 case LABELED_BLOCK_EXPR:
6664 if (LABELED_BLOCK_BODY (exp))
6665 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6666 /* Should perhaps use expand_label, but this is simpler and safer. */
6667 do_pending_stack_adjust ();
6668 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6669 return const0_rtx;
6670
6671 case EXIT_BLOCK_EXPR:
6672 if (EXIT_BLOCK_RETURN (exp))
6673 sorry ("returned value in block_exit_expr");
6674 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6675 return const0_rtx;
6676
6677 case LOOP_EXPR:
6678 push_temp_slots ();
6679 expand_start_loop (1);
6680 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6681 expand_end_loop ();
6682 pop_temp_slots ();
6683
6684 return const0_rtx;
6685
6686 case BIND_EXPR:
6687 {
6688 tree vars = TREE_OPERAND (exp, 0);
6689
6690 /* Need to open a binding contour here because
6691 if there are any cleanups they must be contained here. */
6692 expand_start_bindings (2);
6693
6694 /* Mark the corresponding BLOCK for output in its proper place. */
6695 if (TREE_OPERAND (exp, 2) != 0
6696 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6697 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6698
6699 /* If VARS have not yet been expanded, expand them now. */
6700 while (vars)
6701 {
6702 if (!DECL_RTL_SET_P (vars))
6703 expand_decl (vars);
6704 expand_decl_init (vars);
6705 vars = TREE_CHAIN (vars);
6706 }
6707
6708 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6709
6710 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6711
6712 return temp;
6713 }
6714
6715 case RTL_EXPR:
6716 if (RTL_EXPR_SEQUENCE (exp))
6717 {
6718 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6719 abort ();
6720 emit_insn (RTL_EXPR_SEQUENCE (exp));
6721 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6722 }
6723 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6724 free_temps_for_rtl_expr (exp);
6725 if (alt_rtl)
6726 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6727 return RTL_EXPR_RTL (exp);
6728
6729 case CONSTRUCTOR:
6730 /* If we don't need the result, just ensure we evaluate any
6731 subexpressions. */
6732 if (ignore)
6733 {
6734 tree elt;
6735
6736 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6737 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6738
6739 return const0_rtx;
6740 }
6741
6742 /* All elts simple constants => refer to a constant in memory. But
6743 if this is a non-BLKmode mode, let it store a field at a time
6744 since that should make a CONST_INT or CONST_DOUBLE when we
6745 fold. Likewise, if we have a target we can use, it is best to
6746 store directly into the target unless the type is large enough
6747 that memcpy will be used. If we are making an initializer and
6748 all operands are constant, put it in memory as well.
6749
6750 FIXME: Avoid trying to fill vector constructors piece-meal.
6751 Output them with output_constant_def below unless we're sure
6752 they're zeros. This should go away when vector initializers
6753 are treated like VECTOR_CST instead of arrays.
6754 */
6755 else if ((TREE_STATIC (exp)
6756 && ((mode == BLKmode
6757 && ! (target != 0 && safe_from_p (target, exp, 1)))
6758 || TREE_ADDRESSABLE (exp)
6759 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6760 && (! MOVE_BY_PIECES_P
6761 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6762 TYPE_ALIGN (type)))
6763 && ((TREE_CODE (type) == VECTOR_TYPE
6764 && !is_zeros_p (exp))
6765 || ! mostly_zeros_p (exp)))))
6766 || ((modifier == EXPAND_INITIALIZER
6767 || modifier == EXPAND_CONST_ADDRESS)
6768 && TREE_CONSTANT (exp)))
6769 {
6770 rtx constructor = output_constant_def (exp, 1);
6771
6772 if (modifier != EXPAND_CONST_ADDRESS
6773 && modifier != EXPAND_INITIALIZER
6774 && modifier != EXPAND_SUM)
6775 constructor = validize_mem (constructor);
6776
6777 return constructor;
6778 }
6779 else
6780 {
6781 /* Handle calls that pass values in multiple non-contiguous
6782 locations. The Irix 6 ABI has examples of this. */
6783 if (target == 0 || ! safe_from_p (target, exp, 1)
6784 || GET_CODE (target) == PARALLEL
6785 || modifier == EXPAND_STACK_PARM)
6786 target
6787 = assign_temp (build_qualified_type (type,
6788 (TYPE_QUALS (type)
6789 | (TREE_READONLY (exp)
6790 * TYPE_QUAL_CONST))),
6791 0, TREE_ADDRESSABLE (exp), 1);
6792
6793 store_constructor (exp, target, 0, int_expr_size (exp));
6794 return target;
6795 }
6796
6797 case INDIRECT_REF:
6798 {
6799 tree exp1 = TREE_OPERAND (exp, 0);
6800 tree index;
6801 tree string = string_constant (exp1, &index);
6802
6803 /* Try to optimize reads from const strings. */
6804 if (string
6805 && TREE_CODE (string) == STRING_CST
6806 && TREE_CODE (index) == INTEGER_CST
6807 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6808 && GET_MODE_CLASS (mode) == MODE_INT
6809 && GET_MODE_SIZE (mode) == 1
6810 && modifier != EXPAND_WRITE)
6811 return gen_int_mode (TREE_STRING_POINTER (string)
6812 [TREE_INT_CST_LOW (index)], mode);
6813
6814 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6815 op0 = memory_address (mode, op0);
6816 temp = gen_rtx_MEM (mode, op0);
6817 set_mem_attributes (temp, exp, 0);
6818
6819 /* If we are writing to this object and its type is a record with
6820 readonly fields, we must mark it as readonly so it will
6821 conflict with readonly references to those fields. */
6822 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6823 RTX_UNCHANGING_P (temp) = 1;
6824
6825 return temp;
6826 }
6827
6828 case ARRAY_REF:
6829 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6830 abort ();
6831
6832 {
6833 tree array = TREE_OPERAND (exp, 0);
6834 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6835 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6836 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6837 HOST_WIDE_INT i;
6838
6839 /* Optimize the special-case of a zero lower bound.
6840
6841 We convert the low_bound to sizetype to avoid some problems
6842 with constant folding. (E.g. suppose the lower bound is 1,
6843 and its mode is QI. Without the conversion, (ARRAY
6844 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6845 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6846
6847 if (! integer_zerop (low_bound))
6848 index = size_diffop (index, convert (sizetype, low_bound));
6849
6850 /* Fold an expression like: "foo"[2].
6851 This is not done in fold so it won't happen inside &.
6852 Don't fold if this is for wide characters since it's too
6853 difficult to do correctly and this is a very rare case. */
6854
6855 if (modifier != EXPAND_CONST_ADDRESS
6856 && modifier != EXPAND_INITIALIZER
6857 && modifier != EXPAND_MEMORY
6858 && TREE_CODE (array) == STRING_CST
6859 && TREE_CODE (index) == INTEGER_CST
6860 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6861 && GET_MODE_CLASS (mode) == MODE_INT
6862 && GET_MODE_SIZE (mode) == 1)
6863 return gen_int_mode (TREE_STRING_POINTER (array)
6864 [TREE_INT_CST_LOW (index)], mode);
6865
6866 /* If this is a constant index into a constant array,
6867 just get the value from the array. Handle both the cases when
6868 we have an explicit constructor and when our operand is a variable
6869 that was declared const. */
6870
6871 if (modifier != EXPAND_CONST_ADDRESS
6872 && modifier != EXPAND_INITIALIZER
6873 && modifier != EXPAND_MEMORY
6874 && TREE_CODE (array) == CONSTRUCTOR
6875 && ! TREE_SIDE_EFFECTS (array)
6876 && TREE_CODE (index) == INTEGER_CST
6877 && 0 > compare_tree_int (index,
6878 list_length (CONSTRUCTOR_ELTS
6879 (TREE_OPERAND (exp, 0)))))
6880 {
6881 tree elem;
6882
6883 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6884 i = TREE_INT_CST_LOW (index);
6885 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6886 ;
6887
6888 if (elem)
6889 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6890 modifier);
6891 }
6892
6893 else if (optimize >= 1
6894 && modifier != EXPAND_CONST_ADDRESS
6895 && modifier != EXPAND_INITIALIZER
6896 && modifier != EXPAND_MEMORY
6897 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6898 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6899 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6900 && targetm.binds_local_p (array))
6901 {
6902 if (TREE_CODE (index) == INTEGER_CST)
6903 {
6904 tree init = DECL_INITIAL (array);
6905
6906 if (TREE_CODE (init) == CONSTRUCTOR)
6907 {
6908 tree elem;
6909
6910 for (elem = CONSTRUCTOR_ELTS (init);
6911 (elem
6912 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6913 elem = TREE_CHAIN (elem))
6914 ;
6915
6916 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6917 return expand_expr (fold (TREE_VALUE (elem)), target,
6918 tmode, modifier);
6919 }
6920 else if (TREE_CODE (init) == STRING_CST
6921 && 0 > compare_tree_int (index,
6922 TREE_STRING_LENGTH (init)))
6923 {
6924 tree type = TREE_TYPE (TREE_TYPE (init));
6925 enum machine_mode mode = TYPE_MODE (type);
6926
6927 if (GET_MODE_CLASS (mode) == MODE_INT
6928 && GET_MODE_SIZE (mode) == 1)
6929 return gen_int_mode (TREE_STRING_POINTER (init)
6930 [TREE_INT_CST_LOW (index)], mode);
6931 }
6932 }
6933 }
6934 }
6935 goto normal_inner_ref;
6936
6937 case COMPONENT_REF:
6938 /* If the operand is a CONSTRUCTOR, we can just extract the
6939 appropriate field if it is present. */
6940 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6941 {
6942 tree elt;
6943
6944 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6945 elt = TREE_CHAIN (elt))
6946 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6947 /* We can normally use the value of the field in the
6948 CONSTRUCTOR. However, if this is a bitfield in
6949 an integral mode that we can fit in a HOST_WIDE_INT,
6950 we must mask only the number of bits in the bitfield,
6951 since this is done implicitly by the constructor. If
6952 the bitfield does not meet either of those conditions,
6953 we can't do this optimization. */
6954 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6955 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6956 == MODE_INT)
6957 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6958 <= HOST_BITS_PER_WIDE_INT))))
6959 {
6960 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6961 && modifier == EXPAND_STACK_PARM)
6962 target = 0;
6963 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6964 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6965 {
6966 HOST_WIDE_INT bitsize
6967 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6968 enum machine_mode imode
6969 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6970
6971 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6972 {
6973 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6974 op0 = expand_and (imode, op0, op1, target);
6975 }
6976 else
6977 {
6978 tree count
6979 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6980 0);
6981
6982 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6983 target, 0);
6984 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6985 target, 0);
6986 }
6987 }
6988
6989 return op0;
6990 }
6991 }
6992 goto normal_inner_ref;
6993
6994 case BIT_FIELD_REF:
6995 case ARRAY_RANGE_REF:
6996 normal_inner_ref:
6997 {
6998 enum machine_mode mode1;
6999 HOST_WIDE_INT bitsize, bitpos;
7000 tree offset;
7001 int volatilep = 0;
7002 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7003 &mode1, &unsignedp, &volatilep);
7004 rtx orig_op0;
7005
7006 /* If we got back the original object, something is wrong. Perhaps
7007 we are evaluating an expression too early. In any event, don't
7008 infinitely recurse. */
7009 if (tem == exp)
7010 abort ();
7011
7012 /* If TEM's type is a union of variable size, pass TARGET to the inner
7013 computation, since it will need a temporary and TARGET is known
7014 to suffice. This occurs in unchecked conversion in Ada. */
7015
7016 orig_op0 = op0
7017 = expand_expr (tem,
7018 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7019 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7020 != INTEGER_CST)
7021 && modifier != EXPAND_STACK_PARM
7022 ? target : NULL_RTX),
7023 VOIDmode,
7024 (modifier == EXPAND_INITIALIZER
7025 || modifier == EXPAND_CONST_ADDRESS
7026 || modifier == EXPAND_STACK_PARM)
7027 ? modifier : EXPAND_NORMAL);
7028
7029 /* If this is a constant, put it into a register if it is a
7030 legitimate constant and OFFSET is 0 and memory if it isn't. */
7031 if (CONSTANT_P (op0))
7032 {
7033 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7034 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7035 && offset == 0)
7036 op0 = force_reg (mode, op0);
7037 else
7038 op0 = validize_mem (force_const_mem (mode, op0));
7039 }
7040
7041 /* Otherwise, if this object is not in memory and we either have an
7042 offset or a BLKmode result, put it there. This case can't occur in
7043 C, but can in Ada if we have unchecked conversion of an expression
7044 from a scalar type to an array or record type or for an
7045 ARRAY_RANGE_REF whose type is BLKmode. */
7046 else if (GET_CODE (op0) != MEM
7047 && (offset != 0
7048 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7049 {
7050 /* If the operand is a SAVE_EXPR, we can deal with this by
7051 forcing the SAVE_EXPR into memory. */
7052 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7053 {
7054 put_var_into_stack (TREE_OPERAND (exp, 0),
7055 /*rescan=*/true);
7056 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7057 }
7058 else
7059 {
7060 tree nt
7061 = build_qualified_type (TREE_TYPE (tem),
7062 (TYPE_QUALS (TREE_TYPE (tem))
7063 | TYPE_QUAL_CONST));
7064 rtx memloc = assign_temp (nt, 1, 1, 1);
7065
7066 emit_move_insn (memloc, op0);
7067 op0 = memloc;
7068 }
7069 }
7070
7071 if (offset != 0)
7072 {
7073 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7074 EXPAND_SUM);
7075
7076 if (GET_CODE (op0) != MEM)
7077 abort ();
7078
7079 #ifdef POINTERS_EXTEND_UNSIGNED
7080 if (GET_MODE (offset_rtx) != Pmode)
7081 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7082 #else
7083 if (GET_MODE (offset_rtx) != ptr_mode)
7084 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7085 #endif
7086
7087 if (GET_MODE (op0) == BLKmode
7088 /* A constant address in OP0 can have VOIDmode, we must
7089 not try to call force_reg in that case. */
7090 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7091 && bitsize != 0
7092 && (bitpos % bitsize) == 0
7093 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7094 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7095 {
7096 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7097 bitpos = 0;
7098 }
7099
7100 op0 = offset_address (op0, offset_rtx,
7101 highest_pow2_factor (offset));
7102 }
7103
7104 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7105 record its alignment as BIGGEST_ALIGNMENT. */
7106 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7107 && is_aligning_offset (offset, tem))
7108 set_mem_align (op0, BIGGEST_ALIGNMENT);
7109
7110 /* Don't forget about volatility even if this is a bitfield. */
7111 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7112 {
7113 if (op0 == orig_op0)
7114 op0 = copy_rtx (op0);
7115
7116 MEM_VOLATILE_P (op0) = 1;
7117 }
7118
7119 /* The following code doesn't handle CONCAT.
7120 Assume only bitpos == 0 can be used for CONCAT, due to
7121 one-element arrays having the same mode as their element. */
7122 if (GET_CODE (op0) == CONCAT)
7123 {
7124 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7125 abort ();
7126 return op0;
7127 }
7128
7129 /* In cases where an aligned union has an unaligned object
7130 as a field, we might be extracting a BLKmode value from
7131 an integer-mode (e.g., SImode) object. Handle this case
7132 by doing the extract into an object as wide as the field
7133 (which we know to be the width of a basic mode), then
7134 storing into memory, and changing the mode to BLKmode. */
7135 if (mode1 == VOIDmode
7136 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7137 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7138 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7139 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7140 && modifier != EXPAND_CONST_ADDRESS
7141 && modifier != EXPAND_INITIALIZER)
7142 /* If the field isn't aligned enough to fetch as a memref,
7143 fetch it as a bit field. */
7144 || (mode1 != BLKmode
7145 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7146 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7147 || (GET_CODE (op0) == MEM
7148 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7149 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7150 && ((modifier == EXPAND_CONST_ADDRESS
7151 || modifier == EXPAND_INITIALIZER)
7152 ? STRICT_ALIGNMENT
7153 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7154 || (bitpos % BITS_PER_UNIT != 0)))
7155 /* If the type and the field are a constant size and the
7156 size of the type isn't the same size as the bitfield,
7157 we must use bitfield operations. */
7158 || (bitsize >= 0
7159 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7160 == INTEGER_CST)
7161 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7162 bitsize)))
7163 {
7164 enum machine_mode ext_mode = mode;
7165
7166 if (ext_mode == BLKmode
7167 && ! (target != 0 && GET_CODE (op0) == MEM
7168 && GET_CODE (target) == MEM
7169 && bitpos % BITS_PER_UNIT == 0))
7170 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7171
7172 if (ext_mode == BLKmode)
7173 {
7174 if (target == 0)
7175 target = assign_temp (type, 0, 1, 1);
7176
7177 if (bitsize == 0)
7178 return target;
7179
7180 /* In this case, BITPOS must start at a byte boundary and
7181 TARGET, if specified, must be a MEM. */
7182 if (GET_CODE (op0) != MEM
7183 || (target != 0 && GET_CODE (target) != MEM)
7184 || bitpos % BITS_PER_UNIT != 0)
7185 abort ();
7186
7187 emit_block_move (target,
7188 adjust_address (op0, VOIDmode,
7189 bitpos / BITS_PER_UNIT),
7190 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7191 / BITS_PER_UNIT),
7192 (modifier == EXPAND_STACK_PARM
7193 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7194
7195 return target;
7196 }
7197
7198 op0 = validize_mem (op0);
7199
7200 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7201 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7202
7203 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7204 (modifier == EXPAND_STACK_PARM
7205 ? NULL_RTX : target),
7206 ext_mode, ext_mode,
7207 int_size_in_bytes (TREE_TYPE (tem)));
7208
7209 /* If the result is a record type and BITSIZE is narrower than
7210 the mode of OP0, an integral mode, and this is a big endian
7211 machine, we must put the field into the high-order bits. */
7212 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7213 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7214 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7215 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7216 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7217 - bitsize),
7218 op0, 1);
7219
7220 if (mode == BLKmode)
7221 {
7222 rtx new = assign_temp (build_qualified_type
7223 ((*lang_hooks.types.type_for_mode)
7224 (ext_mode, 0),
7225 TYPE_QUAL_CONST), 0, 1, 1);
7226
7227 emit_move_insn (new, op0);
7228 op0 = copy_rtx (new);
7229 PUT_MODE (op0, BLKmode);
7230 set_mem_attributes (op0, exp, 1);
7231 }
7232
7233 return op0;
7234 }
7235
7236 /* If the result is BLKmode, use that to access the object
7237 now as well. */
7238 if (mode == BLKmode)
7239 mode1 = BLKmode;
7240
7241 /* Get a reference to just this component. */
7242 if (modifier == EXPAND_CONST_ADDRESS
7243 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7244 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7245 else
7246 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7247
7248 if (op0 == orig_op0)
7249 op0 = copy_rtx (op0);
7250
7251 set_mem_attributes (op0, exp, 0);
7252 if (GET_CODE (XEXP (op0, 0)) == REG)
7253 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7254
7255 MEM_VOLATILE_P (op0) |= volatilep;
7256 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7257 || modifier == EXPAND_CONST_ADDRESS
7258 || modifier == EXPAND_INITIALIZER)
7259 return op0;
7260 else if (target == 0)
7261 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7262
7263 convert_move (target, op0, unsignedp);
7264 return target;
7265 }
7266
7267 case VTABLE_REF:
7268 {
7269 rtx insn, before = get_last_insn (), vtbl_ref;
7270
7271 /* Evaluate the interior expression. */
7272 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7273 tmode, modifier);
7274
7275 /* Get or create an instruction off which to hang a note. */
7276 if (REG_P (subtarget))
7277 {
7278 target = subtarget;
7279 insn = get_last_insn ();
7280 if (insn == before)
7281 abort ();
7282 if (! INSN_P (insn))
7283 insn = prev_nonnote_insn (insn);
7284 }
7285 else
7286 {
7287 target = gen_reg_rtx (GET_MODE (subtarget));
7288 insn = emit_move_insn (target, subtarget);
7289 }
7290
7291 /* Collect the data for the note. */
7292 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7293 vtbl_ref = plus_constant (vtbl_ref,
7294 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7295 /* Discard the initial CONST that was added. */
7296 vtbl_ref = XEXP (vtbl_ref, 0);
7297
7298 REG_NOTES (insn)
7299 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7300
7301 return target;
7302 }
7303
7304 /* Intended for a reference to a buffer of a file-object in Pascal.
7305 But it's not certain that a special tree code will really be
7306 necessary for these. INDIRECT_REF might work for them. */
7307 case BUFFER_REF:
7308 abort ();
7309
7310 case IN_EXPR:
7311 {
7312 /* Pascal set IN expression.
7313
7314 Algorithm:
7315 rlo = set_low - (set_low%bits_per_word);
7316 the_word = set [ (index - rlo)/bits_per_word ];
7317 bit_index = index % bits_per_word;
7318 bitmask = 1 << bit_index;
7319 return !!(the_word & bitmask); */
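 /* Worked instance of the algorithm above, assuming bits_per_word == 8
 and set_low == 3: for index == 5, rlo = 3 - (3 % 8) = 0, so
 the_word = set[(5 - 0) / 8] = set[0], bit_index = 5 % 8 = 5 and
 bitmask = 1 << 5; the result tests bit 5 of the set's first byte. */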
7320
7321 tree set = TREE_OPERAND (exp, 0);
7322 tree index = TREE_OPERAND (exp, 1);
7323 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7324 tree set_type = TREE_TYPE (set);
7325 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7326 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7327 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7328 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7329 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7330 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7331 rtx setaddr = XEXP (setval, 0);
7332 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7333 rtx rlow;
7334 rtx diff, quo, rem, addr, bit, result;
7335
7336 /* If domain is empty, answer is no. Likewise if index is constant
7337 and out of bounds. */
7338 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7339 && TREE_CODE (set_low_bound) == INTEGER_CST
7340 && tree_int_cst_lt (set_high_bound, set_low_bound))
7341 || (TREE_CODE (index) == INTEGER_CST
7342 && TREE_CODE (set_low_bound) == INTEGER_CST
7343 && tree_int_cst_lt (index, set_low_bound))
7344 || (TREE_CODE (set_high_bound) == INTEGER_CST
7345 && TREE_CODE (index) == INTEGER_CST
7346 && tree_int_cst_lt (set_high_bound, index))))
7347 return const0_rtx;
7348
7349 if (target == 0)
7350 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7351
7352 /* If we get here, we have to generate the code for both cases
7353 (in range and out of range). */
7354
7355 op0 = gen_label_rtx ();
7356 op1 = gen_label_rtx ();
7357
7358 if (! (GET_CODE (index_val) == CONST_INT
7359 && GET_CODE (lo_r) == CONST_INT))
7360 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7361 GET_MODE (index_val), iunsignedp, op1);
7362
7363 if (! (GET_CODE (index_val) == CONST_INT
7364 && GET_CODE (hi_r) == CONST_INT))
7365 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7366 GET_MODE (index_val), iunsignedp, op1);
7367
7368 /* Calculate the element number of bit zero in the first word
7369 of the set. */
7370 if (GET_CODE (lo_r) == CONST_INT)
7371 rlow = GEN_INT (INTVAL (lo_r)
7372 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7373 else
7374 rlow = expand_binop (index_mode, and_optab, lo_r,
7375 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7376 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7377
7378 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7379 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7380
7381 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7382 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7383 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7384 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7385
7386 addr = memory_address (byte_mode,
7387 expand_binop (index_mode, add_optab, diff,
7388 setaddr, NULL_RTX, iunsignedp,
7389 OPTAB_LIB_WIDEN));
7390
7391 /* Extract the bit we want to examine. */
7392 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7393 gen_rtx_MEM (byte_mode, addr),
7394 make_tree (TREE_TYPE (index), rem),
7395 NULL_RTX, 1);
7396 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7397 GET_MODE (target) == byte_mode ? target : 0,
7398 1, OPTAB_LIB_WIDEN);
7399
7400 if (result != target)
7401 convert_move (target, result, 1);
7402
7403 /* Output the code to handle the out-of-range case. */
7404 emit_jump (op0);
7405 emit_label (op1);
7406 emit_move_insn (target, const0_rtx);
7407 emit_label (op0);
7408 return target;
7409 }
7410
7411 case WITH_CLEANUP_EXPR:
7412 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7413 {
7414 WITH_CLEANUP_EXPR_RTL (exp)
7415 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7416 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7417 CLEANUP_EH_ONLY (exp));
7418
7419 /* That's it for this cleanup. */
7420 TREE_OPERAND (exp, 1) = 0;
7421 }
7422 return WITH_CLEANUP_EXPR_RTL (exp);
7423
7424 case CLEANUP_POINT_EXPR:
7425 {
7426 /* Start a new binding layer that will keep track of all cleanup
7427 actions to be performed. */
7428 expand_start_bindings (2);
7429
7430 target_temp_slot_level = temp_slot_level;
7431
7432 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7433 /* If we're going to use this value, load it up now. */
7434 if (! ignore)
7435 op0 = force_not_mem (op0);
7436 preserve_temp_slots (op0);
7437 expand_end_bindings (NULL_TREE, 0, 0);
7438 }
7439 return op0;
7440
7441 case CALL_EXPR:
7442 /* Check for a built-in function. */
7443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7444 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7445 == FUNCTION_DECL)
7446 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7447 {
7448 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7449 == BUILT_IN_FRONTEND)
7450 return (*lang_hooks.expand_expr) (exp, original_target,
7451 tmode, modifier,
7452 alt_rtl);
7453 else
7454 return expand_builtin (exp, target, subtarget, tmode, ignore);
7455 }
7456
7457 return expand_call (exp, target, ignore);
7458
7459 case NON_LVALUE_EXPR:
7460 case NOP_EXPR:
7461 case CONVERT_EXPR:
7462 case REFERENCE_EXPR:
7463 if (TREE_OPERAND (exp, 0) == error_mark_node)
7464 return const0_rtx;
7465
7466 if (TREE_CODE (type) == UNION_TYPE)
7467 {
7468 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7469
7470 /* If both input and output are BLKmode, this conversion isn't doing
7471 anything except possibly changing memory attributes. */
7472 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7473 {
7474 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7475 modifier);
7476
7477 result = copy_rtx (result);
7478 set_mem_attributes (result, exp, 0);
7479 return result;
7480 }
7481
7482 if (target == 0)
7483 {
7484 if (TYPE_MODE (type) != BLKmode)
7485 target = gen_reg_rtx (TYPE_MODE (type));
7486 else
7487 target = assign_temp (type, 0, 1, 1);
7488 }
7489
7490 if (GET_CODE (target) == MEM)
7491 /* Store data into beginning of memory target. */
7492 store_expr (TREE_OPERAND (exp, 0),
7493 adjust_address (target, TYPE_MODE (valtype), 0),
7494 modifier == EXPAND_STACK_PARM ? 2 : 0);
7495
7496 else if (GET_CODE (target) == REG)
7497 /* Store this field into a union of the proper type. */
7498 store_field (target,
7499 MIN ((int_size_in_bytes (TREE_TYPE
7500 (TREE_OPERAND (exp, 0)))
7501 * BITS_PER_UNIT),
7502 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7503 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7504 VOIDmode, 0, type, 0);
7505 else
7506 abort ();
7507
7508 /* Return the entire union. */
7509 return target;
7510 }
7511
7512 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7513 {
7514 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7515 modifier);
7516
7517 /* If the signedness of the conversion differs and OP0 is
7518 a promoted SUBREG, clear that indication since we now
7519 have to do the proper extension. */
7520 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7521 && GET_CODE (op0) == SUBREG)
7522 SUBREG_PROMOTED_VAR_P (op0) = 0;
7523
7524 return op0;
7525 }
7526
7527 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7528 if (GET_MODE (op0) == mode)
7529 return op0;
7530
7531 /* If OP0 is a constant, just convert it into the proper mode. */
7532 if (CONSTANT_P (op0))
7533 {
7534 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7535 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7536
7537 if (modifier == EXPAND_INITIALIZER)
7538 return simplify_gen_subreg (mode, op0, inner_mode,
7539 subreg_lowpart_offset (mode,
7540 inner_mode));
7541 else
7542 return convert_modes (mode, inner_mode, op0,
7543 TREE_UNSIGNED (inner_type));
7544 }
7545
7546 if (modifier == EXPAND_INITIALIZER)
7547 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7548
7549 if (target == 0)
7550 return
7551 convert_to_mode (mode, op0,
7552 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7553 else
7554 convert_move (target, op0,
7555 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7556 return target;
7557
7558 case VIEW_CONVERT_EXPR:
7559 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7560
7561 /* If the input and output modes are both the same, we are done.
7562 Otherwise, if neither mode is BLKmode and both are integral and within
7563 a word, we can use gen_lowpart. If neither is true, make sure the
7564 operand is in memory and convert the MEM to the new mode. */
7565 if (TYPE_MODE (type) == GET_MODE (op0))
7566 ;
7567 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7568 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7569 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7570 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7571 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7572 op0 = gen_lowpart (TYPE_MODE (type), op0);
7573 else if (GET_CODE (op0) != MEM)
7574 {
7575 /* If the operand is not a MEM, force it into memory. Since we
7576 are going to be changing the mode of the MEM, don't call
7577 force_const_mem for constants because we don't allow pool
7578 constants to change mode. */
7579 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7580
7581 if (TREE_ADDRESSABLE (exp))
7582 abort ();
7583
7584 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7585 target
7586 = assign_stack_temp_for_type
7587 (TYPE_MODE (inner_type),
7588 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7589
7590 emit_move_insn (target, op0);
7591 op0 = target;
7592 }
7593
7594 /* At this point, OP0 is in the correct mode. If the output type is such
7595 that the operand is known to be aligned, indicate that it is.
7596 Otherwise, we need only be concerned about alignment for non-BLKmode
7597 results. */
7598 if (GET_CODE (op0) == MEM)
7599 {
7600 op0 = copy_rtx (op0);
7601
7602 if (TYPE_ALIGN_OK (type))
7603 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7604 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7605 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7606 {
7607 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7608 HOST_WIDE_INT temp_size
7609 = MAX (int_size_in_bytes (inner_type),
7610 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7611 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7612 temp_size, 0, type);
7613 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7614
7615 if (TREE_ADDRESSABLE (exp))
7616 abort ();
7617
7618 if (GET_MODE (op0) == BLKmode)
7619 emit_block_move (new_with_op0_mode, op0,
7620 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7621 (modifier == EXPAND_STACK_PARM
7622 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7623 else
7624 emit_move_insn (new_with_op0_mode, op0);
7625
7626 op0 = new;
7627 }
7628
7629 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7630 }
7631
7632 return op0;
7633
7634 case PLUS_EXPR:
7635 this_optab = ! unsignedp && flag_trapv
7636 && (GET_MODE_CLASS (mode) == MODE_INT)
7637 ? addv_optab : add_optab;
7638
7639 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7640 something else, make sure we add the register to the constant and
7641 then to the other thing. This case can occur during strength
7642 reduction and doing it this way will produce better code if the
7643 frame pointer or argument pointer is eliminated.
7644
7645 fold-const.c will ensure that the constant is always in the inner
7646 PLUS_EXPR, so the only case we need to do anything about is if
7647 sp, ap, or fp is our second argument, in which case we must swap
7648 the innermost first argument and our second argument. */
7649
7650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7651 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7652 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7653 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7654 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7655 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7656 {
7657 tree t = TREE_OPERAND (exp, 1);
7658
7659 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7660 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7661 }
7662
7663 /* If the result is to be ptr_mode and we are adding an integer to
7664 something, we might be forming a constant. So try to use
7665 plus_constant. If it produces a sum and we can't accept it,
7666 use force_operand. This allows P = &ARR[const] to generate
7667 efficient code on machines where a SYMBOL_REF is not a valid
7668 address.
7669
7670 If this is an EXPAND_SUM call, always return the sum. */
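 /* Sketch of the case described above (hypothetical names; assumes
 4-byte array elements): for P = &ARR[10] the address is formed with
 plus_constant as (plus (symbol_ref "ARR") (const_int 40)), and
 force_operand is used only if that sum is not acceptable as is. */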
7671 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7672 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7673 {
7674 if (modifier == EXPAND_STACK_PARM)
7675 target = 0;
7676 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7677 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7678 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7679 {
7680 rtx constant_part;
7681
7682 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7683 EXPAND_SUM);
7684 /* Use immed_double_const to ensure that the constant is
7685 truncated according to the mode of OP1, then sign extended
7686 to a HOST_WIDE_INT. Using the constant directly can result
7687 in non-canonical RTL in a 64x32 cross compile. */
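 /* E.g. on a 64-bit host an SImode constant with its high bit set,
 such as 0x80000000, must be represented sign-extended as
 (const_int -2147483648) to be canonical; immed_double_const
 guarantees that form. */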
7688 constant_part
7689 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7690 (HOST_WIDE_INT) 0,
7691 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7692 op1 = plus_constant (op1, INTVAL (constant_part));
7693 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7694 op1 = force_operand (op1, target);
7695 return op1;
7696 }
7697
7698 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7699 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7700 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7701 {
7702 rtx constant_part;
7703
7704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7705 (modifier == EXPAND_INITIALIZER
7706 ? EXPAND_INITIALIZER : EXPAND_SUM));
7707 if (! CONSTANT_P (op0))
7708 {
7709 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7710 VOIDmode, modifier);
7711 /* Return a PLUS if modifier says it's OK. */
7712 if (modifier == EXPAND_SUM
7713 || modifier == EXPAND_INITIALIZER)
7714 return simplify_gen_binary (PLUS, mode, op0, op1);
7715 goto binop2;
7716 }
7717 /* Use immed_double_const to ensure that the constant is
7718 truncated according to the mode of OP0, then sign extended
7719 to a HOST_WIDE_INT. Using the constant directly can result
7720 in non-canonical RTL in a 64x32 cross compile. */
7721 constant_part
7722 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7723 (HOST_WIDE_INT) 0,
7724 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7725 op0 = plus_constant (op0, INTVAL (constant_part));
7726 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7727 op0 = force_operand (op0, target);
7728 return op0;
7729 }
7730 }
7731
7732 /* No sense saving up arithmetic to be done
7733 if it's all in the wrong mode to form part of an address.
7734 And force_operand won't know whether to sign-extend or
7735 zero-extend. */
7736 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7737 || mode != ptr_mode)
7738 {
7739 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7740 subtarget, &op0, &op1, 0);
7741 if (op0 == const0_rtx)
7742 return op1;
7743 if (op1 == const0_rtx)
7744 return op0;
7745 goto binop2;
7746 }
7747
7748 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7749 subtarget, &op0, &op1, modifier);
7750 return simplify_gen_binary (PLUS, mode, op0, op1);
7751
7752 case MINUS_EXPR:
7753 /* For initializers, we are allowed to return a MINUS of two
7754 symbolic constants. Here we handle all cases when both operands
7755 are constant. */
7756 /* Handle difference of two symbolic constants,
7757 for the sake of an initializer. */
7758 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7759 && really_constant_p (TREE_OPERAND (exp, 0))
7760 && really_constant_p (TREE_OPERAND (exp, 1)))
7761 {
7762 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7763 NULL_RTX, &op0, &op1, modifier);
7764
7765 /* If the last operand is a CONST_INT, use plus_constant of
7766 the negated constant. Else make the MINUS. */
7767 if (GET_CODE (op1) == CONST_INT)
7768 return plus_constant (op0, - INTVAL (op1));
7769 else
7770 return gen_rtx_MINUS (mode, op0, op1);
7771 }
7772
7773 this_optab = ! unsignedp && flag_trapv
7774 && (GET_MODE_CLASS(mode) == MODE_INT)
7775 ? subv_optab : sub_optab;
7776
7777 /* No sense saving up arithmetic to be done
7778 if it's all in the wrong mode to form part of an address.
7779 And force_operand won't know whether to sign-extend or
7780 zero-extend. */
7781 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7782 || mode != ptr_mode)
7783 goto binop;
7784
7785 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7786 subtarget, &op0, &op1, modifier);
7787
7788 /* Convert A - const to A + (-const). */
7789 if (GET_CODE (op1) == CONST_INT)
7790 {
7791 op1 = negate_rtx (mode, op1);
7792 return simplify_gen_binary (PLUS, mode, op0, op1);
7793 }
7794
7795 goto binop2;
7796
7797 case MULT_EXPR:
7798 /* If first operand is constant, swap them.
7799 Thus the following special case checks need only
7800 check the second operand. */
7801 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7802 {
7803 tree t1 = TREE_OPERAND (exp, 0);
7804 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7805 TREE_OPERAND (exp, 1) = t1;
7806 }
7807
7808 /* Attempt to return something suitable for generating an
7809 indexed address, for machines that support that. */
7810
7811 if (modifier == EXPAND_SUM && mode == ptr_mode
7812 && host_integerp (TREE_OPERAND (exp, 1), 0))
7813 {
7814 tree exp1 = TREE_OPERAND (exp, 1);
7815
7816 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7817 EXPAND_SUM);
7818
7819 if (GET_CODE (op0) != REG)
7820 op0 = force_operand (op0, NULL_RTX);
7821 if (GET_CODE (op0) != REG)
7822 op0 = copy_to_mode_reg (mode, op0);
7823
7824 return gen_rtx_MULT (mode, op0,
7825 gen_int_mode (tree_low_cst (exp1, 0),
7826 TYPE_MODE (TREE_TYPE (exp1))));
7827 }
7828
7829 if (modifier == EXPAND_STACK_PARM)
7830 target = 0;
7831
7832 /* Check for multiplying things that have been extended
7833 from a narrower type. If this machine supports multiplying
7834 in that narrower type with a result in the desired type,
7835 do it that way, and avoid the explicit type-conversion. */
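 /* For instance (illustrative, with 16-bit shorts and 32-bit ints):
 (int) a * (int) b where both a and b are shorts can be emitted as
 one widening multiply (e.g. a mulhisi3 pattern, where available)
 instead of two extensions followed by a full SImode multiply. */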
7836 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7837 && TREE_CODE (type) == INTEGER_TYPE
7838 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7839 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7840 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7841 && int_fits_type_p (TREE_OPERAND (exp, 1),
7842 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7843 /* Don't use a widening multiply if a shift will do. */
7844 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7845 > HOST_BITS_PER_WIDE_INT)
7846 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7847 ||
7848 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7849 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7850 ==
7851 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7852 /* If both operands are extended, they must either both
7853 be zero-extended or both be sign-extended. */
7854 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7855 ==
7856 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7857 {
7858 enum machine_mode innermode
7859 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7860 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7861 ? smul_widen_optab : umul_widen_optab);
7862 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7863 ? umul_widen_optab : smul_widen_optab);
7864 if (mode == GET_MODE_WIDER_MODE (innermode))
7865 {
7866 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7867 {
7868 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7869 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7870 TREE_OPERAND (exp, 1),
7871 NULL_RTX, &op0, &op1, 0);
7872 else
7873 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7874 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7875 NULL_RTX, &op0, &op1, 0);
7876 goto binop2;
7877 }
7878 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7879 && innermode == word_mode)
7880 {
7881 rtx htem;
7882 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7883 NULL_RTX, VOIDmode, 0);
7884 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7885 op1 = convert_modes (innermode, mode,
7886 expand_expr (TREE_OPERAND (exp, 1),
7887 NULL_RTX, VOIDmode, 0),
7888 unsignedp);
7889 else
7890 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7891 NULL_RTX, VOIDmode, 0);
7892 temp = expand_binop (mode, other_optab, op0, op1, target,
7893 unsignedp, OPTAB_LIB_WIDEN);
7894 htem = expand_mult_highpart_adjust (innermode,
7895 gen_highpart (innermode, temp),
7896 op0, op1,
7897 gen_highpart (innermode, temp),
7898 unsignedp);
7899 emit_move_insn (gen_highpart (innermode, temp), htem);
7900 return temp;
7901 }
7902 }
7903 }
7904 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7905 subtarget, &op0, &op1, 0);
7906 return expand_mult (mode, op0, op1, target, unsignedp);
7907
7908 case TRUNC_DIV_EXPR:
7909 case FLOOR_DIV_EXPR:
7910 case CEIL_DIV_EXPR:
7911 case ROUND_DIV_EXPR:
7912 case EXACT_DIV_EXPR:
7913 if (modifier == EXPAND_STACK_PARM)
7914 target = 0;
7915 /* Possible optimization: compute the dividend with EXPAND_SUM;
7916 then, if the divisor is constant, we can optimize the case
7917 where some terms of the dividend have coefficients divisible by it. */
7918 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7919 subtarget, &op0, &op1, 0);
7920 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7921
7922 case RDIV_EXPR:
7923 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7924 saving an expensive divide. If not, combine will rebuild the
7925 original computation. */
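 /* E.g. x/b + y/b becomes x*(1/b) + y*(1/b); under
 -funsafe-math-optimizations CSE may then share the single 1/b,
 trading two divides for one divide and two multiplies. */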
7926 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7927 && TREE_CODE (type) == REAL_TYPE
7928 && !real_onep (TREE_OPERAND (exp, 0)))
7929 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7930 build (RDIV_EXPR, type,
7931 build_real (type, dconst1),
7932 TREE_OPERAND (exp, 1))),
7933 target, tmode, modifier);
7934 this_optab = sdiv_optab;
7935 goto binop;
7936
7937 case TRUNC_MOD_EXPR:
7938 case FLOOR_MOD_EXPR:
7939 case CEIL_MOD_EXPR:
7940 case ROUND_MOD_EXPR:
7941 if (modifier == EXPAND_STACK_PARM)
7942 target = 0;
7943 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7944 subtarget, &op0, &op1, 0);
7945 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7946
7947 case FIX_ROUND_EXPR:
7948 case FIX_FLOOR_EXPR:
7949 case FIX_CEIL_EXPR:
7950 abort (); /* Not used for C. */
7951
7952 case FIX_TRUNC_EXPR:
7953 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7954 if (target == 0 || modifier == EXPAND_STACK_PARM)
7955 target = gen_reg_rtx (mode);
7956 expand_fix (target, op0, unsignedp);
7957 return target;
7958
7959 case FLOAT_EXPR:
7960 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7961 if (target == 0 || modifier == EXPAND_STACK_PARM)
7962 target = gen_reg_rtx (mode);
7963 /* expand_float can't figure out what to do if FROM has VOIDmode.
7964 So give it the correct mode. With -O, cse will optimize this. */
7965 if (GET_MODE (op0) == VOIDmode)
7966 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7967 op0);
7968 expand_float (target, op0,
7969 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7970 return target;
7971
7972 case NEGATE_EXPR:
7973 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7974 if (modifier == EXPAND_STACK_PARM)
7975 target = 0;
7976 temp = expand_unop (mode,
7977 ! unsignedp && flag_trapv
7978 && (GET_MODE_CLASS(mode) == MODE_INT)
7979 ? negv_optab : neg_optab, op0, target, 0);
7980 if (temp == 0)
7981 abort ();
7982 return temp;
7983
7984 case ABS_EXPR:
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7986 if (modifier == EXPAND_STACK_PARM)
7987 target = 0;
7988
7989 /* ABS_EXPR is not valid for complex arguments. */
7990 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7991 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7992 abort ();
7993
7994 /* Unsigned abs is simply the operand. Testing here means we don't
7995 risk generating incorrect code below. */
7996 if (TREE_UNSIGNED (type))
7997 return op0;
7998
7999 return expand_abs (mode, op0, target, unsignedp,
8000 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8001
8002 case MAX_EXPR:
8003 case MIN_EXPR:
8004 target = original_target;
8005 if (target == 0
8006 || modifier == EXPAND_STACK_PARM
8007 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8008 || GET_MODE (target) != mode
8009 || (GET_CODE (target) == REG
8010 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8011 target = gen_reg_rtx (mode);
8012 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8013 target, &op0, &op1, 0);
8014
8015 /* First try to do it with a special MIN or MAX instruction.
8016 If that does not win, use a conditional jump to select the proper
8017 value. */
8018 this_optab = (TREE_UNSIGNED (type)
8019 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8020 : (code == MIN_EXPR ? smin_optab : smax_optab));
8021
8022 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8023 OPTAB_WIDEN);
8024 if (temp != 0)
8025 return temp;
8026
8027 /* At this point, a MEM target is no longer useful; we will get better
8028 code without it. */
8029
8030 if (GET_CODE (target) == MEM)
8031 target = gen_reg_rtx (mode);
8032
8033 /* If op1 was placed in target, swap op0 and op1. */
8034 if (target != op0 && target == op1)
8035 {
8036 rtx tem = op0;
8037 op0 = op1;
8038 op1 = tem;
8039 }
8040
8041 if (target != op0)
8042 emit_move_insn (target, op0);
8043
8044 op0 = gen_label_rtx ();
8045
8046 /* If this mode is an integer too wide to compare properly,
8047 compare word by word. Rely on cse to optimize constant cases. */
8048 if (GET_MODE_CLASS (mode) == MODE_INT
8049 && ! can_compare_p (GE, mode, ccp_jump))
8050 {
8051 if (code == MAX_EXPR)
8052 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8053 target, op1, NULL_RTX, op0);
8054 else
8055 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8056 op1, target, NULL_RTX, op0);
8057 }
8058 else
8059 {
8060 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8061 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8062 unsignedp, mode, NULL_RTX, NULL_RTX,
8063 op0);
8064 }
8065 emit_move_insn (target, op1);
8066 emit_label (op0);
8067 return target;
8068
8069 case BIT_NOT_EXPR:
8070 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8071 if (modifier == EXPAND_STACK_PARM)
8072 target = 0;
8073 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8074 if (temp == 0)
8075 abort ();
8076 return temp;
8077
8078 /* ??? Can optimize bitwise operations with one arg constant.
8079 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8080 and (a bitwise1 b) bitwise2 b (etc)
8081 but that is probably not worthwhile. */
8082
8083 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8084 boolean values when we want in all cases to compute both of them. In
8085 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8086 as actual zero-or-1 values and then bitwise anding. In cases where
8087 there cannot be any side effects, better code would be made by
8088 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8089 how to recognize those cases. */
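 /* E.g. for "f () && g ()", a TRUTH_ANDIF_EXPR evaluates g () only when
 f () is nonzero, whereas the TRUTH_AND_EXPR handled here computes
 both operands as 0-or-1 values and ANDs the results. */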
8090
8091 case TRUTH_AND_EXPR:
8092 case BIT_AND_EXPR:
8093 this_optab = and_optab;
8094 goto binop;
8095
8096 case TRUTH_OR_EXPR:
8097 case BIT_IOR_EXPR:
8098 this_optab = ior_optab;
8099 goto binop;
8100
8101 case TRUTH_XOR_EXPR:
8102 case BIT_XOR_EXPR:
8103 this_optab = xor_optab;
8104 goto binop;
8105
8106 case LSHIFT_EXPR:
8107 case RSHIFT_EXPR:
8108 case LROTATE_EXPR:
8109 case RROTATE_EXPR:
8110 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8111 subtarget = 0;
8112 if (modifier == EXPAND_STACK_PARM)
8113 target = 0;
8114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8115 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8116 unsignedp);
8117
8118 /* Could determine the answer when only additive constants differ. Also,
8119 the addition of one can be handled by changing the condition. */
8120 case LT_EXPR:
8121 case LE_EXPR:
8122 case GT_EXPR:
8123 case GE_EXPR:
8124 case EQ_EXPR:
8125 case NE_EXPR:
8126 case UNORDERED_EXPR:
8127 case ORDERED_EXPR:
8128 case UNLT_EXPR:
8129 case UNLE_EXPR:
8130 case UNGT_EXPR:
8131 case UNGE_EXPR:
8132 case UNEQ_EXPR:
8133 temp = do_store_flag (exp,
8134 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8135 tmode != VOIDmode ? tmode : mode, 0);
8136 if (temp != 0)
8137 return temp;
8138
8139 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8140 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8141 && original_target
8142 && GET_CODE (original_target) == REG
8143 && (GET_MODE (original_target)
8144 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8145 {
8146 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8147 VOIDmode, 0);
8148
8149 /* If temp is constant, we can just compute the result. */
8150 if (GET_CODE (temp) == CONST_INT)
8151 {
8152 if (INTVAL (temp) != 0)
8153 emit_move_insn (target, const1_rtx);
8154 else
8155 emit_move_insn (target, const0_rtx);
8156
8157 return target;
8158 }
8159
8160 if (temp != original_target)
8161 {
8162 enum machine_mode mode1 = GET_MODE (temp);
8163 if (mode1 == VOIDmode)
8164 mode1 = tmode != VOIDmode ? tmode : mode;
8165
8166 temp = copy_to_mode_reg (mode1, temp);
8167 }
8168
8169 op1 = gen_label_rtx ();
8170 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8171 GET_MODE (temp), unsignedp, op1);
8172 emit_move_insn (temp, const1_rtx);
8173 emit_label (op1);
8174 return temp;
8175 }
8176
8177 /* If no set-flag instruction, must generate a conditional
8178 store into a temporary variable. Drop through
8179 and handle this like && and ||. */
8180
8181 case TRUTH_ANDIF_EXPR:
8182 case TRUTH_ORIF_EXPR:
8183 if (! ignore
8184 && (target == 0
8185 || modifier == EXPAND_STACK_PARM
8186 || ! safe_from_p (target, exp, 1)
8187 /* Make sure we don't have a hard reg (such as function's return
8188 value) live across basic blocks, if not optimizing. */
8189 || (!optimize && GET_CODE (target) == REG
8190 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8191 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8192
8193 if (target)
8194 emit_clr_insn (target);
8195
8196 op1 = gen_label_rtx ();
8197 jumpifnot (exp, op1);
8198
8199 if (target)
8200 emit_0_to_1_insn (target);
8201
8202 emit_label (op1);
8203 return ignore ? const0_rtx : target;
8204
8205 case TRUTH_NOT_EXPR:
8206 if (modifier == EXPAND_STACK_PARM)
8207 target = 0;
8208 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8209 /* The parser is careful to generate TRUTH_NOT_EXPR
8210 only with operands that are always zero or one. */
8211 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8212 target, 1, OPTAB_LIB_WIDEN);
8213 if (temp == 0)
8214 abort ();
8215 return temp;
8216
8217 case COMPOUND_EXPR:
8218 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8219 emit_queue ();
8220 return expand_expr_real (TREE_OPERAND (exp, 1),
8221 (ignore ? const0_rtx : target),
8222 VOIDmode, modifier, alt_rtl);
8223
8224 case COND_EXPR:
8225 /* If we would have a "singleton" (see below) were it not for a
8226 conversion in each arm, bring that conversion back out. */
8227 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8228 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8229 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8230 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8231 {
8232 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8233 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8234
8235 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8236 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8237 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8238 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8239 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8240 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8241 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8242 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8243 return expand_expr (build1 (NOP_EXPR, type,
8244 build (COND_EXPR, TREE_TYPE (iftrue),
8245 TREE_OPERAND (exp, 0),
8246 iftrue, iffalse)),
8247 target, tmode, modifier);
8248 }
8249
8250 {
8251 /* Note that COND_EXPRs whose type is a structure or union
8252 are required to be constructed to contain assignments of
8253 a temporary variable, so that we can evaluate them here
8254 for side effect only. If type is void, we must do likewise. */
8255
8256 /* If an arm of the branch requires a cleanup,
8257 only that cleanup is performed. */
8258
8259 tree singleton = 0;
8260 tree binary_op = 0, unary_op = 0;
8261
8262 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8263 convert it to our mode, if necessary. */
8264 if (integer_onep (TREE_OPERAND (exp, 1))
8265 && integer_zerop (TREE_OPERAND (exp, 2))
8266 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8267 {
8268 if (ignore)
8269 {
8270 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8271 modifier);
8272 return const0_rtx;
8273 }
8274
8275 if (modifier == EXPAND_STACK_PARM)
8276 target = 0;
8277 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8278 if (GET_MODE (op0) == mode)
8279 return op0;
8280
8281 if (target == 0)
8282 target = gen_reg_rtx (mode);
8283 convert_move (target, op0, unsignedp);
8284 return target;
8285 }
8286
8287 /* Check for X ? A + B : A. If we have this, we can copy A to the
8288 output and conditionally add B. Similarly for unary operations.
8289 Don't do this if X has side-effects because those side effects
8290 might affect A or B and the "?" operation is a sequence point in
8291 ANSI. (operand_equal_p tests for side effects.) */
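 /* E.g. "x ? a + b : a" copies a to the output and adds b only when x
 is true; the tests below also cover the mirrored form
 "x ? a : a + b" and the corresponding unary-operator variants. */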
8292
8293 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8294 && operand_equal_p (TREE_OPERAND (exp, 2),
8295 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8296 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8297 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8298 && operand_equal_p (TREE_OPERAND (exp, 1),
8299 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8300 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8301 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8302 && operand_equal_p (TREE_OPERAND (exp, 2),
8303 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8304 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8305 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8306 && operand_equal_p (TREE_OPERAND (exp, 1),
8307 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8308 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8309
8310 /* If we are not to produce a result, we have no target. Otherwise,
8311 if a target was specified use it; it will not be used as an
8312 intermediate target unless it is safe. If no target, use a
8313 temporary. */
8314
8315 if (ignore)
8316 temp = 0;
8317 else if (modifier == EXPAND_STACK_PARM)
8318 temp = assign_temp (type, 0, 0, 1);
8319 else if (original_target
8320 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8321 || (singleton && GET_CODE (original_target) == REG
8322 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8323 && original_target == var_rtx (singleton)))
8324 && GET_MODE (original_target) == mode
8325 #ifdef HAVE_conditional_move
8326 && (! can_conditionally_move_p (mode)
8327 || GET_CODE (original_target) == REG
8328 || TREE_ADDRESSABLE (type))
8329 #endif
8330 && (GET_CODE (original_target) != MEM
8331 || TREE_ADDRESSABLE (type)))
8332 temp = original_target;
8333 else if (TREE_ADDRESSABLE (type))
8334 abort ();
8335 else
8336 temp = assign_temp (type, 0, 0, 1);
8337
8338 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8339 do the test of X as a store-flag operation, do this as
8340 A + ((X != 0) << log C). Similarly for other simple binary
8341 operators. Only do for C == 1 if BRANCH_COST is low. */
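 /* For instance, "x ? a + 4 : a" can be computed without a branch as
 a + ((x != 0) << 2), since log2 (4) == 2; the shift is omitted
 entirely when C == 1. */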
8342 if (temp && singleton && binary_op
8343 && (TREE_CODE (binary_op) == PLUS_EXPR
8344 || TREE_CODE (binary_op) == MINUS_EXPR
8345 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8346 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8347 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8348 : integer_onep (TREE_OPERAND (binary_op, 1)))
8349 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8350 {
8351 rtx result;
8352 tree cond;
8353 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8354 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8355 ? addv_optab : add_optab)
8356 : TREE_CODE (binary_op) == MINUS_EXPR
8357 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8358 ? subv_optab : sub_optab)
8359 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8360 : xor_optab);
8361
8362 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8363 if (singleton == TREE_OPERAND (exp, 1))
8364 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8365 else
8366 cond = TREE_OPERAND (exp, 0);
8367
8368 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8369 ? temp : NULL_RTX),
8370 mode, BRANCH_COST <= 1);
8371
8372 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8373 result = expand_shift (LSHIFT_EXPR, mode, result,
8374 build_int_2 (tree_log2
8375 (TREE_OPERAND
8376 (binary_op, 1)),
8377 0),
8378 (safe_from_p (temp, singleton, 1)
8379 ? temp : NULL_RTX), 0);
8380
8381 if (result)
8382 {
8383 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8384 return expand_binop (mode, boptab, op1, result, temp,
8385 unsignedp, OPTAB_LIB_WIDEN);
8386 }
8387 }
8388
8389 do_pending_stack_adjust ();
8390 NO_DEFER_POP;
8391 op0 = gen_label_rtx ();
8392
8393 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8394 {
8395 if (temp != 0)
8396 {
8397 /* If the target conflicts with the other operand of the
8398 binary op, we can't use it. Also, we can't use the target
8399 if it is a hard register, because evaluating the condition
8400 might clobber it. */
8401 if ((binary_op
8402 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8403 || (GET_CODE (temp) == REG
8404 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8405 temp = gen_reg_rtx (mode);
8406 store_expr (singleton, temp,
8407 modifier == EXPAND_STACK_PARM ? 2 : 0);
8408 }
8409 else
8410 expand_expr (singleton,
8411 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8412 if (singleton == TREE_OPERAND (exp, 1))
8413 jumpif (TREE_OPERAND (exp, 0), op0);
8414 else
8415 jumpifnot (TREE_OPERAND (exp, 0), op0);
8416
8417 start_cleanup_deferral ();
8418 if (binary_op && temp == 0)
8419 /* Just touch the other operand. */
8420 expand_expr (TREE_OPERAND (binary_op, 1),
8421 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8422 else if (binary_op)
8423 store_expr (build (TREE_CODE (binary_op), type,
8424 make_tree (type, temp),
8425 TREE_OPERAND (binary_op, 1)),
8426 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8427 else
8428 store_expr (build1 (TREE_CODE (unary_op), type,
8429 make_tree (type, temp)),
8430 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8431 op1 = op0;
8432 }
8433 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8434 comparison operator. If we have one of these cases, set the
8435 output to A, branch on A (cse will merge these two references),
8436 then set the output to FOO. */
8437 else if (temp
8438 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8439 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8440 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8441 TREE_OPERAND (exp, 1), 0)
8442 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8443 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8444 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8445 {
8446 if (GET_CODE (temp) == REG
8447 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8448 temp = gen_reg_rtx (mode);
8449 store_expr (TREE_OPERAND (exp, 1), temp,
8450 modifier == EXPAND_STACK_PARM ? 2 : 0);
8451 jumpif (TREE_OPERAND (exp, 0), op0);
8452
8453 start_cleanup_deferral ();
8454 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8455 store_expr (TREE_OPERAND (exp, 2), temp,
8456 modifier == EXPAND_STACK_PARM ? 2 : 0);
8457 else
8458 expand_expr (TREE_OPERAND (exp, 2),
8459 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8460 op1 = op0;
8461 }
8462 else if (temp
8463 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8464 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8465 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8466 TREE_OPERAND (exp, 2), 0)
8467 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8468 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8469 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8470 {
8471 if (GET_CODE (temp) == REG
8472 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8473 temp = gen_reg_rtx (mode);
8474 store_expr (TREE_OPERAND (exp, 2), temp,
8475 modifier == EXPAND_STACK_PARM ? 2 : 0);
8476 jumpifnot (TREE_OPERAND (exp, 0), op0);
8477
8478 start_cleanup_deferral ();
8479 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8480 store_expr (TREE_OPERAND (exp, 1), temp,
8481 modifier == EXPAND_STACK_PARM ? 2 : 0);
8482 else
8483 expand_expr (TREE_OPERAND (exp, 1),
8484 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8485 op1 = op0;
8486 }
8487 else
8488 {
8489 op1 = gen_label_rtx ();
8490 jumpifnot (TREE_OPERAND (exp, 0), op0);
8491
8492 start_cleanup_deferral ();
8493
8494 /* One branch of the cond can be void, if it never returns. For
8495 example, A ? throw : E. */
8496 if (temp != 0
8497 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8498 store_expr (TREE_OPERAND (exp, 1), temp,
8499 modifier == EXPAND_STACK_PARM ? 2 : 0);
8500 else
8501 expand_expr (TREE_OPERAND (exp, 1),
8502 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8503 end_cleanup_deferral ();
8504 emit_queue ();
8505 emit_jump_insn (gen_jump (op1));
8506 emit_barrier ();
8507 emit_label (op0);
8508 start_cleanup_deferral ();
8509 if (temp != 0
8510 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8511 store_expr (TREE_OPERAND (exp, 2), temp,
8512 modifier == EXPAND_STACK_PARM ? 2 : 0);
8513 else
8514 expand_expr (TREE_OPERAND (exp, 2),
8515 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8516 }
8517
8518 end_cleanup_deferral ();
8519
8520 emit_queue ();
8521 emit_label (op1);
8522 OK_DEFER_POP;
8523
8524 return temp;
8525 }
8526
8527 case TARGET_EXPR:
8528 {
8529 /* Something needs to be initialized, but we didn't know
8530 where that thing was when building the tree. For example,
8531 it could be the return value of a function, or a parameter
8532 to a function that is constructed on the stack, or a temporary
8533 variable which must be passed by reference.
8534
8535 We guarantee that the expression will either be constructed
8536 or copied into our original target. */
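/* As used below: operand 0 is the slot (a VAR_DECL), operand 1 is
the initializer (cleared once it has been expanded, with operand 3
keeping a copy), and operand 2 is the cleanup, if any. */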
8537
8538 tree slot = TREE_OPERAND (exp, 0);
8539 tree cleanups = NULL_TREE;
8540 tree exp1;
8541
8542 if (TREE_CODE (slot) != VAR_DECL)
8543 abort ();
8544
8545 if (! ignore)
8546 target = original_target;
8547
8548 /* Set this here so that if we get a target that refers to a
8549 register variable that's already been used, put_reg_into_stack
8550 knows that it should fix up those uses. */
8551 TREE_USED (slot) = 1;
8552
8553 if (target == 0)
8554 {
8555 if (DECL_RTL_SET_P (slot))
8556 {
8557 target = DECL_RTL (slot);
8558 /* We have already expanded the slot, so don't do
8559 it again. (mrs) */
8560 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8561 return target;
8562 }
8563 else
8564 {
8565 target = assign_temp (type, 2, 0, 1);
8566 /* All temp slots at this level must not conflict. */
8567 preserve_temp_slots (target);
8568 SET_DECL_RTL (slot, target);
8569 if (TREE_ADDRESSABLE (slot))
8570 put_var_into_stack (slot, /*rescan=*/false);
8571
8572 /* Since SLOT is not known to the called function
8573 to belong to its stack frame, we must build an explicit
8574 cleanup. This case occurs when we must build up a reference
8575 to pass the reference as an argument. In this case,
8576 it is very likely that such a reference need not be
8577 built here. */
8578
8579 if (TREE_OPERAND (exp, 2) == 0)
8580 TREE_OPERAND (exp, 2)
8581 = (*lang_hooks.maybe_build_cleanup) (slot);
8582 cleanups = TREE_OPERAND (exp, 2);
8583 }
8584 }
8585 else
8586 {
8587 /* This case does occur, when expanding a parameter which
8588 needs to be constructed on the stack. The target
8589 is the actual stack address that we want to initialize.
8590 The function we call will perform the cleanup in this case. */
8591
8592 /* If we have already assigned it space, use that space,
8593 not the target that we were passed in, as our target
8594 parameter is only a hint. */
8595 if (DECL_RTL_SET_P (slot))
8596 {
8597 target = DECL_RTL (slot);
8598 /* We have already expanded the slot, so don't do
8599 it again. (mrs) */
8600 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8601 return target;
8602 }
8603 else
8604 {
8605 SET_DECL_RTL (slot, target);
8606 /* If we must have an addressable slot, then make sure that
8607 the RTL that we just stored in slot is OK. */
8608 if (TREE_ADDRESSABLE (slot))
8609 put_var_into_stack (slot, /*rescan=*/true);
8610 }
8611 }
8612
8613 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8614 /* Mark it as expanded. */
8615 TREE_OPERAND (exp, 1) = NULL_TREE;
8616
8617 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8618
8619 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8620
8621 return target;
8622 }
8623
8624 case INIT_EXPR:
8625 {
8626 tree lhs = TREE_OPERAND (exp, 0);
8627 tree rhs = TREE_OPERAND (exp, 1);
8628
8629 temp = expand_assignment (lhs, rhs, ! ignore);
8630 return temp;
8631 }
8632
8633 case MODIFY_EXPR:
8634 {
8635 /* If lhs is complex, expand calls in rhs before computing it.
8636 That's so we don't compute a pointer and save it over a
8637 call. If lhs is simple, compute it first so we can give it
8638 as a target if the rhs is just a call. This avoids an
8639 extra temp and copy, and prevents a partial subsumption
8640 that makes bad code. Actually we could treat
8641 component_ref's of vars like vars. */
8642
8643 tree lhs = TREE_OPERAND (exp, 0);
8644 tree rhs = TREE_OPERAND (exp, 1);
8645
8646 temp = 0;
8647
8648 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8649 of size 1. In this case, (unless we need the result of the
8650 assignment) we can do this more efficiently with a
8651 test followed by an assignment, if necessary.
8652
8653 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8654 things change so we do, this code should be enhanced to
8655 support it. */
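/* For example, `x.b |= y.b' is expanded roughly as `if (y.b) x.b = 1;'
and `x.b &= y.b' as `if (! y.b) x.b = 0;'. */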
8656 if (ignore
8657 && TREE_CODE (lhs) == COMPONENT_REF
8658 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8659 || TREE_CODE (rhs) == BIT_AND_EXPR)
8660 && TREE_OPERAND (rhs, 0) == lhs
8661 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8662 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8663 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8664 {
8665 rtx label = gen_label_rtx ();
8666
8667 do_jump (TREE_OPERAND (rhs, 1),
8668 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8669 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8670 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8671 (TREE_CODE (rhs) == BIT_IOR_EXPR
8672 ? integer_one_node
8673 : integer_zero_node)),
8674 0);
8675 do_pending_stack_adjust ();
8676 emit_label (label);
8677 return const0_rtx;
8678 }
8679
8680 temp = expand_assignment (lhs, rhs, ! ignore);
8681
8682 return temp;
8683 }
8684
8685 case RETURN_EXPR:
8686 if (!TREE_OPERAND (exp, 0))
8687 expand_null_return ();
8688 else
8689 expand_return (TREE_OPERAND (exp, 0));
8690 return const0_rtx;
8691
8692 case PREINCREMENT_EXPR:
8693 case PREDECREMENT_EXPR:
8694 return expand_increment (exp, 0, ignore);
8695
8696 case POSTINCREMENT_EXPR:
8697 case POSTDECREMENT_EXPR:
8698 /* Faster to treat as pre-increment if result is not used. */
8699 return expand_increment (exp, ! ignore, ignore);
8700
8701 case ADDR_EXPR:
8702 if (modifier == EXPAND_STACK_PARM)
8703 target = 0;
8704 /* Are we taking the address of a nested function? */
8705 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8706 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8707 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8708 && ! TREE_STATIC (exp))
8709 {
8710 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8711 op0 = force_operand (op0, target);
8712 }
8713 /* If we are taking the address of something erroneous, just
8714 return a zero. */
8715 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8716 return const0_rtx;
8717 /* If we are taking the address of a constant and are at the
8718 top level, we have to use output_constant_def since we can't
8719 call force_const_mem at top level. */
8720 else if (cfun == 0
8721 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8722 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8723 == 'c')))
8724 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8725 else
8726 {
8727 /* We make sure to pass const0_rtx down if we came in with
8728 ignore set, to avoid doing the cleanups twice for something. */
8729 op0 = expand_expr (TREE_OPERAND (exp, 0),
8730 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8731 (modifier == EXPAND_INITIALIZER
8732 ? modifier : EXPAND_CONST_ADDRESS));
8733
8734 /* If we are going to ignore the result, OP0 will have been set
8735 to const0_rtx, so just return it. Don't get confused and
8736 think we are taking the address of the constant. */
8737 if (ignore)
8738 return op0;
8739
8740 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8741 clever and return a REG when given a MEM. */
8742 op0 = protect_from_queue (op0, 1);
8743
8744 /* We would like the object in memory. If it is a constant, we can
8745 have it be statically allocated into memory. For a non-constant,
8746 we need to allocate some memory and store the value into it. */
8747
8748 if (CONSTANT_P (op0))
8749 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8750 op0);
8751 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8752 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8753 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8754 {
8755 /* If the operand is a SAVE_EXPR, we can deal with this by
8756 forcing the SAVE_EXPR into memory. */
8757 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8758 {
8759 put_var_into_stack (TREE_OPERAND (exp, 0),
8760 /*rescan=*/true);
8761 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8762 }
8763 else
8764 {
8765 /* If this object is in a register, it can't be BLKmode. */
8766 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8767 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8768
8769 if (GET_CODE (op0) == PARALLEL)
8770 /* Handle calls that pass values in multiple
8771 non-contiguous locations. The Irix 6 ABI has examples
8772 of this. */
8773 emit_group_store (memloc, op0, inner_type,
8774 int_size_in_bytes (inner_type));
8775 else
8776 emit_move_insn (memloc, op0);
8777
8778 op0 = memloc;
8779 }
8780 }
8781
8782 if (GET_CODE (op0) != MEM)
8783 abort ();
8784
8785 mark_temp_addr_taken (op0);
8786 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8787 {
8788 op0 = XEXP (op0, 0);
8789 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8790 op0 = convert_memory_address (ptr_mode, op0);
8791 return op0;
8792 }
8793
8794 /* If OP0 is not aligned at least as much as the type requires, we
8795 need to make a temporary, copy OP0 to it, and take the address of
8796 the temporary. We want to use the alignment of the type, not of
8797 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8798 the test for BLKmode means that can't happen. The test for
8799 BLKmode is because we never make mis-aligned MEMs with
8800 non-BLKmode.
8801
8802 We don't need to do this at all if the machine doesn't have
8803 strict alignment. */
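/* This can arise, for instance, when taking the address of a member
of a packed structure on a strict-alignment target: the member is
copied into a suitably aligned stack temporary and the address of
that temporary is used instead. */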
8804 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8805 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8806 > MEM_ALIGN (op0))
8807 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8808 {
8809 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8810 rtx new;
8811
8812 if (TYPE_ALIGN_OK (inner_type))
8813 abort ();
8814
8815 if (TREE_ADDRESSABLE (inner_type))
8816 {
8817 /* We can't make a bitwise copy of this object, so fail. */
8818 error ("cannot take the address of an unaligned member");
8819 return const0_rtx;
8820 }
8821
8822 new = assign_stack_temp_for_type
8823 (TYPE_MODE (inner_type),
8824 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8825 : int_size_in_bytes (inner_type),
8826 1, build_qualified_type (inner_type,
8827 (TYPE_QUALS (inner_type)
8828 | TYPE_QUAL_CONST)));
8829
8830 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8831 (modifier == EXPAND_STACK_PARM
8832 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8833
8834 op0 = new;
8835 }
8836
8837 op0 = force_operand (XEXP (op0, 0), target);
8838 }
8839
8840 if (flag_force_addr
8841 && GET_CODE (op0) != REG
8842 && modifier != EXPAND_CONST_ADDRESS
8843 && modifier != EXPAND_INITIALIZER
8844 && modifier != EXPAND_SUM)
8845 op0 = force_reg (Pmode, op0);
8846
8847 if (GET_CODE (op0) == REG
8848 && ! REG_USERVAR_P (op0))
8849 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8850
8851 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8852 op0 = convert_memory_address (ptr_mode, op0);
8853
8854 return op0;
8855
8856 case ENTRY_VALUE_EXPR:
8857 abort ();
8858
8859 /* COMPLEX type for Extended Pascal & Fortran */
8860 case COMPLEX_EXPR:
8861 {
8862 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8863 rtx insns;
8864
8865 /* Get the rtx of the operands. */
8866 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8867 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8868
8869 if (! target)
8870 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8871
8872 start_sequence ();
8873
8874 /* Move the real (op0) and imaginary (op1) parts to their location. */
8875 emit_move_insn (gen_realpart (mode, target), op0);
8876 emit_move_insn (gen_imagpart (mode, target), op1);
8877
8878 insns = get_insns ();
8879 end_sequence ();
8880
8881 /* Complex construction should appear as a single unit. */
8882 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8883 each with a separate pseudo as destination.
8884 It's not correct for flow to treat them as a unit. */
8885 if (GET_CODE (target) != CONCAT)
8886 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8887 else
8888 emit_insn (insns);
8889
8890 return target;
8891 }
8892
8893 case REALPART_EXPR:
8894 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8895 return gen_realpart (mode, op0);
8896
8897 case IMAGPART_EXPR:
8898 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8899 return gen_imagpart (mode, op0);
8900
8901 case CONJ_EXPR:
8902 {
8903 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8904 rtx imag_t;
8905 rtx insns;
8906
8907 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8908
8909 if (! target)
8910 target = gen_reg_rtx (mode);
8911
8912 start_sequence ();
8913
8914 /* Store the realpart and the negated imagpart to target. */
8915 emit_move_insn (gen_realpart (partmode, target),
8916 gen_realpart (partmode, op0));
8917
8918 imag_t = gen_imagpart (partmode, target);
8919 temp = expand_unop (partmode,
8920 ! unsignedp && flag_trapv
8921 && (GET_MODE_CLASS(partmode) == MODE_INT)
8922 ? negv_optab : neg_optab,
8923 gen_imagpart (partmode, op0), imag_t, 0);
8924 if (temp != imag_t)
8925 emit_move_insn (imag_t, temp);
8926
8927 insns = get_insns ();
8928 end_sequence ();
8929
8930 /* Conjugate should appear as a single unit.
8931 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8932 each with a separate pseudo as destination.
8933 It's not correct for flow to treat them as a unit. */
8934 if (GET_CODE (target) != CONCAT)
8935 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8936 else
8937 emit_insn (insns);
8938
8939 return target;
8940 }
8941
8942 case TRY_CATCH_EXPR:
8943 {
8944 tree handler = TREE_OPERAND (exp, 1);
8945
8946 expand_eh_region_start ();
8947
8948 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8949
8950 expand_eh_region_end_cleanup (handler);
8951
8952 return op0;
8953 }
8954
8955 case TRY_FINALLY_EXPR:
8956 {
8957 tree try_block = TREE_OPERAND (exp, 0);
8958 tree finally_block = TREE_OPERAND (exp, 1);
8959
8960 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8961 {
8962 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8963 is not sufficient, so we cannot expand the block twice.
8964 So we play games with GOTO_SUBROUTINE_EXPR to let us
8965 expand the thing only once. */
8966 /* When not optimizing, we go ahead with this form since
8967 (1) user breakpoints operate more predictably without
8968 code duplication, and
8969 (2) we're not running any of the global optimizers
8970 that would explode in time/space with the highly
8971 connected CFG created by the indirect branching. */
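/* Roughly, the code emitted below has this shape (in GNU C notation):
<try_block> (with the cleanup below registered)
return_link = &&resume; goto finally; (the cleanup)
resume: goto done;
finally: <finally_block>; goto *return_link;
done: */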
8972
8973 rtx finally_label = gen_label_rtx ();
8974 rtx done_label = gen_label_rtx ();
8975 rtx return_link = gen_reg_rtx (Pmode);
8976 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8977 (tree) finally_label, (tree) return_link);
8978 TREE_SIDE_EFFECTS (cleanup) = 1;
8979
8980 /* Start a new binding layer that will keep track of all cleanup
8981 actions to be performed. */
8982 expand_start_bindings (2);
8983 target_temp_slot_level = temp_slot_level;
8984
8985 expand_decl_cleanup (NULL_TREE, cleanup);
8986 op0 = expand_expr (try_block, target, tmode, modifier);
8987
8988 preserve_temp_slots (op0);
8989 expand_end_bindings (NULL_TREE, 0, 0);
8990 emit_jump (done_label);
8991 emit_label (finally_label);
8992 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8993 emit_indirect_jump (return_link);
8994 emit_label (done_label);
8995 }
8996 else
8997 {
8998 expand_start_bindings (2);
8999 target_temp_slot_level = temp_slot_level;
9000
9001 expand_decl_cleanup (NULL_TREE, finally_block);
9002 op0 = expand_expr (try_block, target, tmode, modifier);
9003
9004 preserve_temp_slots (op0);
9005 expand_end_bindings (NULL_TREE, 0, 0);
9006 }
9007
9008 return op0;
9009 }
9010
9011 case GOTO_SUBROUTINE_EXPR:
9012 {
9013 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9014 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9015 rtx return_address = gen_label_rtx ();
9016 emit_move_insn (return_link,
9017 gen_rtx_LABEL_REF (Pmode, return_address));
9018 emit_jump (subr);
9019 emit_label (return_address);
9020 return const0_rtx;
9021 }
9022
9023 case VA_ARG_EXPR:
9024 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9025
9026 case EXC_PTR_EXPR:
9027 return get_exception_pointer (cfun);
9028
9029 case FDESC_EXPR:
9030 /* Function descriptors are not valid except for as
9031 initialization constants, and should not be expanded. */
9032 abort ();
9033
9034 default:
9035 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9036 alt_rtl);
9037 }
9038
9039 /* Here to do an ordinary binary operator, generating an instruction
9040 from the optab already placed in `this_optab'. */
9041 binop:
9042 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9043 subtarget, &op0, &op1, 0);
9044 binop2:
9045 if (modifier == EXPAND_STACK_PARM)
9046 target = 0;
9047 temp = expand_binop (mode, this_optab, op0, op1, target,
9048 unsignedp, OPTAB_LIB_WIDEN);
9049 if (temp == 0)
9050 abort ();
9051 return temp;
9052 }
9053 \f
9054 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9055 when applied to the address of EXP produces an address known to be
9056 aligned more than BIGGEST_ALIGNMENT. */
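/* Roughly, OFFSET must have the form ((- (T) &EXP) & C), possibly
wrapped in conversions, where C is larger than BIGGEST_ALIGNMENT
and C + 1 is a power of 2. */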
9057
9058 static int
9059 is_aligning_offset (tree offset, tree exp)
9060 {
9061 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9062 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9063 || TREE_CODE (offset) == NOP_EXPR
9064 || TREE_CODE (offset) == CONVERT_EXPR
9065 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9066 offset = TREE_OPERAND (offset, 0);
9067
9068 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9069 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9070 if (TREE_CODE (offset) != BIT_AND_EXPR
9071 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9072 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9073 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9074 return 0;
9075
9076 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9077 It must be NEGATE_EXPR. Then strip any more conversions. */
9078 offset = TREE_OPERAND (offset, 0);
9079 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9080 || TREE_CODE (offset) == NOP_EXPR
9081 || TREE_CODE (offset) == CONVERT_EXPR)
9082 offset = TREE_OPERAND (offset, 0);
9083
9084 if (TREE_CODE (offset) != NEGATE_EXPR)
9085 return 0;
9086
9087 offset = TREE_OPERAND (offset, 0);
9088 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9089 || TREE_CODE (offset) == NOP_EXPR
9090 || TREE_CODE (offset) == CONVERT_EXPR)
9091 offset = TREE_OPERAND (offset, 0);
9092
9093 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9094 whose type is the same as EXP. */
9095 return (TREE_CODE (offset) == ADDR_EXPR
9096 && (TREE_OPERAND (offset, 0) == exp
9097 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9098 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9099 == TREE_TYPE (exp)))));
9100 }
9101 \f
9102 /* Return the tree node if ARG corresponds to a string constant, or zero
9103 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9104 in bytes within the string that ARG is accessing. The type of the
9105 offset will be `sizetype'. */
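/* For example, for ARG of the form `"hello" + i' we return the
STRING_CST for "hello" and set *PTR_OFFSET to i, converted to
sizetype. */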
9106
9107 tree
9108 string_constant (tree arg, tree *ptr_offset)
9109 {
9110 STRIP_NOPS (arg);
9111
9112 if (TREE_CODE (arg) == ADDR_EXPR
9113 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9114 {
9115 *ptr_offset = size_zero_node;
9116 return TREE_OPERAND (arg, 0);
9117 }
9118 else if (TREE_CODE (arg) == PLUS_EXPR)
9119 {
9120 tree arg0 = TREE_OPERAND (arg, 0);
9121 tree arg1 = TREE_OPERAND (arg, 1);
9122
9123 STRIP_NOPS (arg0);
9124 STRIP_NOPS (arg1);
9125
9126 if (TREE_CODE (arg0) == ADDR_EXPR
9127 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9128 {
9129 *ptr_offset = convert (sizetype, arg1);
9130 return TREE_OPERAND (arg0, 0);
9131 }
9132 else if (TREE_CODE (arg1) == ADDR_EXPR
9133 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9134 {
9135 *ptr_offset = convert (sizetype, arg0);
9136 return TREE_OPERAND (arg1, 0);
9137 }
9138 }
9139
9140 return 0;
9141 }
9142 \f
9143 /* Expand code for a post- or pre- increment or decrement
9144 and return the RTX for the result.
9145 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9146
9147 static rtx
9148 expand_increment (tree exp, int post, int ignore)
9149 {
9150 rtx op0, op1;
9151 rtx temp, value;
9152 tree incremented = TREE_OPERAND (exp, 0);
9153 optab this_optab = add_optab;
9154 int icode;
9155 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9156 int op0_is_copy = 0;
9157 int single_insn = 0;
9158 /* 1 means we can't store into OP0 directly,
9159 because it is a subreg narrower than a word,
9160 and we don't dare clobber the rest of the word. */
9161 int bad_subreg = 0;
9162
9163 /* Stabilize any component ref that might need to be
9164 evaluated more than once below. */
9165 if (!post
9166 || TREE_CODE (incremented) == BIT_FIELD_REF
9167 || (TREE_CODE (incremented) == COMPONENT_REF
9168 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9169 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9170 incremented = stabilize_reference (incremented);
9171 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9172 ones into save exprs so that they don't accidentally get evaluated
9173 more than once by the code below. */
9174 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9175 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9176 incremented = save_expr (incremented);
9177
9178 /* Compute the operands as RTX.
9179 Note whether OP0 is the actual lvalue or a copy of it:
9180 I believe it is a copy iff it is a register or subreg
9181 and insns were generated in computing it. */
9182
9183 temp = get_last_insn ();
9184 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9185
9186 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9187 in place but instead must do sign- or zero-extension during assignment,
9188 so we copy it into a new register and let the code below use it as
9189 a copy.
9190
9191 Note that we can safely modify this SUBREG since it is known not to be
9192 shared (it was made by the expand_expr call above). */
9193
9194 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9195 {
9196 if (post)
9197 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9198 else
9199 bad_subreg = 1;
9200 }
9201 else if (GET_CODE (op0) == SUBREG
9202 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9203 {
9204 /* We cannot increment this SUBREG in place. If we are
9205 post-incrementing, get a copy of the old value. Otherwise,
9206 just mark that we cannot increment in place. */
9207 if (post)
9208 op0 = copy_to_reg (op0);
9209 else
9210 bad_subreg = 1;
9211 }
9212
9213 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9214 && temp != get_last_insn ());
9215 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9216
9217 /* Decide whether incrementing or decrementing. */
9218 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9219 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9220 this_optab = sub_optab;
9221
9222 /* Convert decrement by a constant into a negative increment. */
9223 if (this_optab == sub_optab
9224 && GET_CODE (op1) == CONST_INT)
9225 {
9226 op1 = GEN_INT (-INTVAL (op1));
9227 this_optab = add_optab;
9228 }
9229
9230 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9231 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9232
9233 /* For a preincrement, see if we can do this with a single instruction. */
9234 if (!post)
9235 {
9236 icode = (int) this_optab->handlers[(int) mode].insn_code;
9237 if (icode != (int) CODE_FOR_nothing
9238 /* Make sure that OP0 is valid for operands 0 and 1
9239 of the insn we want to queue. */
9240 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9241 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9242 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9243 single_insn = 1;
9244 }
9245
9246 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9247 then we cannot just increment OP0. We must therefore contrive to
9248 increment the original value. Then, for postincrement, we can return
9249 OP0 since it is a copy of the old value. For preincrement, expand here
9250 unless we can do it with a single insn.
9251
9252 Likewise if storing directly into OP0 would clobber high bits
9253 we need to preserve (bad_subreg). */
9254 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9255 {
9256 /* This is the easiest way to increment the value wherever it is.
9257 Problems with multiple evaluation of INCREMENTED are prevented
9258 because either (1) it is a component_ref or preincrement,
9259 in which case it was stabilized above, or (2) it is an array_ref
9260 with constant index in an array in a register, which is
9261 safe to reevaluate. */
9262 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9263 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9264 ? MINUS_EXPR : PLUS_EXPR),
9265 TREE_TYPE (exp),
9266 incremented,
9267 TREE_OPERAND (exp, 1));
9268
9269 while (TREE_CODE (incremented) == NOP_EXPR
9270 || TREE_CODE (incremented) == CONVERT_EXPR)
9271 {
9272 newexp = convert (TREE_TYPE (incremented), newexp);
9273 incremented = TREE_OPERAND (incremented, 0);
9274 }
9275
9276 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9277 return post ? op0 : temp;
9278 }
9279
9280 if (post)
9281 {
9282 /* We have a true reference to the value in OP0.
9283 If there is an insn to add or subtract in this mode, queue it.
9284 Queuing the increment insn avoids the register shuffling
9285 that often results if we must increment now and first save
9286 the old value for subsequent use. */
9287
9288 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9289 op0 = stabilize (op0);
9290 #endif
9291
9292 icode = (int) this_optab->handlers[(int) mode].insn_code;
9293 if (icode != (int) CODE_FOR_nothing
9294 /* Make sure that OP0 is valid for operands 0 and 1
9295 of the insn we want to queue. */
9296 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9297 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9298 {
9299 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9300 op1 = force_reg (mode, op1);
9301
9302 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9303 }
9304 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9305 {
9306 rtx addr = (general_operand (XEXP (op0, 0), mode)
9307 ? force_reg (Pmode, XEXP (op0, 0))
9308 : copy_to_reg (XEXP (op0, 0)));
9309 rtx temp, result;
9310
9311 op0 = replace_equiv_address (op0, addr);
9312 temp = force_reg (GET_MODE (op0), op0);
9313 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9314 op1 = force_reg (mode, op1);
9315
9316 /* The increment queue is LIFO, thus we have to `queue'
9317 the instructions in reverse order. */
9318 enqueue_insn (op0, gen_move_insn (op0, temp));
9319 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9320 return result;
9321 }
9322 }
9323
9324 /* Preincrement, or we can't increment with one simple insn. */
9325 if (post)
9326 /* Save a copy of the value before inc or dec, to return it later. */
9327 temp = value = copy_to_reg (op0);
9328 else
9329 /* Arrange to return the incremented value. */
9330 /* Copy the rtx because expand_binop will protect from the queue,
9331 and the results of that would be invalid for us to return
9332 if our caller does emit_queue before using our result. */
9333 temp = copy_rtx (value = op0);
9334
9335 /* Increment however we can. */
9336 op1 = expand_binop (mode, this_optab, value, op1, op0,
9337 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9338
9339 /* Make sure the value is stored into OP0. */
9340 if (op1 != op0)
9341 emit_move_insn (op0, op1);
9342
9343 return temp;
9344 }
9345 \f
9346 /* Generate code to calculate EXP using a store-flag instruction
9347 and return an rtx for the result. EXP is either a comparison
9348 or a TRUTH_NOT_EXPR whose operand is a comparison.
9349
9350 If TARGET is nonzero, store the result there if convenient.
9351
9352 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9353 cheap.
9354
9355 Return zero if there is no suitable set-flag instruction
9356 available on this machine.
9357
9358 Once expand_expr has been called on the arguments of the comparison,
9359 we are committed to doing the store flag, since it is not safe to
9360 re-evaluate the expression. We emit the store-flag insn by calling
9361 emit_store_flag, but only expand the arguments if we have a reason
9362 to believe that emit_store_flag will be successful. If we think that
9363 it will, but it isn't, we have to simulate the store-flag with a
9364 set/jump/set sequence. */
9365
9366 static rtx
9367 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9368 {
9369 enum rtx_code code;
9370 tree arg0, arg1, type;
9371 tree tem;
9372 enum machine_mode operand_mode;
9373 int invert = 0;
9374 int unsignedp;
9375 rtx op0, op1;
9376 enum insn_code icode;
9377 rtx subtarget = target;
9378 rtx result, label;
9379
9380 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9381 result at the end. We can't simply invert the test since it would
9382 have already been inverted if it were valid. This case occurs for
9383 some floating-point comparisons. */
9384
9385 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9386 invert = 1, exp = TREE_OPERAND (exp, 0);
9387
9388 arg0 = TREE_OPERAND (exp, 0);
9389 arg1 = TREE_OPERAND (exp, 1);
9390
9391 /* Don't crash if the comparison was erroneous. */
9392 if (arg0 == error_mark_node || arg1 == error_mark_node)
9393 return const0_rtx;
9394
9395 type = TREE_TYPE (arg0);
9396 operand_mode = TYPE_MODE (type);
9397 unsignedp = TREE_UNSIGNED (type);
9398
9399 /* We won't bother with BLKmode store-flag operations because it would mean
9400 passing a lot of information to emit_store_flag. */
9401 if (operand_mode == BLKmode)
9402 return 0;
9403
9404 /* We won't bother with store-flag operations involving function pointers
9405 when function pointers must be canonicalized before comparisons. */
9406 #ifdef HAVE_canonicalize_funcptr_for_compare
9407 if (HAVE_canonicalize_funcptr_for_compare
9408 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9409 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9410 == FUNCTION_TYPE))
9411 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9412 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9413 == FUNCTION_TYPE))))
9414 return 0;
9415 #endif
9416
9417 STRIP_NOPS (arg0);
9418 STRIP_NOPS (arg1);
9419
9420 /* Get the rtx comparison code to use. We know that EXP is a comparison
9421 operation of some type. Some comparisons against 1 and -1 can be
9422 converted to comparisons with zero. Do so here so that the tests
9423 below will be aware that we have a comparison with zero. These
9424 tests will not catch constants in the first operand, but constants
9425 are rarely passed as the first operand. */
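/* For example, `x < 1' becomes `x <= 0'; for signed operands,
`x <= -1' becomes `x < 0' and `x > -1' becomes `x >= 0'. */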
9426
9427 switch (TREE_CODE (exp))
9428 {
9429 case EQ_EXPR:
9430 code = EQ;
9431 break;
9432 case NE_EXPR:
9433 code = NE;
9434 break;
9435 case LT_EXPR:
9436 if (integer_onep (arg1))
9437 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9438 else
9439 code = unsignedp ? LTU : LT;
9440 break;
9441 case LE_EXPR:
9442 if (! unsignedp && integer_all_onesp (arg1))
9443 arg1 = integer_zero_node, code = LT;
9444 else
9445 code = unsignedp ? LEU : LE;
9446 break;
9447 case GT_EXPR:
9448 if (! unsignedp && integer_all_onesp (arg1))
9449 arg1 = integer_zero_node, code = GE;
9450 else
9451 code = unsignedp ? GTU : GT;
9452 break;
9453 case GE_EXPR:
9454 if (integer_onep (arg1))
9455 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9456 else
9457 code = unsignedp ? GEU : GE;
9458 break;
9459
9460 case UNORDERED_EXPR:
9461 code = UNORDERED;
9462 break;
9463 case ORDERED_EXPR:
9464 code = ORDERED;
9465 break;
9466 case UNLT_EXPR:
9467 code = UNLT;
9468 break;
9469 case UNLE_EXPR:
9470 code = UNLE;
9471 break;
9472 case UNGT_EXPR:
9473 code = UNGT;
9474 break;
9475 case UNGE_EXPR:
9476 code = UNGE;
9477 break;
9478 case UNEQ_EXPR:
9479 code = UNEQ;
9480 break;
9481
9482 default:
9483 abort ();
9484 }
9485
9486 /* Put a constant second. */
9487 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9488 {
9489 tem = arg0; arg0 = arg1; arg1 = tem;
9490 code = swap_condition (code);
9491 }
9492
9493 /* If this is an equality or inequality test of a single bit, we can
9494 do this by shifting the bit being tested to the low-order bit and
9495 masking the result with the constant 1. If the condition was EQ,
9496 we xor it with 1. This does not require an scc insn and is faster
9497 than an scc insn even if we have it.
9498
9499 The code to make this transformation was moved into fold_single_bit_test,
9500 so we just call into the folder and expand its result. */
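/* For example, `(x & 8) != 0' becomes roughly `(x >> 3) & 1', and
`(x & 8) == 0' becomes `((x >> 3) & 1) ^ 1'. */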
9501
9502 if ((code == NE || code == EQ)
9503 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9504 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9505 {
9506 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9507 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9508 arg0, arg1, type),
9509 target, VOIDmode, EXPAND_NORMAL);
9510 }
9511
9512 /* Now see if we are likely to be able to do this. Return if not. */
9513 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9514 return 0;
9515
9516 icode = setcc_gen_code[(int) code];
9517 if (icode == CODE_FOR_nothing
9518 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9519 {
9520 /* We can only do this if it is one of the special cases that
9521 can be handled without an scc insn. */
9522 if ((code == LT && integer_zerop (arg1))
9523 || (! only_cheap && code == GE && integer_zerop (arg1)))
9524 ;
9525 else if (BRANCH_COST >= 0
9526 && ! only_cheap && (code == NE || code == EQ)
9527 && TREE_CODE (type) != REAL_TYPE
9528 && ((abs_optab->handlers[(int) operand_mode].insn_code
9529 != CODE_FOR_nothing)
9530 || (ffs_optab->handlers[(int) operand_mode].insn_code
9531 != CODE_FOR_nothing)))
9532 ;
9533 else
9534 return 0;
9535 }
9536
9537 if (! get_subtarget (target)
9538 || GET_MODE (subtarget) != operand_mode)
9539 subtarget = 0;
9540
9541 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9542
9543 if (target == 0)
9544 target = gen_reg_rtx (mode);
9545
9546 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9547 because, if emit_store_flag does anything, it will succeed and
9548 OP0 and OP1 will not be used subsequently. */
9549
9550 result = emit_store_flag (target, code,
9551 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9552 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9553 operand_mode, unsignedp, 1);
9554
9555 if (result)
9556 {
9557 if (invert)
9558 result = expand_binop (mode, xor_optab, result, const1_rtx,
9559 result, 0, OPTAB_LIB_WIDEN);
9560 return result;
9561 }
9562
9563 /* If this failed, we have to do this with set/compare/jump/set code. */
9564 if (GET_CODE (target) != REG
9565 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9566 target = gen_reg_rtx (GET_MODE (target));
9567
9568 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9569 result = compare_from_rtx (op0, op1, code, unsignedp,
9570 operand_mode, NULL_RTX);
9571 if (GET_CODE (result) == CONST_INT)
9572 return (((result == const0_rtx && ! invert)
9573 || (result != const0_rtx && invert))
9574 ? const0_rtx : const1_rtx);
9575
9576 /* The code of RESULT may not match CODE if compare_from_rtx
9577 decided to swap its operands and reverse the original code.
9578
9579 We know that compare_from_rtx returns either a CONST_INT or
9580 a new comparison code, so it is safe to just extract the
9581 code from RESULT. */
9582 code = GET_CODE (result);
9583
9584 label = gen_label_rtx ();
9585 if (bcc_gen_fctn[(int) code] == 0)
9586 abort ();
9587
9588 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9589 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9590 emit_label (label);
9591
9592 return target;
9593 }
9594 \f
9595
9596 /* Stubs in case we haven't got a casesi insn. */
9597 #ifndef HAVE_casesi
9598 # define HAVE_casesi 0
9599 # define gen_casesi(a, b, c, d, e) (0)
9600 # define CODE_FOR_casesi CODE_FOR_nothing
9601 #endif
9602
9603 /* If the machine does not have a case insn that compares the bounds,
9604 this means extra overhead for dispatch tables, which raises the
9605 threshold for using them. */
9606 #ifndef CASE_VALUES_THRESHOLD
9607 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9608 #endif /* CASE_VALUES_THRESHOLD */
9609
9610 unsigned int
9611 case_values_threshold (void)
9612 {
9613 return CASE_VALUES_THRESHOLD;
9614 }
9615
9616 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9617 0 otherwise (i.e. if there is no casesi instruction). */
9618 int
9619 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9620 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9621 {
9622 enum machine_mode index_mode = SImode;
9623 int index_bits = GET_MODE_BITSIZE (index_mode);
9624 rtx op1, op2, index;
9625 enum machine_mode op_mode;
9626
9627 if (! HAVE_casesi)
9628 return 0;
9629
9630 /* Convert the index to SImode. */
9631 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9632 {
9633 enum machine_mode omode = TYPE_MODE (index_type);
9634 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9635
9636 /* We must handle the endpoints in the original mode. */
9637 index_expr = build (MINUS_EXPR, index_type,
9638 index_expr, minval);
9639 minval = integer_zero_node;
9640 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9641 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9642 omode, 1, default_label);
9643 /* Now we can safely truncate. */
9644 index = convert_to_mode (index_mode, index, 0);
9645 }
9646 else
9647 {
9648 if (TYPE_MODE (index_type) != index_mode)
9649 {
9650 index_expr = convert ((*lang_hooks.types.type_for_size)
9651 (index_bits, 0), index_expr);
9652 index_type = TREE_TYPE (index_expr);
9653 }
9654
9655 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9656 }
9657 emit_queue ();
9658 index = protect_from_queue (index, 0);
9659 do_pending_stack_adjust ();
9660
9661 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9662 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9663 (index, op_mode))
9664 index = copy_to_mode_reg (op_mode, index);
9665
9666 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9667
9668 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9669 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9670 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9671 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9672 (op1, op_mode))
9673 op1 = copy_to_mode_reg (op_mode, op1);
9674
9675 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9676
9677 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9678 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9679 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9680 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9681 (op2, op_mode))
9682 op2 = copy_to_mode_reg (op_mode, op2);
9683
9684 emit_jump_insn (gen_casesi (index, op1, op2,
9685 table_label, default_label));
9686 return 1;
9687 }
9688
9689 /* Attempt to generate a tablejump instruction; same concept. */
9690 #ifndef HAVE_tablejump
9691 #define HAVE_tablejump 0
9692 #define gen_tablejump(x, y) (0)
9693 #endif
9694
9695 /* Subroutine of the next function.
9696
9697 INDEX is the value being switched on, with the lowest value
9698 in the table already subtracted.
9699 MODE is its expected mode (needed if INDEX is constant).
9700 RANGE is the length of the jump table.
9701 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9702
9703 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9704 index value is out of range. */
9705
9706 static void
9707 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9708 rtx default_label)
9709 {
9710 rtx temp, vector;
9711
9712 if (INTVAL (range) > cfun->max_jumptable_ents)
9713 cfun->max_jumptable_ents = INTVAL (range);
9714
9715 /* Do an unsigned comparison (in the proper mode) between the index
9716 expression and the value which represents the length of the range.
9717 Since we just finished subtracting the lower bound of the range
9718 from the index expression, this comparison allows us to simultaneously
9719 check that the original index expression value is both greater than
9720 or equal to the minimum value of the range and less than or equal to
9721 the maximum value of the range. */
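/* For example, for a case range of [5, 12], INDEX is the original
value minus 5 and RANGE is 7, so the single unsigned test
INDEX > 7 rejects original values both below 5 and above 12. */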
9722
9723 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9724 default_label);
9725
9726 /* If index is in range, it must fit in Pmode.
9727 Convert to Pmode so we can index with it. */
9728 if (mode != Pmode)
9729 index = convert_to_mode (Pmode, index, 1);
9730
9731 /* Don't let a MEM slip through, because then INDEX that comes
9732 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9733 and break_out_memory_refs will go to work on it and mess it up. */
9734 #ifdef PIC_CASE_VECTOR_ADDRESS
9735 if (flag_pic && GET_CODE (index) != REG)
9736 index = copy_to_mode_reg (Pmode, index);
9737 #endif
9738
9739 /* If flag_force_addr were to affect this address
9740 it could interfere with the tricky assumptions made
9741 about addresses that contain label-refs,
9742 which may be valid only very near the tablejump itself. */
9743 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9744 GET_MODE_SIZE, because this indicates how large insns are. The other
9745 uses should all be Pmode, because they are addresses. This code
9746 could fail if addresses and insns are not the same size. */
9747 index = gen_rtx_PLUS (Pmode,
9748 gen_rtx_MULT (Pmode, index,
9749 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9750 gen_rtx_LABEL_REF (Pmode, table_label));
9751 #ifdef PIC_CASE_VECTOR_ADDRESS
9752 if (flag_pic)
9753 index = PIC_CASE_VECTOR_ADDRESS (index);
9754 else
9755 #endif
9756 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9757 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9758 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9759 RTX_UNCHANGING_P (vector) = 1;
9760 MEM_NOTRAP_P (vector) = 1;
9761 convert_move (temp, vector, 0);
9762
9763 emit_jump_insn (gen_tablejump (temp, table_label));
9764
9765 /* If we are generating PIC code or if the table is PC-relative, the
9766 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9767 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9768 emit_barrier ();
9769 }
9770
9771 int
9772 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9773 rtx table_label, rtx default_label)
9774 {
9775 rtx index;
9776
9777 if (! HAVE_tablejump)
9778 return 0;
9779
9780 index_expr = fold (build (MINUS_EXPR, index_type,
9781 convert (index_type, index_expr),
9782 convert (index_type, minval)));
9783 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9784 emit_queue ();
9785 index = protect_from_queue (index, 0);
9786 do_pending_stack_adjust ();
9787
9788 do_tablejump (index, TYPE_MODE (index_type),
9789 convert_modes (TYPE_MODE (index_type),
9790 TYPE_MODE (TREE_TYPE (range)),
9791 expand_expr (range, NULL_RTX,
9792 VOIDmode, 0),
9793 TREE_UNSIGNED (TREE_TYPE (range))),
9794 table_label, default_label);
9795 return 1;
9796 }
9797
9798 /* Nonzero if the mode is a valid vector mode for this architecture.
9799 This returns nonzero even if there is no hardware support for the
9800 vector mode, but we can emulate with narrower modes. */
9801
9802 int
9803 vector_mode_valid_p (enum machine_mode mode)
9804 {
9805 enum mode_class class = GET_MODE_CLASS (mode);
9806 enum machine_mode innermode;
9807
9808 /* Doh! What's going on? */
9809 if (class != MODE_VECTOR_INT
9810 && class != MODE_VECTOR_FLOAT)
9811 return 0;
9812
9813 /* Hardware support. Woo hoo! */
9814 if (VECTOR_MODE_SUPPORTED_P (mode))
9815 return 1;
9816
9817 innermode = GET_MODE_INNER (mode);
9818
9819 /* We should probably return 1 if requesting V4DI and we have no DI
9820 but do have V2DI; however, this is probably very unlikely. */
9821
9822 /* If we have support for the inner mode, we can safely emulate it.
9823 We may not have V2DI, but we can emulate with a pair of DIs. */
9824 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9825 }
9826
9827 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9828 static rtx
9829 const_vector_from_tree (tree exp)
9830 {
9831 rtvec v;
9832 int units, i;
9833 tree link, elt;
9834 enum machine_mode inner, mode;
9835
9836 mode = TYPE_MODE (TREE_TYPE (exp));
9837
9838 if (is_zeros_p (exp))
9839 return CONST0_RTX (mode);
9840
9841 units = GET_MODE_NUNITS (mode);
9842 inner = GET_MODE_INNER (mode);
9843
9844 v = rtvec_alloc (units);
9845
9846 link = TREE_VECTOR_CST_ELTS (exp);
9847 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9848 {
9849 elt = TREE_VALUE (link);
9850
9851 if (TREE_CODE (elt) == REAL_CST)
9852 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9853 inner);
9854 else
9855 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9856 TREE_INT_CST_HIGH (elt),
9857 inner);
9858 }
9859
9860 /* Initialize remaining elements to 0. */
9861 for (; i < units; ++i)
9862 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9863
9864 return gen_rtx_raw_CONST_VECTOR (mode, v);
9865 }
9866
9867 #include "gt-expr.h"