1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
51
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
54
  55     They should be processed from last to first if the stack and args
  56     grow in opposite directions, but only if we have push insns.  */
57
58 #ifdef PUSH_ROUNDING
59
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
65
66 #endif
67
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
75
76 /* Assume that case vectors are not pc-relative. */
77 #ifndef CASE_VECTOR_PC_RELATIVE
78 #define CASE_VECTOR_PC_RELATIVE 0
79 #endif
80
81 /* Convert defined/undefined to boolean. */
82 #ifdef TARGET_MEM_FUNCTIONS
83 #undef TARGET_MEM_FUNCTIONS
84 #define TARGET_MEM_FUNCTIONS 1
85 #else
86 #define TARGET_MEM_FUNCTIONS 0
87 #endif
88
89
90 /* If this is nonzero, we do not bother generating VOLATILE
91 around volatile memory references, and we are willing to
92 output indirect addresses. If cse is to follow, we reject
93 indirect addresses so a useful potential cse is generated;
94 if it is used only once, instruction combination will produce
95 the same indirect address eventually. */
96 int cse_not_expected;
97
98 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
99 tree placeholder_list = 0;
100
101 /* This structure is used by move_by_pieces to describe the move to
102 be performed. */
103 struct move_by_pieces
104 {
105 rtx to;
106 rtx to_addr;
107 int autinc_to;
108 int explicit_inc_to;
109 rtx from;
110 rtx from_addr;
111 int autinc_from;
112 int explicit_inc_from;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 int reverse;
116 };
117
 118 /* This structure is used by store_by_pieces to describe the store or
 119    clear to be performed.  */
120
121 struct store_by_pieces
122 {
123 rtx to;
124 rtx to_addr;
125 int autinc_to;
126 int explicit_inc_to;
127 unsigned HOST_WIDE_INT len;
128 HOST_WIDE_INT offset;
129 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
130 void *constfundata;
131 int reverse;
132 };
133
134 static rtx enqueue_insn (rtx, rtx);
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
136 unsigned int);
137 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
141 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
142 static tree emit_block_move_libcall_fn (int);
143 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
144 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
145 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
146 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
147 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
148 struct store_by_pieces *);
149 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
150 static rtx clear_storage_via_libcall (rtx, rtx);
151 static tree clear_storage_libcall_fn (int);
152 static rtx compress_float_constant (rtx, rtx);
153 static rtx get_subtarget (rtx);
154 static int is_zeros_p (tree);
155 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int);
158 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
159 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int, tree, int);
161 static rtx var_rtx (tree);
162
163 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
164 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
165
166 static int is_aligning_offset (tree, tree);
167 static rtx expand_increment (tree, int, int);
168 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
169 enum expand_modifier);
170 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
171 #ifdef PUSH_ROUNDING
172 static void emit_single_push_insn (enum machine_mode, rtx, tree);
173 #endif
174 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
175 static rtx const_vector_from_tree (tree);
176
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
180
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
183
184 /* Record for each mode whether we can float-extend from memory. */
185
186 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
187
188 /* This macro is used to determine whether move_by_pieces should be called
189 to perform a structure copy. */
190 #ifndef MOVE_BY_PIECES_P
191 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
193 #endif
194
195 /* This macro is used to determine whether clear_by_pieces should be
196 called to clear storage. */
197 #ifndef CLEAR_BY_PIECES_P
198 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
199 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
200 #endif
201
202 /* This macro is used to determine whether store_by_pieces should be
203 called to "memset" storage with byte values other than zero, or
204 to "memcpy" storage when the source is a constant string. */
205 #ifndef STORE_BY_PIECES_P
206 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
207 #endif
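/* Illustrative example only (it assumes a 32-bit target where MOVE_MAX is 4
   and MOVE_RATIO is 15; both are target-defined): a fully aligned 16-byte
   copy needs 16/4 = 4 word-sized moves, and 4 < 15, so MOVE_BY_PIECES_P is
   true and emit_block_move expands the copy inline rather than trying a
   movstr pattern or calling memcpy.  */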
208
209 /* This array records the insn_code of insns to perform block moves. */
210 enum insn_code movstr_optab[NUM_MACHINE_MODES];
211
212 /* This array records the insn_code of insns to perform block clears. */
213 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
214
215 /* These arrays record the insn_code of two different kinds of insns
216 to perform block compares. */
217 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
218 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
219
220 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
221 struct file_stack *expr_wfl_stack;
222
223 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
224
225 #ifndef SLOW_UNALIGNED_ACCESS
226 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
227 #endif
228 \f
229 /* This is run once per compilation to set up which modes can be used
230 directly in memory and to initialize the block move optab. */
231
232 void
233 init_expr_once (void)
234 {
235 rtx insn, pat;
236 enum machine_mode mode;
237 int num_clobbers;
238 rtx mem, mem1;
239 rtx reg;
240
241 /* Try indexing by frame ptr and try by stack ptr.
242 It is known that on the Convex the stack ptr isn't a valid index.
243 With luck, one or the other is valid on any machine. */
244 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
245 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
246
247 /* A scratch register we can modify in-place below to avoid
248 useless RTL allocations. */
249 reg = gen_rtx_REG (VOIDmode, -1);
250
251 insn = rtx_alloc (INSN);
252 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
253 PATTERN (insn) = pat;
254
255 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
256 mode = (enum machine_mode) ((int) mode + 1))
257 {
258 int regno;
259
260 direct_load[(int) mode] = direct_store[(int) mode] = 0;
261 PUT_MODE (mem, mode);
262 PUT_MODE (mem1, mode);
263 PUT_MODE (reg, mode);
264
265 /* See if there is some register that can be used in this mode and
266 directly loaded or stored from memory. */
267
268 if (mode != VOIDmode && mode != BLKmode)
269 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
270 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
271 regno++)
272 {
273 if (! HARD_REGNO_MODE_OK (regno, mode))
274 continue;
275
276 REGNO (reg) = regno;
277
278 SET_SRC (pat) = mem;
279 SET_DEST (pat) = reg;
280 if (recog (pat, insn, &num_clobbers) >= 0)
281 direct_load[(int) mode] = 1;
282
283 SET_SRC (pat) = mem1;
284 SET_DEST (pat) = reg;
285 if (recog (pat, insn, &num_clobbers) >= 0)
286 direct_load[(int) mode] = 1;
287
288 SET_SRC (pat) = reg;
289 SET_DEST (pat) = mem;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_store[(int) mode] = 1;
292
293 SET_SRC (pat) = reg;
294 SET_DEST (pat) = mem1;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_store[(int) mode] = 1;
297 }
298 }
299
300 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
301
302 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
303 mode = GET_MODE_WIDER_MODE (mode))
304 {
305 enum machine_mode srcmode;
306 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
307 srcmode = GET_MODE_WIDER_MODE (srcmode))
308 {
309 enum insn_code ic;
310
311 ic = can_extend_p (mode, srcmode, 0);
312 if (ic == CODE_FOR_nothing)
313 continue;
314
315 PUT_MODE (mem, srcmode);
316
317 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
318 float_extend_from_mem[mode][srcmode] = true;
319 }
320 }
321 }
322
323 /* This is run at the start of compiling a function. */
324
325 void
326 init_expr (void)
327 {
328 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
329 }
330
331 /* Small sanity check that the queue is empty at the end of a function. */
332
333 void
334 finish_expr_for_function (void)
335 {
336 if (pending_chain)
337 abort ();
338 }
339 \f
340 /* Manage the queue of increment instructions to be output
341 for POSTINCREMENT_EXPR expressions, etc. */
342
343 /* Queue up to increment (or change) VAR later. BODY says how:
344 BODY should be the same thing you would pass to emit_insn
345 to increment right away. It will go to emit_insn later on.
346
347 The value is a QUEUED expression to be used in place of VAR
348 where you want to guarantee the pre-incrementation value of VAR. */
349
350 static rtx
351 enqueue_insn (rtx var, rtx body)
352 {
353 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
354 body, pending_chain);
355 return pending_chain;
356 }
357
358 /* Use protect_from_queue to convert a QUEUED expression
359 into something that you can put immediately into an instruction.
360 If the queued incrementation has not happened yet,
361 protect_from_queue returns the variable itself.
362 If the incrementation has happened, protect_from_queue returns a temp
363 that contains a copy of the old value of the variable.
364
365 Any time an rtx which might possibly be a QUEUED is to be put
366 into an instruction, it must be passed through protect_from_queue first.
367 QUEUED expressions are not meaningful in instructions.
368
369 Do not pass a value through protect_from_queue and then hold
370 on to it for a while before putting it in an instruction!
371 If the queue is flushed in between, incorrect code will result. */
372
373 rtx
374 protect_from_queue (rtx x, int modify)
375 {
376 RTX_CODE code = GET_CODE (x);
377
378 #if 0 /* A QUEUED can hang around after the queue is forced out. */
379 /* Shortcut for most common case. */
380 if (pending_chain == 0)
381 return x;
382 #endif
383
384 if (code != QUEUED)
385 {
386 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
387 use of autoincrement. Make a copy of the contents of the memory
388 location rather than a copy of the address, but not if the value is
389 of mode BLKmode. Don't modify X in place since it might be
390 shared. */
391 if (code == MEM && GET_MODE (x) != BLKmode
392 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
393 {
394 rtx y = XEXP (x, 0);
395 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
396
397 if (QUEUED_INSN (y))
398 {
399 rtx temp = gen_reg_rtx (GET_MODE (x));
400
401 emit_insn_before (gen_move_insn (temp, new),
402 QUEUED_INSN (y));
403 return temp;
404 }
405
406 /* Copy the address into a pseudo, so that the returned value
407 remains correct across calls to emit_queue. */
408 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
409 }
410
411 /* Otherwise, recursively protect the subexpressions of all
412 the kinds of rtx's that can contain a QUEUED. */
413 if (code == MEM)
414 {
415 rtx tem = protect_from_queue (XEXP (x, 0), 0);
416 if (tem != XEXP (x, 0))
417 {
418 x = copy_rtx (x);
419 XEXP (x, 0) = tem;
420 }
421 }
422 else if (code == PLUS || code == MULT)
423 {
424 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
425 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
426 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
427 {
428 x = copy_rtx (x);
429 XEXP (x, 0) = new0;
430 XEXP (x, 1) = new1;
431 }
432 }
433 return x;
434 }
435 /* If the increment has not happened, use the variable itself. Copy it
436 into a new pseudo so that the value remains correct across calls to
437 emit_queue. */
438 if (QUEUED_INSN (x) == 0)
439 return copy_to_reg (QUEUED_VAR (x));
440 /* If the increment has happened and a pre-increment copy exists,
441 use that copy. */
442 if (QUEUED_COPY (x) != 0)
443 return QUEUED_COPY (x);
444 /* The increment has happened but we haven't set up a pre-increment copy.
445 Set one up now, and use it. */
446 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
447 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
448 QUEUED_INSN (x));
449 return QUEUED_COPY (x);
450 }
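/* Illustrative sketch of how the queue machinery is typically used when
   expanding something like a post-increment.  VAR is a placeholder for
   whatever rtx holds the variable, not a real local in this file.  */
#if 0
{
  rtx var;                                       /* rtx of the variable */
  rtx val;

  /* Schedule "VAR = VAR + 1" to be emitted later.  */
  enqueue_insn (var, gen_add2_insn (var, const1_rtx));

  /* Get something safe to use now; this is the pre-increment value.  */
  val = protect_from_queue (var, 0);

  /* ... use VAL while building the containing expression ... */

  /* At the end of the statement, flush the queued increment.  */
  emit_queue ();
}
#endif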
451
452 /* Return nonzero if X contains a QUEUED expression:
453 if it contains anything that will be altered by a queued increment.
454 We handle only combinations of MEM, PLUS, MINUS and MULT operators
455 since memory addresses generally contain only those. */
456
457 int
458 queued_subexp_p (rtx x)
459 {
460 enum rtx_code code = GET_CODE (x);
461 switch (code)
462 {
463 case QUEUED:
464 return 1;
465 case MEM:
466 return queued_subexp_p (XEXP (x, 0));
467 case MULT:
468 case PLUS:
469 case MINUS:
470 return (queued_subexp_p (XEXP (x, 0))
471 || queued_subexp_p (XEXP (x, 1)));
472 default:
473 return 0;
474 }
475 }
476
477 /* Perform all the pending incrementations. */
478
479 void
480 emit_queue (void)
481 {
482 rtx p;
483 while ((p = pending_chain))
484 {
485 rtx body = QUEUED_BODY (p);
486
487 switch (GET_CODE (body))
488 {
489 case INSN:
490 case JUMP_INSN:
491 case CALL_INSN:
492 case CODE_LABEL:
493 case BARRIER:
494 case NOTE:
495 QUEUED_INSN (p) = body;
496 emit_insn (body);
497 break;
498
499 #ifdef ENABLE_CHECKING
500 case SEQUENCE:
501 abort ();
502 break;
503 #endif
504
505 default:
506 QUEUED_INSN (p) = emit_insn (body);
507 break;
508 }
509
510 pending_chain = QUEUED_NEXT (p);
511 }
512 }
513 \f
514 /* Copy data from FROM to TO, where the machine modes are not the same.
515 Both modes may be integer, or both may be floating.
516 UNSIGNEDP should be nonzero if FROM is an unsigned type.
517 This causes zero-extension instead of sign-extension. */
518
519 void
520 convert_move (rtx to, rtx from, int unsignedp)
521 {
522 enum machine_mode to_mode = GET_MODE (to);
523 enum machine_mode from_mode = GET_MODE (from);
524 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
525 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
526 enum insn_code code;
527 rtx libcall;
528
529 /* rtx code for making an equivalent value. */
530 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
531 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
532
533 to = protect_from_queue (to, 1);
534 from = protect_from_queue (from, 0);
535
536 if (to_real != from_real)
537 abort ();
538
539 /* If FROM is a SUBREG that indicates that we have already done at least
540 the required extension, strip it. We don't handle such SUBREGs as
541 TO here. */
542
543 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
544 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
545 >= GET_MODE_SIZE (to_mode))
546 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
547 from = gen_lowpart (to_mode, from), from_mode = to_mode;
548
549 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
550 abort ();
551
552 if (to_mode == from_mode
553 || (from_mode == VOIDmode && CONSTANT_P (from)))
554 {
555 emit_move_insn (to, from);
556 return;
557 }
558
559 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
560 {
561 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
562 abort ();
563
564 if (VECTOR_MODE_P (to_mode))
565 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
566 else
567 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
568
569 emit_move_insn (to, from);
570 return;
571 }
572
573 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
574 {
575 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
576 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
577 return;
578 }
579
580 if (to_real)
581 {
582 rtx value, insns;
583 convert_optab tab;
584
585 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
586 tab = sext_optab;
587 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
588 tab = trunc_optab;
589 else
590 abort ();
591
592 /* Try converting directly if the insn is supported. */
593
594 code = tab->handlers[to_mode][from_mode].insn_code;
595 if (code != CODE_FOR_nothing)
596 {
597 emit_unop_insn (code, to, from,
598 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
599 return;
600 }
601
602 /* Otherwise use a libcall. */
603 libcall = tab->handlers[to_mode][from_mode].libfunc;
604
605 if (!libcall)
606 /* This conversion is not implemented yet. */
607 abort ();
608
609 start_sequence ();
610 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
611 1, from, from_mode);
612 insns = get_insns ();
613 end_sequence ();
614 emit_libcall_block (insns, to, value,
615 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
616 from)
617 : gen_rtx_FLOAT_EXTEND (to_mode, from));
618 return;
619 }
620
621 /* Handle pointer conversion. */ /* SPEE 900220. */
622 /* Targets are expected to provide conversion insns between PxImode and
623 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
624 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
625 {
626 enum machine_mode full_mode
627 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
628
629 if (trunc_optab->handlers[to_mode][full_mode].insn_code
630 == CODE_FOR_nothing)
631 abort ();
632
633 if (full_mode != from_mode)
634 from = convert_to_mode (full_mode, from, unsignedp);
635 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
636 to, from, UNKNOWN);
637 return;
638 }
639 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
640 {
641 enum machine_mode full_mode
642 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
643
644 if (sext_optab->handlers[full_mode][from_mode].insn_code
645 == CODE_FOR_nothing)
646 abort ();
647
648 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
649 to, from, UNKNOWN);
650 if (to_mode == full_mode)
651 return;
652
653 /* else proceed to integer conversions below */
654 from_mode = full_mode;
655 }
656
657 /* Now both modes are integers. */
658
659 /* Handle expanding beyond a word. */
660 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
661 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
662 {
663 rtx insns;
664 rtx lowpart;
665 rtx fill_value;
666 rtx lowfrom;
667 int i;
668 enum machine_mode lowpart_mode;
669 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
670
671 /* Try converting directly if the insn is supported. */
672 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
673 != CODE_FOR_nothing)
674 {
675 /* If FROM is a SUBREG, put it into a register. Do this
676 so that we always generate the same set of insns for
677 better cse'ing; if an intermediate assignment occurred,
678 we won't be doing the operation directly on the SUBREG. */
679 if (optimize > 0 && GET_CODE (from) == SUBREG)
680 from = force_reg (from_mode, from);
681 emit_unop_insn (code, to, from, equiv_code);
682 return;
683 }
684 /* Next, try converting via full word. */
685 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
686 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
687 != CODE_FOR_nothing))
688 {
689 if (GET_CODE (to) == REG)
690 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
691 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
692 emit_unop_insn (code, to,
693 gen_lowpart (word_mode, to), equiv_code);
694 return;
695 }
696
697 /* No special multiword conversion insn; do it by hand. */
698 start_sequence ();
699
700 /* Since we will turn this into a no conflict block, we must ensure
701 that the source does not overlap the target. */
702
703 if (reg_overlap_mentioned_p (to, from))
704 from = force_reg (from_mode, from);
705
706 /* Get a copy of FROM widened to a word, if necessary. */
707 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
708 lowpart_mode = word_mode;
709 else
710 lowpart_mode = from_mode;
711
712 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
713
714 lowpart = gen_lowpart (lowpart_mode, to);
715 emit_move_insn (lowpart, lowfrom);
716
717 /* Compute the value to put in each remaining word. */
718 if (unsignedp)
719 fill_value = const0_rtx;
720 else
721 {
722 #ifdef HAVE_slt
723 if (HAVE_slt
724 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
725 && STORE_FLAG_VALUE == -1)
726 {
727 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
728 lowpart_mode, 0);
729 fill_value = gen_reg_rtx (word_mode);
730 emit_insn (gen_slt (fill_value));
731 }
732 else
733 #endif
734 {
735 fill_value
736 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
737 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
738 NULL_RTX, 0);
739 fill_value = convert_to_mode (word_mode, fill_value, 1);
740 }
741 }
742
743 /* Fill the remaining words. */
744 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
745 {
746 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
747 rtx subword = operand_subword (to, index, 1, to_mode);
748
749 if (subword == 0)
750 abort ();
751
752 if (fill_value != subword)
753 emit_move_insn (subword, fill_value);
754 }
755
756 insns = get_insns ();
757 end_sequence ();
758
759 emit_no_conflict_block (insns, to, from, NULL_RTX,
760 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
761 return;
762 }
763
764 /* Truncating multi-word to a word or less. */
765 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
766 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
767 {
768 if (!((GET_CODE (from) == MEM
769 && ! MEM_VOLATILE_P (from)
770 && direct_load[(int) to_mode]
771 && ! mode_dependent_address_p (XEXP (from, 0)))
772 || GET_CODE (from) == REG
773 || GET_CODE (from) == SUBREG))
774 from = force_reg (from_mode, from);
775 convert_move (to, gen_lowpart (word_mode, from), 0);
776 return;
777 }
778
779 /* Now follow all the conversions between integers
780 no more than a word long. */
781
782 /* For truncation, usually we can just refer to FROM in a narrower mode. */
783 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
784 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
785 GET_MODE_BITSIZE (from_mode)))
786 {
787 if (!((GET_CODE (from) == MEM
788 && ! MEM_VOLATILE_P (from)
789 && direct_load[(int) to_mode]
790 && ! mode_dependent_address_p (XEXP (from, 0)))
791 || GET_CODE (from) == REG
792 || GET_CODE (from) == SUBREG))
793 from = force_reg (from_mode, from);
794 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
795 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
796 from = copy_to_reg (from);
797 emit_move_insn (to, gen_lowpart (to_mode, from));
798 return;
799 }
800
801 /* Handle extension. */
802 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
803 {
804 /* Convert directly if that works. */
805 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
806 != CODE_FOR_nothing)
807 {
808 if (flag_force_mem)
809 from = force_not_mem (from);
810
811 emit_unop_insn (code, to, from, equiv_code);
812 return;
813 }
814 else
815 {
816 enum machine_mode intermediate;
817 rtx tmp;
818 tree shift_amount;
819
820 /* Search for a mode to convert via. */
821 for (intermediate = from_mode; intermediate != VOIDmode;
822 intermediate = GET_MODE_WIDER_MODE (intermediate))
823 if (((can_extend_p (to_mode, intermediate, unsignedp)
824 != CODE_FOR_nothing)
825 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
826 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
827 GET_MODE_BITSIZE (intermediate))))
828 && (can_extend_p (intermediate, from_mode, unsignedp)
829 != CODE_FOR_nothing))
830 {
831 convert_move (to, convert_to_mode (intermediate, from,
832 unsignedp), unsignedp);
833 return;
834 }
835
836 /* No suitable intermediate mode.
837 Generate what we need with shifts. */
838 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
839 - GET_MODE_BITSIZE (from_mode), 0);
840 from = gen_lowpart (to_mode, force_reg (from_mode, from));
841 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
842 to, unsignedp);
843 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
844 to, unsignedp);
845 if (tmp != to)
846 emit_move_insn (to, tmp);
847 return;
848 }
849 }
850
851 /* Support special truncate insns for certain modes. */
852 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
853 {
854 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
855 to, from, UNKNOWN);
856 return;
857 }
858
859 /* Handle truncation of volatile memrefs, and so on;
860 the things that couldn't be truncated directly,
861 and for which there was no special instruction.
862
863 ??? Code above formerly short-circuited this, for most integer
864 mode pairs, with a force_reg in from_mode followed by a recursive
865 call to this routine. Appears always to have been wrong. */
866 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
867 {
868 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
869 emit_move_insn (to, temp);
870 return;
871 }
872
873 /* Mode combination is not recognized. */
874 abort ();
875 }
876
877 /* Return an rtx for a value that would result
878 from converting X to mode MODE.
879 Both X and MODE may be floating, or both integer.
880 UNSIGNEDP is nonzero if X is an unsigned value.
881 This can be done by referring to a part of X in place
882 or by copying to a new temporary with conversion.
883
884 This function *must not* call protect_from_queue
885 except when putting X into an insn (in which case convert_move does it). */
886
887 rtx
888 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
889 {
890 return convert_modes (mode, VOIDmode, x, unsignedp);
891 }
892
893 /* Return an rtx for a value that would result
894 from converting X from mode OLDMODE to mode MODE.
895 Both modes may be floating, or both integer.
896 UNSIGNEDP is nonzero if X is an unsigned value.
897
898 This can be done by referring to a part of X in place
899 or by copying to a new temporary with conversion.
900
901 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
902
903 This function *must not* call protect_from_queue
904 except when putting X into an insn (in which case convert_move does it). */
905
906 rtx
907 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
908 {
909 rtx temp;
910
911 /* If FROM is a SUBREG that indicates that we have already done at least
912 the required extension, strip it. */
913
914 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
915 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
916 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
917 x = gen_lowpart (mode, x);
918
919 if (GET_MODE (x) != VOIDmode)
920 oldmode = GET_MODE (x);
921
922 if (mode == oldmode)
923 return x;
924
925 /* There is one case that we must handle specially: If we are converting
926 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
927 we are to interpret the constant as unsigned, gen_lowpart will do
 928    the wrong thing if the constant appears negative.  What we want to do is
929 make the high-order word of the constant zero, not all ones. */
930
931 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
932 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
933 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
934 {
935 HOST_WIDE_INT val = INTVAL (x);
936
937 if (oldmode != VOIDmode
938 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
939 {
940 int width = GET_MODE_BITSIZE (oldmode);
941
942 /* We need to zero extend VAL. */
943 val &= ((HOST_WIDE_INT) 1 << width) - 1;
944 }
945
946 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
947 }
948
949 /* We can do this with a gen_lowpart if both desired and current modes
950 are integer, and this is either a constant integer, a register, or a
951 non-volatile MEM. Except for the constant case where MODE is no
952 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
953
954 if ((GET_CODE (x) == CONST_INT
955 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
956 || (GET_MODE_CLASS (mode) == MODE_INT
957 && GET_MODE_CLASS (oldmode) == MODE_INT
958 && (GET_CODE (x) == CONST_DOUBLE
959 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
960 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
961 && direct_load[(int) mode])
962 || (GET_CODE (x) == REG
963 && (! HARD_REGISTER_P (x)
964 || HARD_REGNO_MODE_OK (REGNO (x), mode))
965 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
966 GET_MODE_BITSIZE (GET_MODE (x)))))))))
967 {
968 /* ?? If we don't know OLDMODE, we have to assume here that
969 X does not need sign- or zero-extension. This may not be
970 the case, but it's the best we can do. */
971 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
972 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
973 {
974 HOST_WIDE_INT val = INTVAL (x);
975 int width = GET_MODE_BITSIZE (oldmode);
976
977 /* We must sign or zero-extend in this case. Start by
978 zero-extending, then sign extend if we need to. */
979 val &= ((HOST_WIDE_INT) 1 << width) - 1;
980 if (! unsignedp
981 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
982 val |= (HOST_WIDE_INT) (-1) << width;
983
984 return gen_int_mode (val, mode);
985 }
986
987 return gen_lowpart (mode, x);
988 }
989
 990 /* Converting an integer constant into MODE is always equivalent to a
 991    subreg operation.  */
992 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
993 {
994 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
995 abort ();
996 return simplify_gen_subreg (mode, x, oldmode, 0);
997 }
998
999 temp = gen_reg_rtx (mode);
1000 convert_move (temp, x, unsignedp);
1001 return temp;
1002 }
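/* A hedged usage sketch: widening a QImode value to SImode with zero
   extension.  X is a placeholder for some QImode rtx, not a real local.  */
#if 0
{
  rtx x;                                          /* some QImode value   */
  rtx wide = convert_to_mode (SImode, x, 1);      /* 1 = treat as unsigned */

  /* WIDE is either X itself reinterpreted in SImode or a fresh pseudo
     that convert_move loaded with the zero-extended value.  */
}
#endif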
1003 \f
1004 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1005 store efficiently. Due to internal GCC limitations, this is
1006 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1007 for an immediate constant. */
1008
1009 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
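/* For instance, on a host whose HOST_WIDE_INT is 64 bits wide the limit
   above is MIN (MOVE_MAX_PIECES, 16), so at most 16 bytes of constant
   data can be emitted per piece even if the target allows wider moves.  */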
1010
1011 /* Determine whether the LEN bytes can be moved by using several move
1012 instructions. Return nonzero if a call to move_by_pieces should
1013 succeed. */
1014
1015 int
1016 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1017 unsigned int align ATTRIBUTE_UNUSED)
1018 {
1019 return MOVE_BY_PIECES_P (len, align);
1020 }
1021
1022 /* Generate several move instructions to copy LEN bytes from block FROM to
1023 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1024 and TO through protect_from_queue before calling.
1025
1026 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1027 used to push FROM to the stack.
1028
1029 ALIGN is maximum stack alignment we can assume.
1030
 1031    If ENDP is 0 return TO, if ENDP is 1 return the memory at the end
 1032    ala mempcpy, and if ENDP is 2 return the memory at the end minus one
 1033    byte ala stpcpy.  */
1034
1035 rtx
1036 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1037 unsigned int align, int endp)
1038 {
1039 struct move_by_pieces data;
1040 rtx to_addr, from_addr = XEXP (from, 0);
1041 unsigned int max_size = MOVE_MAX_PIECES + 1;
1042 enum machine_mode mode = VOIDmode, tmode;
1043 enum insn_code icode;
1044
1045 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1046
1047 data.offset = 0;
1048 data.from_addr = from_addr;
1049 if (to)
1050 {
1051 to_addr = XEXP (to, 0);
1052 data.to = to;
1053 data.autinc_to
1054 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1055 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1056 data.reverse
1057 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1058 }
1059 else
1060 {
1061 to_addr = NULL_RTX;
1062 data.to = NULL_RTX;
1063 data.autinc_to = 1;
1064 #ifdef STACK_GROWS_DOWNWARD
1065 data.reverse = 1;
1066 #else
1067 data.reverse = 0;
1068 #endif
1069 }
1070 data.to_addr = to_addr;
1071 data.from = from;
1072 data.autinc_from
1073 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1074 || GET_CODE (from_addr) == POST_INC
1075 || GET_CODE (from_addr) == POST_DEC);
1076
1077 data.explicit_inc_from = 0;
1078 data.explicit_inc_to = 0;
1079 if (data.reverse) data.offset = len;
1080 data.len = len;
1081
1082 /* If copying requires more than two move insns,
1083 copy addresses to registers (to make displacements shorter)
1084 and use post-increment if available. */
1085 if (!(data.autinc_from && data.autinc_to)
1086 && move_by_pieces_ninsns (len, align) > 2)
1087 {
1088 /* Find the mode of the largest move... */
1089 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1090 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1091 if (GET_MODE_SIZE (tmode) < max_size)
1092 mode = tmode;
1093
1094 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1095 {
1096 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1097 data.autinc_from = 1;
1098 data.explicit_inc_from = -1;
1099 }
1100 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1101 {
1102 data.from_addr = copy_addr_to_reg (from_addr);
1103 data.autinc_from = 1;
1104 data.explicit_inc_from = 1;
1105 }
1106 if (!data.autinc_from && CONSTANT_P (from_addr))
1107 data.from_addr = copy_addr_to_reg (from_addr);
1108 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1109 {
1110 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1111 data.autinc_to = 1;
1112 data.explicit_inc_to = -1;
1113 }
1114 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1115 {
1116 data.to_addr = copy_addr_to_reg (to_addr);
1117 data.autinc_to = 1;
1118 data.explicit_inc_to = 1;
1119 }
1120 if (!data.autinc_to && CONSTANT_P (to_addr))
1121 data.to_addr = copy_addr_to_reg (to_addr);
1122 }
1123
1124 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1125 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1126 align = MOVE_MAX * BITS_PER_UNIT;
1127
1128 /* First move what we can in the largest integer mode, then go to
1129 successively smaller modes. */
1130
1131 while (max_size > 1)
1132 {
1133 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1134 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1135 if (GET_MODE_SIZE (tmode) < max_size)
1136 mode = tmode;
1137
1138 if (mode == VOIDmode)
1139 break;
1140
1141 icode = mov_optab->handlers[(int) mode].insn_code;
1142 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1143 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1144
1145 max_size = GET_MODE_SIZE (mode);
1146 }
1147
1148 /* The code above should have handled everything. */
1149 if (data.len > 0)
1150 abort ();
1151
1152 if (endp)
1153 {
1154 rtx to1;
1155
1156 if (data.reverse)
1157 abort ();
1158 if (data.autinc_to)
1159 {
1160 if (endp == 2)
1161 {
1162 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1163 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1164 else
1165 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1166 -1));
1167 }
1168 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1169 data.offset);
1170 }
1171 else
1172 {
1173 if (endp == 2)
1174 --data.offset;
1175 to1 = adjust_address (data.to, QImode, data.offset);
1176 }
1177 return to1;
1178 }
1179 else
1180 return data.to;
1181 }
1182
1183 /* Return number of insns required to move L bytes by pieces.
1184 ALIGN (in bits) is maximum alignment we can assume. */
1185
1186 static unsigned HOST_WIDE_INT
1187 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1188 {
1189 unsigned HOST_WIDE_INT n_insns = 0;
1190 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1191
1192 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1193 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1194 align = MOVE_MAX * BITS_PER_UNIT;
1195
1196 while (max_size > 1)
1197 {
1198 enum machine_mode mode = VOIDmode, tmode;
1199 enum insn_code icode;
1200
1201 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1202 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1203 if (GET_MODE_SIZE (tmode) < max_size)
1204 mode = tmode;
1205
1206 if (mode == VOIDmode)
1207 break;
1208
1209 icode = mov_optab->handlers[(int) mode].insn_code;
1210 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1211 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1212
1213 max_size = GET_MODE_SIZE (mode);
1214 }
1215
1216 if (l)
1217 abort ();
1218 return n_insns;
1219 }
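/* Worked example (illustrative; assumes a 32-bit target with MOVE_MAX 4 and
   a fully aligned source and destination): for L = 15 the loop above counts
   15/4 = 3 SImode moves with 3 bytes left, then 3/2 = 1 HImode move with
   1 byte left, then 1 QImode move, so the result is 5.  */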
1220
1221 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1222 with move instructions for mode MODE. GENFUN is the gen_... function
1223 to make a move insn for that mode. DATA has all the other info. */
1224
1225 static void
1226 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1227 struct move_by_pieces *data)
1228 {
1229 unsigned int size = GET_MODE_SIZE (mode);
1230 rtx to1 = NULL_RTX, from1;
1231
1232 while (data->len >= size)
1233 {
1234 if (data->reverse)
1235 data->offset -= size;
1236
1237 if (data->to)
1238 {
1239 if (data->autinc_to)
1240 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1241 data->offset);
1242 else
1243 to1 = adjust_address (data->to, mode, data->offset);
1244 }
1245
1246 if (data->autinc_from)
1247 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1248 data->offset);
1249 else
1250 from1 = adjust_address (data->from, mode, data->offset);
1251
1252 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1253 emit_insn (gen_add2_insn (data->to_addr,
1254 GEN_INT (-(HOST_WIDE_INT)size)));
1255 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1256 emit_insn (gen_add2_insn (data->from_addr,
1257 GEN_INT (-(HOST_WIDE_INT)size)));
1258
1259 if (data->to)
1260 emit_insn ((*genfun) (to1, from1));
1261 else
1262 {
1263 #ifdef PUSH_ROUNDING
1264 emit_single_push_insn (mode, from1, NULL);
1265 #else
1266 abort ();
1267 #endif
1268 }
1269
1270 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1271 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1272 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1273 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1274
1275 if (! data->reverse)
1276 data->offset += size;
1277
1278 data->len -= size;
1279 }
1280 }
1281 \f
1282 /* Emit code to move a block Y to a block X. This may be done with
1283 string-move instructions, with multiple scalar move instructions,
1284 or with a library call.
1285
1286 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1287 SIZE is an rtx that says how long they are.
1288 ALIGN is the maximum alignment we can assume they have.
1289 METHOD describes what kind of copy this is, and what mechanisms may be used.
1290
1291 Return the address of the new block, if memcpy is called and returns it,
1292 0 otherwise. */
1293
1294 rtx
1295 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1296 {
1297 bool may_use_call;
1298 rtx retval = 0;
1299 unsigned int align;
1300
1301 switch (method)
1302 {
1303 case BLOCK_OP_NORMAL:
1304 may_use_call = true;
1305 break;
1306
1307 case BLOCK_OP_CALL_PARM:
1308 may_use_call = block_move_libcall_safe_for_call_parm ();
1309
1310 /* Make inhibit_defer_pop nonzero around the library call
1311 to force it to pop the arguments right away. */
1312 NO_DEFER_POP;
1313 break;
1314
1315 case BLOCK_OP_NO_LIBCALL:
1316 may_use_call = false;
1317 break;
1318
1319 default:
1320 abort ();
1321 }
1322
1323 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1324
1325 if (GET_MODE (x) != BLKmode)
1326 abort ();
1327 if (GET_MODE (y) != BLKmode)
1328 abort ();
1329
1330 x = protect_from_queue (x, 1);
1331 y = protect_from_queue (y, 0);
1332 size = protect_from_queue (size, 0);
1333
1334 if (GET_CODE (x) != MEM)
1335 abort ();
1336 if (GET_CODE (y) != MEM)
1337 abort ();
1338 if (size == 0)
1339 abort ();
1340
1341 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1342 can be incorrect is coming from __builtin_memcpy. */
1343 if (GET_CODE (size) == CONST_INT)
1344 {
1345 if (INTVAL (size) == 0)
1346 return 0;
1347
1348 x = shallow_copy_rtx (x);
1349 y = shallow_copy_rtx (y);
1350 set_mem_size (x, size);
1351 set_mem_size (y, size);
1352 }
1353
1354 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1355 move_by_pieces (x, y, INTVAL (size), align, 0);
1356 else if (emit_block_move_via_movstr (x, y, size, align))
1357 ;
1358 else if (may_use_call)
1359 retval = emit_block_move_via_libcall (x, y, size);
1360 else
1361 emit_block_move_via_loop (x, y, size, align);
1362
1363 if (method == BLOCK_OP_CALL_PARM)
1364 OK_DEFER_POP;
1365
1366 return retval;
1367 }
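/* A hedged sketch of a typical call.  DST and SRC stand for BLKmode MEMs
   built by the caller; they are placeholders, not real locals.  */
#if 0
{
  rtx dst, src;                          /* (mem:BLK ...) operands       */
  rtx len = GEN_INT (64);                /* number of bytes to copy      */

  emit_block_move (dst, src, len, BLOCK_OP_NORMAL);

  /* BLOCK_OP_CALL_PARM must additionally avoid clobbering outgoing
     argument slots, and BLOCK_OP_NO_LIBCALL forbids the memcpy path.  */
}
#endif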
1368
1369 /* A subroutine of emit_block_move. Returns true if calling the
1370 block move libcall will not clobber any parameters which may have
1371 already been placed on the stack. */
1372
1373 static bool
1374 block_move_libcall_safe_for_call_parm (void)
1375 {
1376 /* If arguments are pushed on the stack, then they're safe. */
1377 if (PUSH_ARGS)
1378 return true;
1379
1380 /* If registers go on the stack anyway, any argument is sure to clobber
1381 an outgoing argument. */
1382 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1383 {
1384 tree fn = emit_block_move_libcall_fn (false);
1385 (void) fn;
1386 if (REG_PARM_STACK_SPACE (fn) != 0)
1387 return false;
1388 }
1389 #endif
1390
1391 /* If any argument goes in memory, then it might clobber an outgoing
1392 argument. */
1393 {
1394 CUMULATIVE_ARGS args_so_far;
1395 tree fn, arg;
1396
1397 fn = emit_block_move_libcall_fn (false);
1398 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1399
1400 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1401 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1402 {
1403 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1404 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1405 if (!tmp || !REG_P (tmp))
1406 return false;
1407 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1408 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1409 NULL_TREE, 1))
1410 return false;
1411 #endif
1412 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1413 }
1414 }
1415 return true;
1416 }
1417
1418 /* A subroutine of emit_block_move. Expand a movstr pattern;
1419 return true if successful. */
1420
1421 static bool
1422 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1423 {
1424 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1425 enum machine_mode mode;
1426
1427 /* Since this is a move insn, we don't care about volatility. */
1428 volatile_ok = 1;
1429
1430 /* Try the most limited insn first, because there's no point
1431 including more than one in the machine description unless
1432 the more limited one has some advantage. */
1433
1434 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1435 mode = GET_MODE_WIDER_MODE (mode))
1436 {
1437 enum insn_code code = movstr_optab[(int) mode];
1438 insn_operand_predicate_fn pred;
1439
1440 if (code != CODE_FOR_nothing
1441 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1442 here because if SIZE is less than the mode mask, as it is
1443 returned by the macro, it will definitely be less than the
1444 actual mode mask. */
1445 && ((GET_CODE (size) == CONST_INT
1446 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1447 <= (GET_MODE_MASK (mode) >> 1)))
1448 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1449 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1450 || (*pred) (x, BLKmode))
1451 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1452 || (*pred) (y, BLKmode))
1453 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1454 || (*pred) (opalign, VOIDmode)))
1455 {
1456 rtx op2;
1457 rtx last = get_last_insn ();
1458 rtx pat;
1459
1460 op2 = convert_to_mode (mode, size, 1);
1461 pred = insn_data[(int) code].operand[2].predicate;
1462 if (pred != 0 && ! (*pred) (op2, mode))
1463 op2 = copy_to_mode_reg (mode, op2);
1464
1465 /* ??? When called via emit_block_move_for_call, it'd be
1466 nice if there were some way to inform the backend, so
1467 that it doesn't fail the expansion because it thinks
1468 emitting the libcall would be more efficient. */
1469
1470 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1471 if (pat)
1472 {
1473 emit_insn (pat);
1474 volatile_ok = 0;
1475 return true;
1476 }
1477 else
1478 delete_insns_since (last);
1479 }
1480 }
1481
1482 volatile_ok = 0;
1483 return false;
1484 }
1485
1486 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1487 Return the return value from memcpy, 0 otherwise. */
1488
1489 static rtx
1490 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1491 {
1492 rtx dst_addr, src_addr;
1493 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1494 enum machine_mode size_mode;
1495 rtx retval;
1496
1497 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1498
1499 It is unsafe to save the value generated by protect_from_queue and reuse
1500 it later. Consider what happens if emit_queue is called before the
1501 return value from protect_from_queue is used.
1502
1503 Expansion of the CALL_EXPR below will call emit_queue before we are
1504 finished emitting RTL for argument setup. So if we are not careful we
1505 could get the wrong value for an argument.
1506
1507 To avoid this problem we go ahead and emit code to copy the addresses of
1508 DST and SRC and SIZE into new pseudos. We can then place those new
1509 pseudos into an RTL_EXPR and use them later, even after a call to
1510 emit_queue.
1511
1512 Note this is not strictly needed for library calls since they do not call
1513 emit_queue before loading their arguments. However, we may need to have
1514 library calls call emit_queue in the future since failing to do so could
1515 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1516 arguments in registers. */
1517
1518 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1519 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1520
1521 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1522 src_addr = convert_memory_address (ptr_mode, src_addr);
1523
1524 dst_tree = make_tree (ptr_type_node, dst_addr);
1525 src_tree = make_tree (ptr_type_node, src_addr);
1526
1527 if (TARGET_MEM_FUNCTIONS)
1528 size_mode = TYPE_MODE (sizetype);
1529 else
1530 size_mode = TYPE_MODE (unsigned_type_node);
1531
1532 size = convert_to_mode (size_mode, size, 1);
1533 size = copy_to_mode_reg (size_mode, size);
1534
1535 /* It is incorrect to use the libcall calling conventions to call
1536 memcpy in this context. This could be a user call to memcpy and
1537 the user may wish to examine the return value from memcpy. For
1538 targets where libcalls and normal calls have different conventions
1539 for returning pointers, we could end up generating incorrect code.
1540
1541 For convenience, we generate the call to bcopy this way as well. */
1542
1543 if (TARGET_MEM_FUNCTIONS)
1544 size_tree = make_tree (sizetype, size);
1545 else
1546 size_tree = make_tree (unsigned_type_node, size);
1547
1548 fn = emit_block_move_libcall_fn (true);
1549 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1550 if (TARGET_MEM_FUNCTIONS)
1551 {
1552 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1553 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1554 }
1555 else
1556 {
1557 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1558 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1559 }
1560
1561 /* Now we have to build up the CALL_EXPR itself. */
1562 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1563 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1564 call_expr, arg_list, NULL_TREE);
1565
1566 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1567
1568 /* If we are initializing a readonly value, show the above call clobbered
1569 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1570 the delay slot scheduler might overlook conflicts and take nasty
1571 decisions. */
1572 if (RTX_UNCHANGING_P (dst))
1573 add_function_usage_to
1574 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1575 gen_rtx_CLOBBER (VOIDmode, dst),
1576 NULL_RTX));
1577
1578 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1579 }
1580
1581 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1582 for the function we use for block copies. The first time FOR_CALL
1583 is true, we call assemble_external. */
1584
1585 static GTY(()) tree block_move_fn;
1586
1587 void
1588 init_block_move_fn (const char *asmspec)
1589 {
1590 if (!block_move_fn)
1591 {
1592 tree args, fn;
1593
1594 if (TARGET_MEM_FUNCTIONS)
1595 {
1596 fn = get_identifier ("memcpy");
1597 args = build_function_type_list (ptr_type_node, ptr_type_node,
1598 const_ptr_type_node, sizetype,
1599 NULL_TREE);
1600 }
1601 else
1602 {
1603 fn = get_identifier ("bcopy");
1604 args = build_function_type_list (void_type_node, const_ptr_type_node,
1605 ptr_type_node, unsigned_type_node,
1606 NULL_TREE);
1607 }
1608
1609 fn = build_decl (FUNCTION_DECL, fn, args);
1610 DECL_EXTERNAL (fn) = 1;
1611 TREE_PUBLIC (fn) = 1;
1612 DECL_ARTIFICIAL (fn) = 1;
1613 TREE_NOTHROW (fn) = 1;
1614
1615 block_move_fn = fn;
1616 }
1617
1618 if (asmspec)
1619 {
1620 SET_DECL_RTL (block_move_fn, NULL_RTX);
1621 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1622 }
1623 }
1624
1625 static tree
1626 emit_block_move_libcall_fn (int for_call)
1627 {
1628 static bool emitted_extern;
1629
1630 if (!block_move_fn)
1631 init_block_move_fn (NULL);
1632
1633 if (for_call && !emitted_extern)
1634 {
1635 emitted_extern = true;
1636 make_decl_rtl (block_move_fn, NULL);
1637 assemble_external (block_move_fn);
1638 }
1639
1640 return block_move_fn;
1641 }
1642
1643 /* A subroutine of emit_block_move. Copy the data via an explicit
1644 loop. This is used only when libcalls are forbidden. */
1645 /* ??? It'd be nice to copy in hunks larger than QImode. */
1646
1647 static void
1648 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1649 unsigned int align ATTRIBUTE_UNUSED)
1650 {
1651 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1652 enum machine_mode iter_mode;
1653
1654 iter_mode = GET_MODE (size);
1655 if (iter_mode == VOIDmode)
1656 iter_mode = word_mode;
1657
1658 top_label = gen_label_rtx ();
1659 cmp_label = gen_label_rtx ();
1660 iter = gen_reg_rtx (iter_mode);
1661
1662 emit_move_insn (iter, const0_rtx);
1663
1664 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1665 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1666 do_pending_stack_adjust ();
1667
1668 emit_note (NOTE_INSN_LOOP_BEG);
1669
1670 emit_jump (cmp_label);
1671 emit_label (top_label);
1672
1673 tmp = convert_modes (Pmode, iter_mode, iter, true);
1674 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1675 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1676 x = change_address (x, QImode, x_addr);
1677 y = change_address (y, QImode, y_addr);
1678
1679 emit_move_insn (x, y);
1680
1681 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1682 true, OPTAB_LIB_WIDEN);
1683 if (tmp != iter)
1684 emit_move_insn (iter, tmp);
1685
1686 emit_note (NOTE_INSN_LOOP_CONT);
1687 emit_label (cmp_label);
1688
1689 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1690 true, top_label);
1691
1692 emit_note (NOTE_INSN_LOOP_END);
1693 }
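/* At run time the code emitted above behaves roughly like

       for (iter = 0; iter < size; iter++)
         ((char *) x)[iter] = ((char *) y)[iter];

   with the comparison placed at the bottom of the loop.  */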
1694 \f
1695 /* Copy all or part of a value X into registers starting at REGNO.
1696 The number of registers to be filled is NREGS. */
1697
1698 void
1699 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1700 {
1701 int i;
1702 #ifdef HAVE_load_multiple
1703 rtx pat;
1704 rtx last;
1705 #endif
1706
1707 if (nregs == 0)
1708 return;
1709
1710 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1711 x = validize_mem (force_const_mem (mode, x));
1712
1713 /* See if the machine can do this with a load multiple insn. */
1714 #ifdef HAVE_load_multiple
1715 if (HAVE_load_multiple)
1716 {
1717 last = get_last_insn ();
1718 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1719 GEN_INT (nregs));
1720 if (pat)
1721 {
1722 emit_insn (pat);
1723 return;
1724 }
1725 else
1726 delete_insns_since (last);
1727 }
1728 #endif
1729
1730 for (i = 0; i < nregs; i++)
1731 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1732 operand_subword_force (x, i, mode));
1733 }
1734
1735 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1736 The number of registers to be filled is NREGS. */
1737
1738 void
1739 move_block_from_reg (int regno, rtx x, int nregs)
1740 {
1741 int i;
1742
1743 if (nregs == 0)
1744 return;
1745
1746 /* See if the machine can do this with a store multiple insn. */
1747 #ifdef HAVE_store_multiple
1748 if (HAVE_store_multiple)
1749 {
1750 rtx last = get_last_insn ();
1751 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1752 GEN_INT (nregs));
1753 if (pat)
1754 {
1755 emit_insn (pat);
1756 return;
1757 }
1758 else
1759 delete_insns_since (last);
1760 }
1761 #endif
1762
1763 for (i = 0; i < nregs; i++)
1764 {
1765 rtx tem = operand_subword (x, i, 1, BLKmode);
1766
1767 if (tem == 0)
1768 abort ();
1769
1770 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1771 }
1772 }
1773
1774 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1775 ORIG, where ORIG is a non-consecutive group of registers represented by
1776 a PARALLEL. The clone is identical to the original except in that the
1777 original set of registers is replaced by a new set of pseudo registers.
1778 The new set has the same modes as the original set. */
1779
1780 rtx
1781 gen_group_rtx (rtx orig)
1782 {
1783 int i, length;
1784 rtx *tmps;
1785
1786 if (GET_CODE (orig) != PARALLEL)
1787 abort ();
1788
1789 length = XVECLEN (orig, 0);
1790 tmps = alloca (sizeof (rtx) * length);
1791
1792 /* Skip a NULL entry in first slot. */
1793 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1794
1795 if (i)
1796 tmps[0] = 0;
1797
1798 for (; i < length; i++)
1799 {
1800 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1801 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1802
1803 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1804 }
1805
1806 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1807 }
1808
1809 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1810 where DST is non-consecutive registers represented by a PARALLEL.
1811 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1812 if not known. */
1813
1814 void
1815 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1816 {
1817 rtx *tmps, src;
1818 int start, i;
1819
1820 if (GET_CODE (dst) != PARALLEL)
1821 abort ();
1822
1823 /* Check for a NULL entry, used to indicate that the parameter goes
1824 both on the stack and in registers. */
1825 if (XEXP (XVECEXP (dst, 0, 0), 0))
1826 start = 0;
1827 else
1828 start = 1;
1829
1830 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1831
1832 /* Process the pieces. */
1833 for (i = start; i < XVECLEN (dst, 0); i++)
1834 {
1835 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1836 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1837 unsigned int bytelen = GET_MODE_SIZE (mode);
1838 int shift = 0;
1839
1840 /* Handle trailing fragments that run over the size of the struct. */
1841 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1842 {
1843 /* Arrange to shift the fragment to where it belongs.
1844 extract_bit_field loads to the lsb of the reg. */
1845 if (
1846 #ifdef BLOCK_REG_PADDING
1847 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1848 == (BYTES_BIG_ENDIAN ? upward : downward)
1849 #else
1850 BYTES_BIG_ENDIAN
1851 #endif
1852 )
1853 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1854 bytelen = ssize - bytepos;
1855 if (bytelen <= 0)
1856 abort ();
1857 }
1858
1859 /* If we won't be loading directly from memory, protect the real source
1860 from strange tricks we might play; but make sure that the source can
1861 be loaded directly into the destination. */
1862 src = orig_src;
1863 if (GET_CODE (orig_src) != MEM
1864 && (!CONSTANT_P (orig_src)
1865 || (GET_MODE (orig_src) != mode
1866 && GET_MODE (orig_src) != VOIDmode)))
1867 {
1868 if (GET_MODE (orig_src) == VOIDmode)
1869 src = gen_reg_rtx (mode);
1870 else
1871 src = gen_reg_rtx (GET_MODE (orig_src));
1872
1873 emit_move_insn (src, orig_src);
1874 }
1875
1876 /* Optimize the access just a bit. */
1877 if (GET_CODE (src) == MEM
1878 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1879 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1880 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1881 && bytelen == GET_MODE_SIZE (mode))
1882 {
1883 tmps[i] = gen_reg_rtx (mode);
1884 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1885 }
1886 else if (GET_CODE (src) == CONCAT)
1887 {
1888 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1889 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1890
1891 if ((bytepos == 0 && bytelen == slen0)
1892 || (bytepos != 0 && bytepos + bytelen <= slen))
1893 {
1894 /* The following assumes that the concatenated objects all
1895 have the same size. In this case, a simple calculation
1896 can be used to determine the object and the bit field
1897 to be extracted. */
1898 tmps[i] = XEXP (src, bytepos / slen0);
1899 if (! CONSTANT_P (tmps[i])
1900 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1901 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1902 (bytepos % slen0) * BITS_PER_UNIT,
1903 1, NULL_RTX, mode, mode, ssize);
1904 }
1905 else if (bytepos == 0)
1906 {
1907 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1908 emit_move_insn (mem, src);
1909 tmps[i] = adjust_address (mem, mode, 0);
1910 }
1911 else
1912 abort ();
1913 }
1914 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1915 SIMD register, which is currently broken. While we get GCC
1916 to emit proper RTL for these cases, let's dump to memory. */
1917 else if (VECTOR_MODE_P (GET_MODE (dst))
1918 && GET_CODE (src) == REG)
1919 {
1920 int slen = GET_MODE_SIZE (GET_MODE (src));
1921 rtx mem;
1922
1923 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1924 emit_move_insn (mem, src);
1925 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1926 }
1927 else if (CONSTANT_P (src)
1928 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1929 tmps[i] = src;
1930 else
1931 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1932 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1933 mode, mode, ssize);
1934
1935 if (shift)
1936 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1937 tmps[i], 0, OPTAB_WIDEN);
1938 }
1939
1940 emit_queue ();
1941
1942 /* Copy the extracted pieces into the proper (probable) hard regs. */
1943 for (i = start; i < XVECLEN (dst, 0); i++)
1944 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1945 }
1946
1947 /* Emit code to move a block SRC to block DST, where SRC and DST are
1948 non-consecutive groups of registers, each represented by a PARALLEL. */
1949
1950 void
1951 emit_group_move (rtx dst, rtx src)
1952 {
1953 int i;
1954
1955 if (GET_CODE (src) != PARALLEL
1956 || GET_CODE (dst) != PARALLEL
1957 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1958 abort ();
1959
1960 /* Skip first entry if NULL. */
1961 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1962 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1963 XEXP (XVECEXP (src, 0, i), 0));
1964 }
1965
1966 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1967 where SRC is non-consecutive registers represented by a PARALLEL.
1968 SSIZE represents the total size of block ORIG_DST, or -1 if not
1969 known. */
1970
1971 void
1972 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1973 {
1974 rtx *tmps, dst;
1975 int start, i;
1976
1977 if (GET_CODE (src) != PARALLEL)
1978 abort ();
1979
1980 /* Check for a NULL entry, used to indicate that the parameter goes
1981 both on the stack and in registers. */
1982 if (XEXP (XVECEXP (src, 0, 0), 0))
1983 start = 0;
1984 else
1985 start = 1;
1986
1987 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1988
1989 /* Copy the (probable) hard regs into pseudos. */
1990 for (i = start; i < XVECLEN (src, 0); i++)
1991 {
1992 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1993 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1994 emit_move_insn (tmps[i], reg);
1995 }
1996 emit_queue ();
1997
1998 /* If we won't be storing directly into memory, protect the real destination
1999 from strange tricks we might play. */
2000 dst = orig_dst;
2001 if (GET_CODE (dst) == PARALLEL)
2002 {
2003 rtx temp;
2004
2005 /* We can get a PARALLEL dst if there is a conditional expression in
2006 a return statement. In that case, the dst and src are the same,
2007 so no action is necessary. */
2008 if (rtx_equal_p (dst, src))
2009 return;
2010
2011 /* It is unclear if we can ever reach here, but we may as well handle
2012 it. Allocate a temporary, and split this into a store/load to/from
2013 the temporary. */
2014
2015 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2016 emit_group_store (temp, src, type, ssize);
2017 emit_group_load (dst, temp, type, ssize);
2018 return;
2019 }
2020 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2021 {
2022 dst = gen_reg_rtx (GET_MODE (orig_dst));
2023 /* Make life a bit easier for combine. */
2024 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2025 }
2026
2027 /* Process the pieces. */
2028 for (i = start; i < XVECLEN (src, 0); i++)
2029 {
2030 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2031 enum machine_mode mode = GET_MODE (tmps[i]);
2032 unsigned int bytelen = GET_MODE_SIZE (mode);
2033 rtx dest = dst;
2034
2035 /* Handle trailing fragments that run over the size of the struct. */
2036 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2037 {
2038 /* store_bit_field always takes its value from the lsb.
2039 Move the fragment to the lsb if it's not already there. */
2040 if (
2041 #ifdef BLOCK_REG_PADDING
2042 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2043 == (BYTES_BIG_ENDIAN ? upward : downward)
2044 #else
2045 BYTES_BIG_ENDIAN
2046 #endif
2047 )
2048 {
2049 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2050 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2051 tmps[i], 0, OPTAB_WIDEN);
2052 }
2053 bytelen = ssize - bytepos;
2054 }
2055
2056 if (GET_CODE (dst) == CONCAT)
2057 {
2058 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2059 dest = XEXP (dst, 0);
2060 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2061 {
2062 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2063 dest = XEXP (dst, 1);
2064 }
2065 else if (bytepos == 0 && XVECLEN (src, 0))
2066 {
2067 dest = assign_stack_temp (GET_MODE (dest),
2068 GET_MODE_SIZE (GET_MODE (dest)), 0);
2069 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2070 tmps[i]);
2071 dst = dest;
2072 break;
2073 }
2074 else
2075 abort ();
2076 }
2077
2078 /* Optimize the access just a bit. */
2079 if (GET_CODE (dest) == MEM
2080 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2081 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2082 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2083 && bytelen == GET_MODE_SIZE (mode))
2084 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2085 else
2086 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2087 mode, tmps[i], ssize);
2088 }
2089
2090 emit_queue ();
2091
2092 /* Copy from the pseudo into the (probable) hard reg. */
2093 if (orig_dst != dst)
2094 emit_move_insn (orig_dst, dst);
2095 }
2096
2097 /* Generate code to copy a BLKmode object of TYPE out of a
2098 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2099 is null, a stack temporary is created. TGTBLK is returned.
2100
2101 The purpose of this routine is to handle functions that return
2102 BLKmode structures in registers. Some machines (the PA for example)
2103 want to return all small structures in registers regardless of the
2104 structure's alignment. */
2105
2106 rtx
2107 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2108 {
2109 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2110 rtx src = NULL, dst = NULL;
2111 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2112 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2113
2114 if (tgtblk == 0)
2115 {
2116 tgtblk = assign_temp (build_qualified_type (type,
2117 (TYPE_QUALS (type)
2118 | TYPE_QUAL_CONST)),
2119 0, 1, 1);
2120 preserve_temp_slots (tgtblk);
2121 }
2122
2123 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2124 into a new pseudo which is a full word. */
2125
2126 if (GET_MODE (srcreg) != BLKmode
2127 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2128 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2129
2130 /* If the structure doesn't take up a whole number of words, see whether
2131 SRCREG is padded on the left or on the right. If it's on the left,
2132 set PADDING_CORRECTION to the number of bits to skip.
2133
2134       In most ABIs, the structure will be returned at the least significant
2135       end of the register, which translates to right padding on little-endian
2136 targets and left padding on big-endian targets. The opposite
2137 holds if the structure is returned at the most significant
2138 end of the register. */
2139 if (bytes % UNITS_PER_WORD != 0
2140 && (targetm.calls.return_in_msb (type)
2141 ? !BYTES_BIG_ENDIAN
2142 : BYTES_BIG_ENDIAN))
2143 padding_correction
2144 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
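  /* Worked example (illustrative only): for a 3-byte structure on a
     32-bit-word target that pads on the left, padding_correction
     = 32 - 3 * 8 = 8, so the first extraction below starts 8 bits into
     the first word of SRCREG.  */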
2145
2146   /* Copy the structure BITSIZE bits at a time.
2147
2148 We could probably emit more efficient code for machines which do not use
2149 strict alignment, but it doesn't seem worth the effort at the current
2150 time. */
2151 for (bitpos = 0, xbitpos = padding_correction;
2152 bitpos < bytes * BITS_PER_UNIT;
2153 bitpos += bitsize, xbitpos += bitsize)
2154 {
2155 /* We need a new source operand each time xbitpos is on a
2156 word boundary and when xbitpos == padding_correction
2157 (the first time through). */
2158 if (xbitpos % BITS_PER_WORD == 0
2159 || xbitpos == padding_correction)
2160 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2161 GET_MODE (srcreg));
2162
2163 /* We need a new destination operand each time bitpos is on
2164 a word boundary. */
2165 if (bitpos % BITS_PER_WORD == 0)
2166 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2167
2168 /* Use xbitpos for the source extraction (right justified) and
2169          bitpos for the destination store (left justified).  */
2170 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2171 extract_bit_field (src, bitsize,
2172 xbitpos % BITS_PER_WORD, 1,
2173 NULL_RTX, word_mode, word_mode,
2174 BITS_PER_WORD),
2175 BITS_PER_WORD);
2176 }
2177
2178 return tgtblk;
2179 }
2180
2181 /* Add a USE expression for REG to the (possibly empty) list pointed
2182 to by CALL_FUSAGE. REG must denote a hard register. */
2183
2184 void
2185 use_reg (rtx *call_fusage, rtx reg)
2186 {
2187 if (GET_CODE (reg) != REG
2188 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2189 abort ();
2190
2191 *call_fusage
2192 = gen_rtx_EXPR_LIST (VOIDmode,
2193 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2194 }
2195
2196 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2197 starting at REGNO. All of these registers must be hard registers. */
2198
2199 void
2200 use_regs (rtx *call_fusage, int regno, int nregs)
2201 {
2202 int i;
2203
2204 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2205 abort ();
2206
2207 for (i = 0; i < nregs; i++)
2208 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2209 }
2210
2211 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2212 PARALLEL REGS. This is for calls that pass values in multiple
2213 non-contiguous locations. The Irix 6 ABI has examples of this. */
2214
2215 void
2216 use_group_regs (rtx *call_fusage, rtx regs)
2217 {
2218 int i;
2219
2220 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 {
2222 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223
2224 /* A NULL entry means the parameter goes both on the stack and in
2225 registers. This can also be a MEM for targets that pass values
2226 partially on the stack and partially in registers. */
2227 if (reg != 0 && GET_CODE (reg) == REG)
2228 use_reg (call_fusage, reg);
2229 }
2230 }
2231 \f
2232
2233 /* Determine whether the LEN bytes generated by CONSTFUN can be
2234 stored to memory using several move instructions. CONSTFUNDATA is
2235 a pointer which will be passed as argument in every CONSTFUN call.
2236 ALIGN is maximum alignment we can assume. Return nonzero if a
2237 call to store_by_pieces should succeed. */
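/* A typical pairing with store_by_pieces, sketched for illustration only
   (the callback name is hypothetical; real callers supply their own
   constant-generating functions):

     static rtx
     zero_piece (void *data ATTRIBUTE_UNUSED,
                 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                 enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     ...
     if (can_store_by_pieces (len, zero_piece, NULL, align))
       store_by_pieces (dest_mem, len, zero_piece, NULL, align, 0);
*/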
2238
2239 int
2240 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2241 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2242 void *constfundata, unsigned int align)
2243 {
2244 unsigned HOST_WIDE_INT max_size, l;
2245 HOST_WIDE_INT offset = 0;
2246 enum machine_mode mode, tmode;
2247 enum insn_code icode;
2248 int reverse;
2249 rtx cst;
2250
2251 if (len == 0)
2252 return 1;
2253
2254 if (! STORE_BY_PIECES_P (len, align))
2255 return 0;
2256
2257 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2258 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2259 align = MOVE_MAX * BITS_PER_UNIT;
2260
2261 /* We would first store what we can in the largest integer mode, then go to
2262 successively smaller modes. */
2263
2264 for (reverse = 0;
2265 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2266 reverse++)
2267 {
2268 l = len;
2269 mode = VOIDmode;
2270 max_size = STORE_MAX_PIECES + 1;
2271 while (max_size > 1)
2272 {
2273 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2274 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2275 if (GET_MODE_SIZE (tmode) < max_size)
2276 mode = tmode;
2277
2278 if (mode == VOIDmode)
2279 break;
2280
2281 icode = mov_optab->handlers[(int) mode].insn_code;
2282 if (icode != CODE_FOR_nothing
2283 && align >= GET_MODE_ALIGNMENT (mode))
2284 {
2285 unsigned int size = GET_MODE_SIZE (mode);
2286
2287 while (l >= size)
2288 {
2289 if (reverse)
2290 offset -= size;
2291
2292 cst = (*constfun) (constfundata, offset, mode);
2293 if (!LEGITIMATE_CONSTANT_P (cst))
2294 return 0;
2295
2296 if (!reverse)
2297 offset += size;
2298
2299 l -= size;
2300 }
2301 }
2302
2303 max_size = GET_MODE_SIZE (mode);
2304 }
2305
2306 /* The code above should have handled everything. */
2307 if (l != 0)
2308 abort ();
2309 }
2310
2311 return 1;
2312 }
2313
2314 /* Generate several move instructions to store LEN bytes generated by
2315 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2316 pointer which will be passed as argument in every CONSTFUN call.
2317 ALIGN is maximum alignment we can assume.
2318 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2319    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2320    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2321
2322 rtx
2323 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2324 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2325 void *constfundata, unsigned int align, int endp)
2326 {
2327 struct store_by_pieces data;
2328
2329 if (len == 0)
2330 {
2331 if (endp == 2)
2332 abort ();
2333 return to;
2334 }
2335
2336 if (! STORE_BY_PIECES_P (len, align))
2337 abort ();
2338 to = protect_from_queue (to, 1);
2339 data.constfun = constfun;
2340 data.constfundata = constfundata;
2341 data.len = len;
2342 data.to = to;
2343 store_by_pieces_1 (&data, align);
2344 if (endp)
2345 {
2346 rtx to1;
2347
2348 if (data.reverse)
2349 abort ();
2350 if (data.autinc_to)
2351 {
2352 if (endp == 2)
2353 {
2354 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2355 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2356 else
2357 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2358 -1));
2359 }
2360 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2361 data.offset);
2362 }
2363 else
2364 {
2365 if (endp == 2)
2366 --data.offset;
2367 to1 = adjust_address (data.to, QImode, data.offset);
2368 }
2369 return to1;
2370 }
2371 else
2372 return data.to;
2373 }
2374
2375 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2376 rtx with BLKmode). The caller must pass TO through protect_from_queue
2377 before calling. ALIGN is maximum alignment we can assume. */
2378
2379 static void
2380 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2381 {
2382 struct store_by_pieces data;
2383
2384 if (len == 0)
2385 return;
2386
2387 data.constfun = clear_by_pieces_1;
2388 data.constfundata = NULL;
2389 data.len = len;
2390 data.to = to;
2391 store_by_pieces_1 (&data, align);
2392 }
2393
2394 /* Callback routine for clear_by_pieces.
2395 Return const0_rtx unconditionally. */
2396
2397 static rtx
2398 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2399 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2400 enum machine_mode mode ATTRIBUTE_UNUSED)
2401 {
2402 return const0_rtx;
2403 }
2404
2405 /* Subroutine of clear_by_pieces and store_by_pieces.
2406 Generate several move instructions to store LEN bytes of block TO. (A MEM
2407 rtx with BLKmode). The caller must pass TO through protect_from_queue
2408 before calling. ALIGN is maximum alignment we can assume. */
2409
2410 static void
2411 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2412 unsigned int align ATTRIBUTE_UNUSED)
2413 {
2414 rtx to_addr = XEXP (data->to, 0);
2415 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2416 enum machine_mode mode = VOIDmode, tmode;
2417 enum insn_code icode;
2418
2419 data->offset = 0;
2420 data->to_addr = to_addr;
2421 data->autinc_to
2422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2424
2425 data->explicit_inc_to = 0;
2426 data->reverse
2427 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2428 if (data->reverse)
2429 data->offset = data->len;
2430
2431 /* If storing requires more than two move insns,
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
2434 if (!data->autinc_to
2435 && move_by_pieces_ninsns (data->len, align) > 2)
2436 {
2437 /* Determine the main mode we'll be using. */
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2441 mode = tmode;
2442
2443 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2444 {
2445 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2446 data->autinc_to = 1;
2447 data->explicit_inc_to = -1;
2448 }
2449
2450 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2451 && ! data->autinc_to)
2452 {
2453 data->to_addr = copy_addr_to_reg (to_addr);
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = 1;
2456 }
2457
2458       if (!data->autinc_to && CONSTANT_P (to_addr))
2459 data->to_addr = copy_addr_to_reg (to_addr);
2460 }
2461
2462 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2463 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2464 align = MOVE_MAX * BITS_PER_UNIT;
2465
2466 /* First store what we can in the largest integer mode, then go to
2467 successively smaller modes. */
2468
2469 while (max_size > 1)
2470 {
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2474 mode = tmode;
2475
2476 if (mode == VOIDmode)
2477 break;
2478
2479 icode = mov_optab->handlers[(int) mode].insn_code;
2480 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2481 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2482
2483 max_size = GET_MODE_SIZE (mode);
2484 }
2485
2486 /* The code above should have handled everything. */
2487 if (data->len != 0)
2488 abort ();
2489 }
2490
2491 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
2494
2495 static void
2496 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2497 struct store_by_pieces *data)
2498 {
2499 unsigned int size = GET_MODE_SIZE (mode);
2500 rtx to1, cst;
2501
2502 while (data->len >= size)
2503 {
2504 if (data->reverse)
2505 data->offset -= size;
2506
2507 if (data->autinc_to)
2508 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2509 data->offset);
2510 else
2511 to1 = adjust_address (data->to, mode, data->offset);
2512
2513 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2514 emit_insn (gen_add2_insn (data->to_addr,
2515 GEN_INT (-(HOST_WIDE_INT) size)));
2516
2517 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2518 emit_insn ((*genfun) (to1, cst));
2519
2520 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2521 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2522
2523 if (! data->reverse)
2524 data->offset += size;
2525
2526 data->len -= size;
2527 }
2528 }
2529 \f
2530 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2531 its length in bytes. */
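/* For example (illustrative only), zeroing a 32-byte BLKmode MEM could be
   written as

     clear_storage (mem, GEN_INT (32));

   and the code below picks between clearing by pieces, a clrstr pattern,
   or a library call to memset/bzero.  */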
2532
2533 rtx
2534 clear_storage (rtx object, rtx size)
2535 {
2536 rtx retval = 0;
2537 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2538 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2539
2540 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2541 just move a zero. Otherwise, do this a piece at a time. */
2542 if (GET_MODE (object) != BLKmode
2543 && GET_CODE (size) == CONST_INT
2544 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2545 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2546 else
2547 {
2548 object = protect_from_queue (object, 1);
2549 size = protect_from_queue (size, 0);
2550
2551 if (size == const0_rtx)
2552 ;
2553 else if (GET_CODE (size) == CONST_INT
2554 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2555 clear_by_pieces (object, INTVAL (size), align);
2556 else if (clear_storage_via_clrstr (object, size, align))
2557 ;
2558 else
2559 retval = clear_storage_via_libcall (object, size);
2560 }
2561
2562 return retval;
2563 }
2564
2565 /* A subroutine of clear_storage. Expand a clrstr pattern;
2566 return true if successful. */
2567
2568 static bool
2569 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2570 {
2571 /* Try the most limited insn first, because there's no point
2572 including more than one in the machine description unless
2573 the more limited one has some advantage. */
2574
2575 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2576 enum machine_mode mode;
2577
2578 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2579 mode = GET_MODE_WIDER_MODE (mode))
2580 {
2581 enum insn_code code = clrstr_optab[(int) mode];
2582 insn_operand_predicate_fn pred;
2583
2584 if (code != CODE_FOR_nothing
2585 /* We don't need MODE to be narrower than
2586 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2587 the mode mask, as it is returned by the macro, it will
2588 definitely be less than the actual mode mask. */
2589 && ((GET_CODE (size) == CONST_INT
2590 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2591 <= (GET_MODE_MASK (mode) >> 1)))
2592 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2593 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2594 || (*pred) (object, BLKmode))
2595 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2596 || (*pred) (opalign, VOIDmode)))
2597 {
2598 rtx op1;
2599 rtx last = get_last_insn ();
2600 rtx pat;
2601
2602 op1 = convert_to_mode (mode, size, 1);
2603 pred = insn_data[(int) code].operand[1].predicate;
2604 if (pred != 0 && ! (*pred) (op1, mode))
2605 op1 = copy_to_mode_reg (mode, op1);
2606
2607 pat = GEN_FCN ((int) code) (object, op1, opalign);
2608 if (pat)
2609 {
2610 emit_insn (pat);
2611 return true;
2612 }
2613 else
2614 delete_insns_since (last);
2615 }
2616 }
2617
2618 return false;
2619 }
2620
2621 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2622 Return the return value of memset, 0 otherwise. */
2623
2624 static rtx
2625 clear_storage_via_libcall (rtx object, rtx size)
2626 {
2627 tree call_expr, arg_list, fn, object_tree, size_tree;
2628 enum machine_mode size_mode;
2629 rtx retval;
2630
2631 /* OBJECT or SIZE may have been passed through protect_from_queue.
2632
2633 It is unsafe to save the value generated by protect_from_queue
2634 and reuse it later. Consider what happens if emit_queue is
2635 called before the return value from protect_from_queue is used.
2636
2637 Expansion of the CALL_EXPR below will call emit_queue before
2638 we are finished emitting RTL for argument setup. So if we are
2639 not careful we could get the wrong value for an argument.
2640
2641 To avoid this problem we go ahead and emit code to copy OBJECT
2642 and SIZE into new pseudos. We can then place those new pseudos
2643 into an RTL_EXPR and use them later, even after a call to
2644 emit_queue.
2645
2646 Note this is not strictly needed for library calls since they
2647 do not call emit_queue before loading their arguments. However,
2648 we may need to have library calls call emit_queue in the future
2649 since failing to do so could cause problems for targets which
2650 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2651
2652 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2653
2654 if (TARGET_MEM_FUNCTIONS)
2655 size_mode = TYPE_MODE (sizetype);
2656 else
2657 size_mode = TYPE_MODE (unsigned_type_node);
2658 size = convert_to_mode (size_mode, size, 1);
2659 size = copy_to_mode_reg (size_mode, size);
2660
2661 /* It is incorrect to use the libcall calling conventions to call
2662 memset in this context. This could be a user call to memset and
2663 the user may wish to examine the return value from memset. For
2664 targets where libcalls and normal calls have different conventions
2665 for returning pointers, we could end up generating incorrect code.
2666
2667 For convenience, we generate the call to bzero this way as well. */
2668
2669 object_tree = make_tree (ptr_type_node, object);
2670 if (TARGET_MEM_FUNCTIONS)
2671 size_tree = make_tree (sizetype, size);
2672 else
2673 size_tree = make_tree (unsigned_type_node, size);
2674
2675 fn = clear_storage_libcall_fn (true);
2676 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2677 if (TARGET_MEM_FUNCTIONS)
2678 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2679 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2680
2681 /* Now we have to build up the CALL_EXPR itself. */
2682 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2683 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2684 call_expr, arg_list, NULL_TREE);
2685
2686 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2687
2688 /* If we are initializing a readonly value, show the above call
2689 clobbered it. Otherwise, a load from it may erroneously be
2690 hoisted from a loop. */
2691 if (RTX_UNCHANGING_P (object))
2692 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2693
2694 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2695 }
2696
2697 /* A subroutine of clear_storage_via_libcall. Create the tree node
2698 for the function we use for block clears. The first time FOR_CALL
2699 is true, we call assemble_external. */
2700
2701 static GTY(()) tree block_clear_fn;
2702
2703 void
2704 init_block_clear_fn (const char *asmspec)
2705 {
2706 if (!block_clear_fn)
2707 {
2708 tree fn, args;
2709
2710 if (TARGET_MEM_FUNCTIONS)
2711 {
2712 fn = get_identifier ("memset");
2713 args = build_function_type_list (ptr_type_node, ptr_type_node,
2714 integer_type_node, sizetype,
2715 NULL_TREE);
2716 }
2717 else
2718 {
2719 fn = get_identifier ("bzero");
2720 args = build_function_type_list (void_type_node, ptr_type_node,
2721 unsigned_type_node, NULL_TREE);
2722 }
2723
2724 fn = build_decl (FUNCTION_DECL, fn, args);
2725 DECL_EXTERNAL (fn) = 1;
2726 TREE_PUBLIC (fn) = 1;
2727 DECL_ARTIFICIAL (fn) = 1;
2728 TREE_NOTHROW (fn) = 1;
2729
2730 block_clear_fn = fn;
2731 }
2732
2733 if (asmspec)
2734 {
2735 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2736 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2737 }
2738 }
2739
2740 static tree
2741 clear_storage_libcall_fn (int for_call)
2742 {
2743 static bool emitted_extern;
2744
2745 if (!block_clear_fn)
2746 init_block_clear_fn (NULL);
2747
2748 if (for_call && !emitted_extern)
2749 {
2750 emitted_extern = true;
2751 make_decl_rtl (block_clear_fn, NULL);
2752 assemble_external (block_clear_fn);
2753 }
2754
2755 return block_clear_fn;
2756 }
2757 \f
2758 /* Generate code to copy Y into X.
2759 Both Y and X must have the same mode, except that
2760 Y can be a constant with VOIDmode.
2761 This mode cannot be BLKmode; use emit_block_move for that.
2762
2763 Return the last instruction emitted. */
2764
2765 rtx
2766 emit_move_insn (rtx x, rtx y)
2767 {
2768 enum machine_mode mode = GET_MODE (x);
2769 rtx y_cst = NULL_RTX;
2770 rtx last_insn, set;
2771
2772 x = protect_from_queue (x, 1);
2773 y = protect_from_queue (y, 0);
2774
2775 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2776 abort ();
2777
2778 /* Never force constant_p_rtx to memory. */
2779 if (GET_CODE (y) == CONSTANT_P_RTX)
2780 ;
2781 else if (CONSTANT_P (y))
2782 {
2783 if (optimize
2784 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2785 && (last_insn = compress_float_constant (x, y)))
2786 return last_insn;
2787
2788 y_cst = y;
2789
2790 if (!LEGITIMATE_CONSTANT_P (y))
2791 {
2792 y = force_const_mem (mode, y);
2793
2794 /* If the target's cannot_force_const_mem prevented the spill,
2795 assume that the target's move expanders will also take care
2796 of the non-legitimate constant. */
2797 if (!y)
2798 y = y_cst;
2799 }
2800 }
2801
2802 /* If X or Y are memory references, verify that their addresses are valid
2803 for the machine. */
2804 if (GET_CODE (x) == MEM
2805 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2806 && ! push_operand (x, GET_MODE (x)))
2807 || (flag_force_addr
2808 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2809 x = validize_mem (x);
2810
2811 if (GET_CODE (y) == MEM
2812 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2813 || (flag_force_addr
2814 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2815 y = validize_mem (y);
2816
2817 if (mode == BLKmode)
2818 abort ();
2819
2820 last_insn = emit_move_insn_1 (x, y);
2821
2822 if (y_cst && GET_CODE (x) == REG
2823 && (set = single_set (last_insn)) != NULL_RTX
2824 && SET_DEST (set) == x
2825 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2826 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2827
2828 return last_insn;
2829 }
2830
2831 /* Low level part of emit_move_insn.
2832 Called just like emit_move_insn, but assumes X and Y
2833 are basically valid. */
2834
2835 rtx
2836 emit_move_insn_1 (rtx x, rtx y)
2837 {
2838 enum machine_mode mode = GET_MODE (x);
2839 enum machine_mode submode;
2840 enum mode_class class = GET_MODE_CLASS (mode);
2841
2842 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2843 abort ();
2844
2845 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2846 return
2847 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2848
2849 /* Expand complex moves by moving real part and imag part, if possible. */
2850 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2851 && BLKmode != (submode = GET_MODE_INNER (mode))
2852 && (mov_optab->handlers[(int) submode].insn_code
2853 != CODE_FOR_nothing))
2854 {
2855 /* Don't split destination if it is a stack push. */
2856 int stack = push_operand (x, GET_MODE (x));
2857
2858 #ifdef PUSH_ROUNDING
2859 /* In case we output to the stack, but the size is smaller than the
2860 machine can push exactly, we need to use move instructions. */
2861 if (stack
2862 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2863 != GET_MODE_SIZE (submode)))
2864 {
2865 rtx temp;
2866 HOST_WIDE_INT offset1, offset2;
2867
2868 /* Do not use anti_adjust_stack, since we don't want to update
2869 stack_pointer_delta. */
2870 temp = expand_binop (Pmode,
2871 #ifdef STACK_GROWS_DOWNWARD
2872 sub_optab,
2873 #else
2874 add_optab,
2875 #endif
2876 stack_pointer_rtx,
2877 GEN_INT
2878 (PUSH_ROUNDING
2879 (GET_MODE_SIZE (GET_MODE (x)))),
2880 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2881
2882 if (temp != stack_pointer_rtx)
2883 emit_move_insn (stack_pointer_rtx, temp);
2884
2885 #ifdef STACK_GROWS_DOWNWARD
2886 offset1 = 0;
2887 offset2 = GET_MODE_SIZE (submode);
2888 #else
2889 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2890 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2891 + GET_MODE_SIZE (submode));
2892 #endif
2893
2894 emit_move_insn (change_address (x, submode,
2895 gen_rtx_PLUS (Pmode,
2896 stack_pointer_rtx,
2897 GEN_INT (offset1))),
2898 gen_realpart (submode, y));
2899 emit_move_insn (change_address (x, submode,
2900 gen_rtx_PLUS (Pmode,
2901 stack_pointer_rtx,
2902 GEN_INT (offset2))),
2903 gen_imagpart (submode, y));
2904 }
2905 else
2906 #endif
2907         /* If this is a stack push, push the highpart first, so it
2908 will be in the argument order.
2909
2910 In that case, change_address is used only to convert
2911 the mode, not to change the address. */
2912 if (stack)
2913 {
2914 /* Note that the real part always precedes the imag part in memory
2915 regardless of machine's endianness. */
2916 #ifdef STACK_GROWS_DOWNWARD
2917 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2918 gen_imagpart (submode, y));
2919 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2920 gen_realpart (submode, y));
2921 #else
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_realpart (submode, y));
2924 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2925 gen_imagpart (submode, y));
2926 #endif
2927 }
2928 else
2929 {
2930 rtx realpart_x, realpart_y;
2931 rtx imagpart_x, imagpart_y;
2932
2933 /* If this is a complex value with each part being smaller than a
2934 word, the usual calling sequence will likely pack the pieces into
2935 a single register. Unfortunately, SUBREG of hard registers only
2936 deals in terms of words, so we have a problem converting input
2937 arguments to the CONCAT of two registers that is used elsewhere
2938 for complex values. If this is before reload, we can copy it into
2939 memory and reload. FIXME, we should see about using extract and
2940 insert on integer registers, but complex short and complex char
2941 variables should be rarely used. */
2942 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2943 && (reload_in_progress | reload_completed) == 0)
2944 {
2945 int packed_dest_p
2946 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2947 int packed_src_p
2948 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2949
2950 if (packed_dest_p || packed_src_p)
2951 {
2952 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2953 ? MODE_FLOAT : MODE_INT);
2954
2955 enum machine_mode reg_mode
2956 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2957
2958 if (reg_mode != BLKmode)
2959 {
2960 rtx mem = assign_stack_temp (reg_mode,
2961 GET_MODE_SIZE (mode), 0);
2962 rtx cmem = adjust_address (mem, mode, 0);
2963
2964 cfun->cannot_inline
2965 = N_("function using short complex types cannot be inline");
2966
2967 if (packed_dest_p)
2968 {
2969 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2970
2971 emit_move_insn_1 (cmem, y);
2972 return emit_move_insn_1 (sreg, mem);
2973 }
2974 else
2975 {
2976 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2977
2978 emit_move_insn_1 (mem, sreg);
2979 return emit_move_insn_1 (x, cmem);
2980 }
2981 }
2982 }
2983 }
2984
2985 realpart_x = gen_realpart (submode, x);
2986 realpart_y = gen_realpart (submode, y);
2987 imagpart_x = gen_imagpart (submode, x);
2988 imagpart_y = gen_imagpart (submode, y);
2989
2990 /* Show the output dies here. This is necessary for SUBREGs
2991 of pseudos since we cannot track their lifetimes correctly;
2992 hard regs shouldn't appear here except as return values.
2993 We never want to emit such a clobber after reload. */
2994 if (x != y
2995 && ! (reload_in_progress || reload_completed)
2996 && (GET_CODE (realpart_x) == SUBREG
2997 || GET_CODE (imagpart_x) == SUBREG))
2998 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2999
3000 emit_move_insn (realpart_x, realpart_y);
3001 emit_move_insn (imagpart_x, imagpart_y);
3002 }
3003
3004 return get_last_insn ();
3005 }
3006
3007 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3008 find a mode to do it in. If we have a movcc, use it. Otherwise,
3009 find the MODE_INT mode of the same width. */
3010 else if (GET_MODE_CLASS (mode) == MODE_CC
3011 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3012 {
3013 enum insn_code insn_code;
3014 enum machine_mode tmode = VOIDmode;
3015 rtx x1 = x, y1 = y;
3016
3017 if (mode != CCmode
3018 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3019 tmode = CCmode;
3020 else
3021 for (tmode = QImode; tmode != VOIDmode;
3022 tmode = GET_MODE_WIDER_MODE (tmode))
3023 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3024 break;
3025
3026 if (tmode == VOIDmode)
3027 abort ();
3028
3029 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3030 may call change_address which is not appropriate if we were
3031 called when a reload was in progress. We don't have to worry
3032 about changing the address since the size in bytes is supposed to
3033 be the same. Copy the MEM to change the mode and move any
3034 substitutions from the old MEM to the new one. */
3035
3036 if (reload_in_progress)
3037 {
3038 x = gen_lowpart_common (tmode, x1);
3039 if (x == 0 && GET_CODE (x1) == MEM)
3040 {
3041 x = adjust_address_nv (x1, tmode, 0);
3042 copy_replacements (x1, x);
3043 }
3044
3045 y = gen_lowpart_common (tmode, y1);
3046 if (y == 0 && GET_CODE (y1) == MEM)
3047 {
3048 y = adjust_address_nv (y1, tmode, 0);
3049 copy_replacements (y1, y);
3050 }
3051 }
3052 else
3053 {
3054 x = gen_lowpart (tmode, x);
3055 y = gen_lowpart (tmode, y);
3056 }
3057
3058 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3059 return emit_insn (GEN_FCN (insn_code) (x, y));
3060 }
3061
3062 /* Try using a move pattern for the corresponding integer mode. This is
3063 only safe when simplify_subreg can convert MODE constants into integer
3064 constants. At present, it can only do this reliably if the value
3065 fits within a HOST_WIDE_INT. */
3066 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3067 && (submode = int_mode_for_mode (mode)) != BLKmode
3068 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3069 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3070 (simplify_gen_subreg (submode, x, mode, 0),
3071 simplify_gen_subreg (submode, y, mode, 0)));
3072
3073 /* This will handle any multi-word or full-word mode that lacks a move_insn
3074 pattern. However, you will get better code if you define such patterns,
3075 even if they must turn into multiple assembler instructions. */
3076 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3077 {
3078 rtx last_insn = 0;
3079 rtx seq, inner;
3080 int need_clobber;
3081 int i;
3082
3083 #ifdef PUSH_ROUNDING
3084
3085 /* If X is a push on the stack, do the push now and replace
3086 X with a reference to the stack pointer. */
3087 if (push_operand (x, GET_MODE (x)))
3088 {
3089 rtx temp;
3090 enum rtx_code code;
3091
3092 /* Do not use anti_adjust_stack, since we don't want to update
3093 stack_pointer_delta. */
3094 temp = expand_binop (Pmode,
3095 #ifdef STACK_GROWS_DOWNWARD
3096 sub_optab,
3097 #else
3098 add_optab,
3099 #endif
3100 stack_pointer_rtx,
3101 GEN_INT
3102 (PUSH_ROUNDING
3103 (GET_MODE_SIZE (GET_MODE (x)))),
3104 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3105
3106 if (temp != stack_pointer_rtx)
3107 emit_move_insn (stack_pointer_rtx, temp);
3108
3109 code = GET_CODE (XEXP (x, 0));
3110
3111 /* Just hope that small offsets off SP are OK. */
3112 if (code == POST_INC)
3113 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3114 GEN_INT (-((HOST_WIDE_INT)
3115 GET_MODE_SIZE (GET_MODE (x)))));
3116 else if (code == POST_DEC)
3117 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3118 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3119 else
3120 temp = stack_pointer_rtx;
3121
3122 x = change_address (x, VOIDmode, temp);
3123 }
3124 #endif
3125
3126 /* If we are in reload, see if either operand is a MEM whose address
3127 is scheduled for replacement. */
3128 if (reload_in_progress && GET_CODE (x) == MEM
3129 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3130 x = replace_equiv_address_nv (x, inner);
3131 if (reload_in_progress && GET_CODE (y) == MEM
3132 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3133 y = replace_equiv_address_nv (y, inner);
3134
3135 start_sequence ();
3136
3137 need_clobber = 0;
3138 for (i = 0;
3139 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3140 i++)
3141 {
3142 rtx xpart = operand_subword (x, i, 1, mode);
3143 rtx ypart = operand_subword (y, i, 1, mode);
3144
3145 /* If we can't get a part of Y, put Y into memory if it is a
3146 constant. Otherwise, force it into a register. If we still
3147 can't get a part of Y, abort. */
3148 if (ypart == 0 && CONSTANT_P (y))
3149 {
3150 y = force_const_mem (mode, y);
3151 ypart = operand_subword (y, i, 1, mode);
3152 }
3153 else if (ypart == 0)
3154 ypart = operand_subword_force (y, i, mode);
3155
3156 if (xpart == 0 || ypart == 0)
3157 abort ();
3158
3159 need_clobber |= (GET_CODE (xpart) == SUBREG);
3160
3161 last_insn = emit_move_insn (xpart, ypart);
3162 }
3163
3164 seq = get_insns ();
3165 end_sequence ();
3166
3167 /* Show the output dies here. This is necessary for SUBREGs
3168 of pseudos since we cannot track their lifetimes correctly;
3169 hard regs shouldn't appear here except as return values.
3170 We never want to emit such a clobber after reload. */
3171 if (x != y
3172 && ! (reload_in_progress || reload_completed)
3173 && need_clobber != 0)
3174 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3175
3176 emit_insn (seq);
3177
3178 return last_insn;
3179 }
3180 else
3181 abort ();
3182 }
3183
3184 /* If Y is representable exactly in a narrower mode, and the target can
3185 perform the extension directly from constant or memory, then emit the
3186 move as an extension. */
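/* For instance (a hedged example, not specific to any target): a DFmode
   constant such as 1.0 that passes exact_real_truncate to SFmode may be
   emitted as an extension from an SFmode constant-pool entry, provided the
   target advertises the corresponding extend pattern via can_extend_p.  */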
3187
3188 static rtx
3189 compress_float_constant (rtx x, rtx y)
3190 {
3191 enum machine_mode dstmode = GET_MODE (x);
3192 enum machine_mode orig_srcmode = GET_MODE (y);
3193 enum machine_mode srcmode;
3194 REAL_VALUE_TYPE r;
3195
3196 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3197
3198 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3199 srcmode != orig_srcmode;
3200 srcmode = GET_MODE_WIDER_MODE (srcmode))
3201 {
3202 enum insn_code ic;
3203 rtx trunc_y, last_insn;
3204
3205 /* Skip if the target can't extend this way. */
3206 ic = can_extend_p (dstmode, srcmode, 0);
3207 if (ic == CODE_FOR_nothing)
3208 continue;
3209
3210 /* Skip if the narrowed value isn't exact. */
3211 if (! exact_real_truncate (srcmode, &r))
3212 continue;
3213
3214 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3215
3216 if (LEGITIMATE_CONSTANT_P (trunc_y))
3217 {
3218 /* Skip if the target needs extra instructions to perform
3219 the extension. */
3220 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3221 continue;
3222 }
3223 else if (float_extend_from_mem[dstmode][srcmode])
3224 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3225 else
3226 continue;
3227
3228 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3229 last_insn = get_last_insn ();
3230
3231 if (GET_CODE (x) == REG)
3232 set_unique_reg_note (last_insn, REG_EQUAL, y);
3233
3234 return last_insn;
3235 }
3236
3237 return NULL_RTX;
3238 }
3239 \f
3240 /* Pushing data onto the stack. */
3241
3242 /* Push a block of length SIZE (perhaps variable)
3243 and return an rtx to address the beginning of the block.
3244 Note that it is not possible for the value returned to be a QUEUED.
3245 The value may be virtual_outgoing_args_rtx.
3246
3247 EXTRA is the number of bytes of padding to push in addition to SIZE.
3248 BELOW nonzero means this padding comes at low addresses;
3249 otherwise, the padding comes at high addresses. */
3250
3251 rtx
3252 push_block (rtx size, int extra, int below)
3253 {
3254 rtx temp;
3255
3256 size = convert_modes (Pmode, ptr_mode, size, 1);
3257 if (CONSTANT_P (size))
3258 anti_adjust_stack (plus_constant (size, extra));
3259 else if (GET_CODE (size) == REG && extra == 0)
3260 anti_adjust_stack (size);
3261 else
3262 {
3263 temp = copy_to_mode_reg (Pmode, size);
3264 if (extra != 0)
3265 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3266 temp, 0, OPTAB_LIB_WIDEN);
3267 anti_adjust_stack (temp);
3268 }
3269
3270 #ifndef STACK_GROWS_DOWNWARD
3271 if (0)
3272 #else
3273 if (1)
3274 #endif
3275 {
3276 temp = virtual_outgoing_args_rtx;
3277 if (extra != 0 && below)
3278 temp = plus_constant (temp, extra);
3279 }
3280 else
3281 {
3282 if (GET_CODE (size) == CONST_INT)
3283 temp = plus_constant (virtual_outgoing_args_rtx,
3284 -INTVAL (size) - (below ? 0 : extra));
3285 else if (extra != 0 && !below)
3286 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3287 negate_rtx (Pmode, plus_constant (size, extra)));
3288 else
3289 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3290 negate_rtx (Pmode, size));
3291 }
3292
3293 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3294 }
3295
3296 #ifdef PUSH_ROUNDING
3297
3298 /* Emit single push insn. */
3299
3300 static void
3301 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3302 {
3303 rtx dest_addr;
3304 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3305 rtx dest;
3306 enum insn_code icode;
3307 insn_operand_predicate_fn pred;
3308
3309 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3310   /* If there is a push pattern, use it.  Otherwise try the old way of
3311      throwing a MEM representing the push operation to the move expander.  */
3312 icode = push_optab->handlers[(int) mode].insn_code;
3313 if (icode != CODE_FOR_nothing)
3314 {
3315 if (((pred = insn_data[(int) icode].operand[0].predicate)
3316 && !((*pred) (x, mode))))
3317 x = force_reg (mode, x);
3318 emit_insn (GEN_FCN (icode) (x));
3319 return;
3320 }
3321 if (GET_MODE_SIZE (mode) == rounded_size)
3322 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3323 /* If we are to pad downward, adjust the stack pointer first and
3324 then store X into the stack location using an offset. This is
3325 because emit_move_insn does not know how to pad; it does not have
3326 access to type. */
3327 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3328 {
3329 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3330 HOST_WIDE_INT offset;
3331
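      /* Worked example (illustrative only): for a 2-byte MODE that
         PUSH_ROUNDING rounds up to 4, rounded_size == 4 and
         padding_size == 2, so the address computed below places X two
         bytes into the slot and the padding ends up below the value.  */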
3332 emit_move_insn (stack_pointer_rtx,
3333 expand_binop (Pmode,
3334 #ifdef STACK_GROWS_DOWNWARD
3335 sub_optab,
3336 #else
3337 add_optab,
3338 #endif
3339 stack_pointer_rtx,
3340 GEN_INT (rounded_size),
3341 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3342
3343 offset = (HOST_WIDE_INT) padding_size;
3344 #ifdef STACK_GROWS_DOWNWARD
3345 if (STACK_PUSH_CODE == POST_DEC)
3346 /* We have already decremented the stack pointer, so get the
3347 previous value. */
3348 offset += (HOST_WIDE_INT) rounded_size;
3349 #else
3350 if (STACK_PUSH_CODE == POST_INC)
3351 /* We have already incremented the stack pointer, so get the
3352 previous value. */
3353 offset -= (HOST_WIDE_INT) rounded_size;
3354 #endif
3355 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3356 }
3357 else
3358 {
3359 #ifdef STACK_GROWS_DOWNWARD
3360 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3361 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3362 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3363 #else
3364 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3365 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3366 GEN_INT (rounded_size));
3367 #endif
3368 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3369 }
3370
3371 dest = gen_rtx_MEM (mode, dest_addr);
3372
3373 if (type != 0)
3374 {
3375 set_mem_attributes (dest, type, 1);
3376
3377 if (flag_optimize_sibling_calls)
3378 /* Function incoming arguments may overlap with sibling call
3379 outgoing arguments and we cannot allow reordering of reads
3380 from function arguments with stores to outgoing arguments
3381 of sibling calls. */
3382 set_mem_alias_set (dest, 0);
3383 }
3384 emit_move_insn (dest, x);
3385 }
3386 #endif
3387
3388 /* Generate code to push X onto the stack, assuming it has mode MODE and
3389 type TYPE.
3390 MODE is redundant except when X is a CONST_INT (since they don't
3391 carry mode info).
3392 SIZE is an rtx for the size of data to be copied (in bytes),
3393 needed only if X is BLKmode.
3394
3395 ALIGN (in bits) is maximum alignment we can assume.
3396
3397 If PARTIAL and REG are both nonzero, then copy that many of the first
3398 words of X into registers starting with REG, and push the rest of X.
3399 The amount of space pushed is decreased by PARTIAL words,
3400 rounded *down* to a multiple of PARM_BOUNDARY.
3401 REG must be a hard register in this case.
3402    If REG is zero but PARTIAL is not, take all other actions for an
3403 argument partially in registers, but do not actually load any
3404 registers.
3405
3406 EXTRA is the amount in bytes of extra space to leave next to this arg.
3407 This is ignored if an argument block has already been allocated.
3408
3409 On a machine that lacks real push insns, ARGS_ADDR is the address of
3410 the bottom of the argument block for this call. We use indexing off there
3411    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3412 argument block has not been preallocated.
3413
3414 ARGS_SO_FAR is the size of args previously pushed for this call.
3415
3416 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3417 for arguments passed in registers. If nonzero, it will be the number
3418 of bytes required. */
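/* Worked example of the PARTIAL convention (illustrative numbers): with
   4-byte words and PARTIAL == 2, the first 8 bytes of X go into REG and
   the next hard register, only the remainder of X is pushed, and the
   stack space reserved for the argument shrinks accordingly.  */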
3419
3420 void
3421 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3422 unsigned int align, int partial, rtx reg, int extra,
3423 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3424 rtx alignment_pad)
3425 {
3426 rtx xinner;
3427 enum direction stack_direction
3428 #ifdef STACK_GROWS_DOWNWARD
3429 = downward;
3430 #else
3431 = upward;
3432 #endif
3433
3434 /* Decide where to pad the argument: `downward' for below,
3435 `upward' for above, or `none' for don't pad it.
3436 Default is below for small data on big-endian machines; else above. */
3437 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3438
3439 /* Invert direction if stack is post-decrement.
3440 FIXME: why? */
3441 if (STACK_PUSH_CODE == POST_DEC)
3442 if (where_pad != none)
3443 where_pad = (where_pad == downward ? upward : downward);
3444
3445 xinner = x = protect_from_queue (x, 0);
3446
3447 if (mode == BLKmode)
3448 {
3449 /* Copy a block into the stack, entirely or partially. */
3450
3451 rtx temp;
3452 int used = partial * UNITS_PER_WORD;
3453 int offset;
3454 int skip;
3455
3456 if (reg && GET_CODE (reg) == PARALLEL)
3457 {
3458 /* Use the size of the elt to compute offset. */
3459 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3460 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3461 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3462 }
3463 else
3464 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3465
3466 if (size == 0)
3467 abort ();
3468
3469 used -= offset;
3470
3471 /* USED is now the # of bytes we need not copy to the stack
3472 because registers will take care of them. */
3473
3474 if (partial != 0)
3475 xinner = adjust_address (xinner, BLKmode, used);
3476
3477 /* If the partial register-part of the arg counts in its stack size,
3478 skip the part of stack space corresponding to the registers.
3479 Otherwise, start copying to the beginning of the stack space,
3480 by setting SKIP to 0. */
3481 skip = (reg_parm_stack_space == 0) ? 0 : used;
3482
3483 #ifdef PUSH_ROUNDING
3484 /* Do it with several push insns if that doesn't take lots of insns
3485 and if there is no difficulty with push insns that skip bytes
3486 on the stack for alignment purposes. */
3487 if (args_addr == 0
3488 && PUSH_ARGS
3489 && GET_CODE (size) == CONST_INT
3490 && skip == 0
3491 && MEM_ALIGN (xinner) >= align
3492 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3493 /* Here we avoid the case of a structure whose weak alignment
3494 forces many pushes of a small amount of data,
3495 and such small pushes do rounding that causes trouble. */
3496 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3497 || align >= BIGGEST_ALIGNMENT
3498 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3499 == (align / BITS_PER_UNIT)))
3500 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3501 {
3502 /* Push padding now if padding above and stack grows down,
3503 or if padding below and stack grows up.
3504 But if space already allocated, this has already been done. */
3505 if (extra && args_addr == 0
3506 && where_pad != none && where_pad != stack_direction)
3507 anti_adjust_stack (GEN_INT (extra));
3508
3509 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3510 }
3511 else
3512 #endif /* PUSH_ROUNDING */
3513 {
3514 rtx target;
3515
3516 /* Otherwise make space on the stack and copy the data
3517 to the address of that space. */
3518
3519 /* Deduct words put into registers from the size we must copy. */
3520 if (partial != 0)
3521 {
3522 if (GET_CODE (size) == CONST_INT)
3523 size = GEN_INT (INTVAL (size) - used);
3524 else
3525 size = expand_binop (GET_MODE (size), sub_optab, size,
3526 GEN_INT (used), NULL_RTX, 0,
3527 OPTAB_LIB_WIDEN);
3528 }
3529
3530 /* Get the address of the stack space.
3531 In this case, we do not deal with EXTRA separately.
3532 A single stack adjust will do. */
3533 if (! args_addr)
3534 {
3535 temp = push_block (size, extra, where_pad == downward);
3536 extra = 0;
3537 }
3538 else if (GET_CODE (args_so_far) == CONST_INT)
3539 temp = memory_address (BLKmode,
3540 plus_constant (args_addr,
3541 skip + INTVAL (args_so_far)));
3542 else
3543 temp = memory_address (BLKmode,
3544 plus_constant (gen_rtx_PLUS (Pmode,
3545 args_addr,
3546 args_so_far),
3547 skip));
3548
3549 if (!ACCUMULATE_OUTGOING_ARGS)
3550 {
3551 /* If the source is referenced relative to the stack pointer,
3552 copy it to another register to stabilize it. We do not need
3553 to do this if we know that we won't be changing sp. */
3554
3555 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3556 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3557 temp = copy_to_reg (temp);
3558 }
3559
3560 target = gen_rtx_MEM (BLKmode, temp);
3561
3562 if (type != 0)
3563 {
3564 set_mem_attributes (target, type, 1);
3565 /* Function incoming arguments may overlap with sibling call
3566 outgoing arguments and we cannot allow reordering of reads
3567 from function arguments with stores to outgoing arguments
3568 of sibling calls. */
3569 set_mem_alias_set (target, 0);
3570 }
3571
3572 /* ALIGN may well be better aligned than TYPE, e.g. due to
3573 PARM_BOUNDARY. Assume the caller isn't lying. */
3574 set_mem_align (target, align);
3575
3576 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3577 }
3578 }
3579 else if (partial > 0)
3580 {
3581 /* Scalar partly in registers. */
3582
3583 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3584 int i;
3585 int not_stack;
3586 /* # words of start of argument
3587 that we must make space for but need not store. */
3588 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3589 int args_offset = INTVAL (args_so_far);
3590 int skip;
3591
3592 /* Push padding now if padding above and stack grows down,
3593 or if padding below and stack grows up.
3594 But if space already allocated, this has already been done. */
3595 if (extra && args_addr == 0
3596 && where_pad != none && where_pad != stack_direction)
3597 anti_adjust_stack (GEN_INT (extra));
3598
3599 /* If we make space by pushing it, we might as well push
3600 the real data. Otherwise, we can leave OFFSET nonzero
3601 and leave the space uninitialized. */
3602 if (args_addr == 0)
3603 offset = 0;
3604
3605 /* Now NOT_STACK gets the number of words that we don't need to
3606 allocate on the stack. */
3607 not_stack = partial - offset;
3608
3609 /* If the partial register-part of the arg counts in its stack size,
3610 skip the part of stack space corresponding to the registers.
3611 Otherwise, start copying to the beginning of the stack space,
3612 by setting SKIP to 0. */
3613 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3614
3615 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3616 x = validize_mem (force_const_mem (mode, x));
3617
3618 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3619 SUBREGs of such registers are not allowed. */
3620 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3621 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3622 x = copy_to_reg (x);
3623
3624 /* Loop over all the words allocated on the stack for this arg. */
3625 /* We can do it by words, because any scalar bigger than a word
3626	 has a size that is a multiple of a word.  */
3627 #ifndef PUSH_ARGS_REVERSED
3628 for (i = not_stack; i < size; i++)
3629 #else
3630 for (i = size - 1; i >= not_stack; i--)
3631 #endif
3632 if (i >= not_stack + offset)
3633 emit_push_insn (operand_subword_force (x, i, mode),
3634 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3635 0, args_addr,
3636 GEN_INT (args_offset + ((i - not_stack + skip)
3637 * UNITS_PER_WORD)),
3638 reg_parm_stack_space, alignment_pad);
3639 }
3640 else
3641 {
3642 rtx addr;
3643 rtx dest;
3644
3645 /* Push padding now if padding above and stack grows down,
3646 or if padding below and stack grows up.
3647 But if space already allocated, this has already been done. */
3648 if (extra && args_addr == 0
3649 && where_pad != none && where_pad != stack_direction)
3650 anti_adjust_stack (GEN_INT (extra));
3651
3652 #ifdef PUSH_ROUNDING
3653 if (args_addr == 0 && PUSH_ARGS)
3654 emit_single_push_insn (mode, x, type);
3655 else
3656 #endif
3657 {
3658 if (GET_CODE (args_so_far) == CONST_INT)
3659 addr
3660 = memory_address (mode,
3661 plus_constant (args_addr,
3662 INTVAL (args_so_far)));
3663 else
3664 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3665 args_so_far));
3666 dest = gen_rtx_MEM (mode, addr);
3667 if (type != 0)
3668 {
3669 set_mem_attributes (dest, type, 1);
3670 /* Function incoming arguments may overlap with sibling call
3671 outgoing arguments and we cannot allow reordering of reads
3672 from function arguments with stores to outgoing arguments
3673 of sibling calls. */
3674 set_mem_alias_set (dest, 0);
3675 }
3676
3677 emit_move_insn (dest, x);
3678 }
3679 }
3680
3681 /* If part should go in registers, copy that part
3682 into the appropriate registers. Do this now, at the end,
3683 since mem-to-mem copies above may do function calls. */
3684 if (partial > 0 && reg != 0)
3685 {
3686 /* Handle calls that pass values in multiple non-contiguous locations.
3687 The Irix 6 ABI has examples of this. */
3688 if (GET_CODE (reg) == PARALLEL)
3689 emit_group_load (reg, x, type, -1);
3690 else
3691 move_block_to_reg (REGNO (reg), x, partial, mode);
3692 }
3693
3694 if (extra && args_addr == 0 && where_pad == stack_direction)
3695 anti_adjust_stack (GEN_INT (extra));
3696
3697 if (alignment_pad && args_addr == 0)
3698 anti_adjust_stack (alignment_pad);
3699 }
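
/* Illustrative worked example (annotation, not part of the original GCC
   source): in the "scalar partly in registers" branch above, suppose MODE
   occupies 5 words, PARTIAL is 2, and we are pushing (ARGS_ADDR == 0), so
   OFFSET is forced to 0.  Then

       size      = 5      words in the whole argument
       not_stack = 2      words already living in registers

   and the loop pushes operand_subword_force (x, i, mode) only for
   i = 2, 3, 4 (in either direction, depending on PUSH_ARGS_REVERSED),
   i.e. exactly the words that did not fit in registers; the register part
   itself is copied at the end by move_block_to_reg or emit_group_load.  */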
3700 \f
3701 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3702 operations. */
3703
3704 static rtx
3705 get_subtarget (rtx x)
3706 {
3707 return ((x == 0
3708 /* Only registers can be subtargets. */
3709 || GET_CODE (x) != REG
3710 /* If the register is readonly, it can't be set more than once. */
3711 || RTX_UNCHANGING_P (x)
3712	   /* Don't use hard regs, to avoid extending their lifetimes.  */
3713 || REGNO (x) < FIRST_PSEUDO_REGISTER
3714 /* Avoid subtargets inside loops,
3715 since they hide some invariant expressions. */
3716 || preserve_subexpressions_p ())
3717 ? 0 : x);
3718 }
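
/* Illustrative use of get_subtarget (annotation, not part of the original
   GCC source; the operand names op0, op1, mode, unsignedp are placeholders).
   A caller expanding a binary operation may offer TARGET as scratch space
   only when it is a safe pseudo:

       rtx subtarget = get_subtarget (target);
       temp = expand_binop (mode, add_optab, op0, op1, subtarget,
                            unsignedp, OPTAB_LIB_WIDEN);

   Hard registers, RTX_UNCHANGING_P registers and in-loop targets are
   filtered out above, so they are never reused as intermediate results.  */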
3719
3720 /* Expand an assignment that stores the value of FROM into TO.
3721 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3722 (This may contain a QUEUED rtx;
3723 if the value is constant, this rtx is a constant.)
3724 Otherwise, the returned value is NULL_RTX. */
3725
3726 rtx
3727 expand_assignment (tree to, tree from, int want_value)
3728 {
3729 rtx to_rtx = 0;
3730 rtx result;
3731
3732 /* Don't crash if the lhs of the assignment was erroneous. */
3733
3734 if (TREE_CODE (to) == ERROR_MARK)
3735 {
3736 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3737 return want_value ? result : NULL_RTX;
3738 }
3739
3740 /* Assignment of a structure component needs special treatment
3741 if the structure component's rtx is not simply a MEM.
3742 Assignment of an array element at a constant index, and assignment of
3743 an array element in an unaligned packed structure field, has the same
3744 problem. */
3745
3746 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3747 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3748 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3749 {
3750 enum machine_mode mode1;
3751 HOST_WIDE_INT bitsize, bitpos;
3752 rtx orig_to_rtx;
3753 tree offset;
3754 int unsignedp;
3755 int volatilep = 0;
3756 tree tem;
3757
3758 push_temp_slots ();
3759 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3760 &unsignedp, &volatilep);
3761
3762 /* If we are going to use store_bit_field and extract_bit_field,
3763 make sure to_rtx will be safe for multiple use. */
3764
3765 if (mode1 == VOIDmode && want_value)
3766 tem = stabilize_reference (tem);
3767
3768 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3769
3770 if (offset != 0)
3771 {
3772 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3773
3774 if (GET_CODE (to_rtx) != MEM)
3775 abort ();
3776
3777 #ifdef POINTERS_EXTEND_UNSIGNED
3778 if (GET_MODE (offset_rtx) != Pmode)
3779 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3780 #else
3781 if (GET_MODE (offset_rtx) != ptr_mode)
3782 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3783 #endif
3784
3785 /* A constant address in TO_RTX can have VOIDmode, we must not try
3786 to call force_reg for that case. Avoid that case. */
3787 if (GET_CODE (to_rtx) == MEM
3788 && GET_MODE (to_rtx) == BLKmode
3789 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3790 && bitsize > 0
3791 && (bitpos % bitsize) == 0
3792 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3793 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3794 {
3795 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3796 bitpos = 0;
3797 }
3798
3799 to_rtx = offset_address (to_rtx, offset_rtx,
3800 highest_pow2_factor_for_type (TREE_TYPE (to),
3801 offset));
3802 }
3803
3804 if (GET_CODE (to_rtx) == MEM)
3805 {
3806 /* If the field is at offset zero, we could have been given the
3807	     DECL_RTL of the parent struct.  Don't munge it.  */
3808 to_rtx = shallow_copy_rtx (to_rtx);
3809
3810 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3811 }
3812
3813 /* Deal with volatile and readonly fields. The former is only done
3814 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3815 if (volatilep && GET_CODE (to_rtx) == MEM)
3816 {
3817 if (to_rtx == orig_to_rtx)
3818 to_rtx = copy_rtx (to_rtx);
3819 MEM_VOLATILE_P (to_rtx) = 1;
3820 }
3821
3822 if (TREE_CODE (to) == COMPONENT_REF
3823 && TREE_READONLY (TREE_OPERAND (to, 1))
3824 /* We can't assert that a MEM won't be set more than once
3825 if the component is not addressable because another
3826 non-addressable component may be referenced by the same MEM. */
3827 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3828 {
3829 if (to_rtx == orig_to_rtx)
3830 to_rtx = copy_rtx (to_rtx);
3831 RTX_UNCHANGING_P (to_rtx) = 1;
3832 }
3833
3834 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3835 {
3836 if (to_rtx == orig_to_rtx)
3837 to_rtx = copy_rtx (to_rtx);
3838 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3839 }
3840
3841 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3842 (want_value
3843 /* Spurious cast for HPUX compiler. */
3844 ? ((enum machine_mode)
3845 TYPE_MODE (TREE_TYPE (to)))
3846 : VOIDmode),
3847 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3848
3849 preserve_temp_slots (result);
3850 free_temp_slots ();
3851 pop_temp_slots ();
3852
3853 /* If the value is meaningful, convert RESULT to the proper mode.
3854 Otherwise, return nothing. */
3855 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3856 TYPE_MODE (TREE_TYPE (from)),
3857 result,
3858 TREE_UNSIGNED (TREE_TYPE (to)))
3859 : NULL_RTX);
3860 }
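
  /* Illustrative example for the branch above (annotation, not part of the
     original GCC source): for

	 struct { int a : 3; int b : 5; } s;
	 s.b = v;

     get_inner_reference hands back the underlying object of `s' with,
     on a typical little-endian target, bitsize == 5, bitpos == 3 and
     mode1 == VOIDmode, so the assignment is carried out by store_field
     and store_bit_field rather than by a plain move into a MEM.  */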
3861
3862 /* If the rhs is a function call and its value is not an aggregate,
3863 call the function before we start to compute the lhs.
3864 This is needed for correct code for cases such as
3865 val = setjmp (buf) on machines where reference to val
3866 requires loading up part of an address in a separate insn.
3867
3868 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3869    since it might be a promoted variable where the zero- or sign-extension
3870 needs to be done. Handling this in the normal way is safe because no
3871 computation is done before the call. */
3872 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3874 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3875 && GET_CODE (DECL_RTL (to)) == REG))
3876 {
3877 rtx value;
3878
3879 push_temp_slots ();
3880 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3881 if (to_rtx == 0)
3882 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3883
3884 /* Handle calls that return values in multiple non-contiguous locations.
3885 The Irix 6 ABI has examples of this. */
3886 if (GET_CODE (to_rtx) == PARALLEL)
3887 emit_group_load (to_rtx, value, TREE_TYPE (from),
3888 int_size_in_bytes (TREE_TYPE (from)));
3889 else if (GET_MODE (to_rtx) == BLKmode)
3890 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3891 else
3892 {
3893 if (POINTER_TYPE_P (TREE_TYPE (to)))
3894 value = convert_memory_address (GET_MODE (to_rtx), value);
3895 emit_move_insn (to_rtx, value);
3896 }
3897 preserve_temp_slots (to_rtx);
3898 free_temp_slots ();
3899 pop_temp_slots ();
3900 return want_value ? to_rtx : NULL_RTX;
3901 }
3902
3903 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3904 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3905
3906 if (to_rtx == 0)
3907 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3908
3909 /* Don't move directly into a return register. */
3910 if (TREE_CODE (to) == RESULT_DECL
3911 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3912 {
3913 rtx temp;
3914
3915 push_temp_slots ();
3916 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3917
3918 if (GET_CODE (to_rtx) == PARALLEL)
3919 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3920 int_size_in_bytes (TREE_TYPE (from)));
3921 else
3922 emit_move_insn (to_rtx, temp);
3923
3924 preserve_temp_slots (to_rtx);
3925 free_temp_slots ();
3926 pop_temp_slots ();
3927 return want_value ? to_rtx : NULL_RTX;
3928 }
3929
3930 /* In case we are returning the contents of an object which overlaps
3931 the place the value is being stored, use a safe function when copying
3932 a value through a pointer into a structure value return block. */
3933 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3934 && current_function_returns_struct
3935 && !current_function_returns_pcc_struct)
3936 {
3937 rtx from_rtx, size;
3938
3939 push_temp_slots ();
3940 size = expr_size (from);
3941 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3942
3943 if (TARGET_MEM_FUNCTIONS)
3944 emit_library_call (memmove_libfunc, LCT_NORMAL,
3945 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3946 XEXP (from_rtx, 0), Pmode,
3947 convert_to_mode (TYPE_MODE (sizetype),
3948 size, TREE_UNSIGNED (sizetype)),
3949 TYPE_MODE (sizetype));
3950 else
3951 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3952 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3953 XEXP (to_rtx, 0), Pmode,
3954 convert_to_mode (TYPE_MODE (integer_type_node),
3955 size,
3956 TREE_UNSIGNED (integer_type_node)),
3957 TYPE_MODE (integer_type_node));
3958
3959 preserve_temp_slots (to_rtx);
3960 free_temp_slots ();
3961 pop_temp_slots ();
3962 return want_value ? to_rtx : NULL_RTX;
3963 }
3964
3965 /* Compute FROM and store the value in the rtx we got. */
3966
3967 push_temp_slots ();
3968 result = store_expr (from, to_rtx, want_value);
3969 preserve_temp_slots (result);
3970 free_temp_slots ();
3971 pop_temp_slots ();
3972 return want_value ? result : NULL_RTX;
3973 }
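
/* Illustrative note (annotation, not part of the original GCC source): the
   ordinary path of expand_assignment boils down to

       to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
       result = store_expr (from, to_rtx, want_value);

   so a plain C assignment such as "x = y + 1" becomes an expansion of the
   lhs for writing followed by a store_expr of the rhs into that rtx; the
   earlier branches exist only for bit-fields, calls, and struct returns.  */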
3974
3975 /* Generate code for computing expression EXP,
3976 and storing the value into TARGET.
3977 TARGET may contain a QUEUED rtx.
3978
3979 If WANT_VALUE & 1 is nonzero, return a copy of the value
3980 not in TARGET, so that we can be sure to use the proper
3981 value in a containing expression even if TARGET has something
3982 else stored in it. If possible, we copy the value through a pseudo
3983 and return that pseudo. Or, if the value is constant, we try to
3984 return the constant. In some cases, we return a pseudo
3985 copied *from* TARGET.
3986
3987 If the mode is BLKmode then we may return TARGET itself.
3988    It turns out that in BLKmode it doesn't cause a problem,
3989    because C has no operators that could combine two different
3990 assignments into the same BLKmode object with different values
3991 with no sequence point. Will other languages need this to
3992 be more thorough?
3993
3994 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3995 to catch quickly any cases where the caller uses the value
3996 and fails to set WANT_VALUE.
3997
3998 If WANT_VALUE & 2 is set, this is a store into a call param on the
3999 stack, and block moves may need to be treated specially. */
4000
4001 rtx
4002 store_expr (tree exp, rtx target, int want_value)
4003 {
4004 rtx temp;
4005 rtx alt_rtl = NULL_RTX;
4006 int dont_return_target = 0;
4007 int dont_store_target = 0;
4008
4009 if (VOID_TYPE_P (TREE_TYPE (exp)))
4010 {
4011 /* C++ can generate ?: expressions with a throw expression in one
4012 branch and an rvalue in the other. Here, we resolve attempts to
4013 store the throw expression's nonexistent result. */
4014 if (want_value)
4015 abort ();
4016 expand_expr (exp, const0_rtx, VOIDmode, 0);
4017 return NULL_RTX;
4018 }
4019 if (TREE_CODE (exp) == COMPOUND_EXPR)
4020 {
4021 /* Perform first part of compound expression, then assign from second
4022 part. */
4023 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4024 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4025 emit_queue ();
4026 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4027 }
4028 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4029 {
4030 /* For conditional expression, get safe form of the target. Then
4031 test the condition, doing the appropriate assignment on either
4032 side. This avoids the creation of unnecessary temporaries.
4033 For non-BLKmode, it is more efficient not to do this. */
4034
4035 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4036
4037 emit_queue ();
4038 target = protect_from_queue (target, 1);
4039
4040 do_pending_stack_adjust ();
4041 NO_DEFER_POP;
4042 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4043 start_cleanup_deferral ();
4044 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4045 end_cleanup_deferral ();
4046 emit_queue ();
4047 emit_jump_insn (gen_jump (lab2));
4048 emit_barrier ();
4049 emit_label (lab1);
4050 start_cleanup_deferral ();
4051 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4052 end_cleanup_deferral ();
4053 emit_queue ();
4054 emit_label (lab2);
4055 OK_DEFER_POP;
4056
4057 return want_value & 1 ? target : NULL_RTX;
4058 }
4059 else if (queued_subexp_p (target))
4060 /* If target contains a postincrement, let's not risk
4061 using it as the place to generate the rhs. */
4062 {
4063 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4064 {
4065 /* Expand EXP into a new pseudo. */
4066 temp = gen_reg_rtx (GET_MODE (target));
4067 temp = expand_expr (exp, temp, GET_MODE (target),
4068 (want_value & 2
4069 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4070 }
4071 else
4072 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4073 (want_value & 2
4074 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4075
4076 /* If target is volatile, ANSI requires accessing the value
4077 *from* the target, if it is accessed. So make that happen.
4078 In no case return the target itself. */
4079 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4080 dont_return_target = 1;
4081 }
4082 else if ((want_value & 1) != 0
4083 && GET_CODE (target) == MEM
4084 && ! MEM_VOLATILE_P (target)
4085 && GET_MODE (target) != BLKmode)
4086 /* If target is in memory and caller wants value in a register instead,
4087 arrange that. Pass TARGET as target for expand_expr so that,
4088 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4089 We know expand_expr will not use the target in that case.
4090 Don't do this if TARGET is volatile because we are supposed
4091 to write it and then read it. */
4092 {
4093 temp = expand_expr (exp, target, GET_MODE (target),
4094 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4095 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4096 {
4097 /* If TEMP is already in the desired TARGET, only copy it from
4098 memory and don't store it there again. */
4099 if (temp == target
4100 || (rtx_equal_p (temp, target)
4101 && ! side_effects_p (temp) && ! side_effects_p (target)))
4102 dont_store_target = 1;
4103 temp = copy_to_reg (temp);
4104 }
4105 dont_return_target = 1;
4106 }
4107 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4108 /* If this is a scalar in a register that is stored in a wider mode
4109 than the declared mode, compute the result into its declared mode
4110 and then convert to the wider mode. Our value is the computed
4111 expression. */
4112 {
4113 rtx inner_target = 0;
4114
4115 /* If we don't want a value, we can do the conversion inside EXP,
4116 which will often result in some optimizations. Do the conversion
4117 in two steps: first change the signedness, if needed, then
4118 the extend. But don't do this if the type of EXP is a subtype
4119 of something else since then the conversion might involve
4120 more than just converting modes. */
4121 if ((want_value & 1) == 0
4122 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4123 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4124 {
4125 if (TREE_UNSIGNED (TREE_TYPE (exp))
4126 != SUBREG_PROMOTED_UNSIGNED_P (target))
4127 exp = convert
4128 ((*lang_hooks.types.signed_or_unsigned_type)
4129 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4130
4131 exp = convert ((*lang_hooks.types.type_for_mode)
4132 (GET_MODE (SUBREG_REG (target)),
4133 SUBREG_PROMOTED_UNSIGNED_P (target)),
4134 exp);
4135
4136 inner_target = SUBREG_REG (target);
4137 }
4138
4139 temp = expand_expr (exp, inner_target, VOIDmode,
4140 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4141
4142 /* If TEMP is a MEM and we want a result value, make the access
4143 now so it gets done only once. Strictly speaking, this is
4144 only necessary if the MEM is volatile, or if the address
4145 overlaps TARGET. But not performing the load twice also
4146 reduces the amount of rtl we generate and then have to CSE. */
4147 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4148 temp = copy_to_reg (temp);
4149
4150 /* If TEMP is a VOIDmode constant, use convert_modes to make
4151 sure that we properly convert it. */
4152 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4153 {
4154 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4155 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4156 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4157 GET_MODE (target), temp,
4158 SUBREG_PROMOTED_UNSIGNED_P (target));
4159 }
4160
4161 convert_move (SUBREG_REG (target), temp,
4162 SUBREG_PROMOTED_UNSIGNED_P (target));
4163
4164 /* If we promoted a constant, change the mode back down to match
4165 target. Otherwise, the caller might get confused by a result whose
4166 mode is larger than expected. */
4167
4168 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4169 {
4170 if (GET_MODE (temp) != VOIDmode)
4171 {
4172 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4173 SUBREG_PROMOTED_VAR_P (temp) = 1;
4174 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4175 SUBREG_PROMOTED_UNSIGNED_P (target));
4176 }
4177 else
4178 temp = convert_modes (GET_MODE (target),
4179 GET_MODE (SUBREG_REG (target)),
4180 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4181 }
4182
4183 return want_value & 1 ? temp : NULL_RTX;
4184 }
4185 else
4186 {
4187 temp = expand_expr_real (exp, target, GET_MODE (target),
4188 (want_value & 2
4189 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4190 &alt_rtl);
4191 /* Return TARGET if it's a specified hardware register.
4192 If TARGET is a volatile mem ref, either return TARGET
4193 or return a reg copied *from* TARGET; ANSI requires this.
4194
4195 Otherwise, if TEMP is not TARGET, return TEMP
4196 if it is constant (for efficiency),
4197 or if we really want the correct value. */
4198 if (!(target && GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4200 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4201 && ! rtx_equal_p (temp, target)
4202 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4203 dont_return_target = 1;
4204 }
4205
4206 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4207 the same as that of TARGET, adjust the constant. This is needed, for
4208 example, in case it is a CONST_DOUBLE and we want only a word-sized
4209 value. */
4210 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4211 && TREE_CODE (exp) != ERROR_MARK
4212 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4213 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4214 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4215
4216 /* If value was not generated in the target, store it there.
4217 Convert the value to TARGET's type first if necessary.
4218 If TEMP and TARGET compare equal according to rtx_equal_p, but
4219 one or both of them are volatile memory refs, we have to distinguish
4220 two cases:
4221 - expand_expr has used TARGET. In this case, we must not generate
4222 another copy. This can be detected by TARGET being equal according
4223 to == .
4224 - expand_expr has not used TARGET - that means that the source just
4225 happens to have the same RTX form. Since temp will have been created
4226 by expand_expr, it will compare unequal according to == .
4227 We must generate a copy in this case, to reach the correct number
4228 of volatile memory references. */
4229
4230 if ((! rtx_equal_p (temp, target)
4231 || (temp != target && (side_effects_p (temp)
4232 || side_effects_p (target))))
4233 && TREE_CODE (exp) != ERROR_MARK
4234 && ! dont_store_target
4235 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4236 but TARGET is not valid memory reference, TEMP will differ
4237 from TARGET although it is really the same location. */
4238 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4239 /* If there's nothing to copy, don't bother. Don't call expr_size
4240	 unless necessary, because the expr_size hook of some front ends
4241	 (C++) aborts on objects that are not supposed to be bit-copied or
4242 bit-initialized. */
4243 && expr_size (exp) != const0_rtx)
4244 {
4245 target = protect_from_queue (target, 1);
4246 if (GET_MODE (temp) != GET_MODE (target)
4247 && GET_MODE (temp) != VOIDmode)
4248 {
4249 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4250 if (dont_return_target)
4251 {
4252 /* In this case, we will return TEMP,
4253 so make sure it has the proper mode.
4254 But don't forget to store the value into TARGET. */
4255 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4256 emit_move_insn (target, temp);
4257 }
4258 else
4259 convert_move (target, temp, unsignedp);
4260 }
4261
4262 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4263 {
4264 /* Handle copying a string constant into an array. The string
4265 constant may be shorter than the array. So copy just the string's
4266 actual length, and clear the rest. First get the size of the data
4267 type of the string, which is actually the size of the target. */
4268 rtx size = expr_size (exp);
4269
4270 if (GET_CODE (size) == CONST_INT
4271 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4272 emit_block_move (target, temp, size,
4273 (want_value & 2
4274 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4275 else
4276 {
4277 /* Compute the size of the data to copy from the string. */
4278 tree copy_size
4279 = size_binop (MIN_EXPR,
4280 make_tree (sizetype, size),
4281 size_int (TREE_STRING_LENGTH (exp)));
4282 rtx copy_size_rtx
4283 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4284 (want_value & 2
4285 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4286 rtx label = 0;
4287
4288 /* Copy that much. */
4289 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4290 TREE_UNSIGNED (sizetype));
4291 emit_block_move (target, temp, copy_size_rtx,
4292 (want_value & 2
4293 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4294
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4297 if (GET_CODE (copy_size_rtx) == CONST_INT)
4298 {
4299 size = plus_constant (size, -INTVAL (copy_size_rtx));
4300 target = adjust_address (target, BLKmode,
4301 INTVAL (copy_size_rtx));
4302 }
4303 else
4304 {
4305 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4306 copy_size_rtx, NULL_RTX, 0,
4307 OPTAB_LIB_WIDEN);
4308
4309 #ifdef POINTERS_EXTEND_UNSIGNED
4310 if (GET_MODE (copy_size_rtx) != Pmode)
4311 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4312 TREE_UNSIGNED (sizetype));
4313 #endif
4314
4315 target = offset_address (target, copy_size_rtx,
4316 highest_pow2_factor (copy_size));
4317 label = gen_label_rtx ();
4318 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4319 GET_MODE (size), 0, label);
4320 }
4321
4322 if (size != const0_rtx)
4323 clear_storage (target, size);
4324
4325 if (label)
4326 emit_label (label);
4327 }
4328 }
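
	  /* Illustrative worked example for the string case above
	     (annotation, not part of the original GCC source): for

		 char buf[10] = "abc";

	     expr_size (exp) is 10 (the size of the target array) while
	     TREE_STRING_LENGTH (exp) is 4 (including the trailing NUL), so
	     copy_size becomes min (10, 4) = 4: four bytes are block-copied
	     and the remaining six are cleared by clear_storage.  */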
4329 /* Handle calls that return values in multiple non-contiguous locations.
4330 The Irix 6 ABI has examples of this. */
4331 else if (GET_CODE (target) == PARALLEL)
4332 emit_group_load (target, temp, TREE_TYPE (exp),
4333 int_size_in_bytes (TREE_TYPE (exp)));
4334 else if (GET_MODE (temp) == BLKmode)
4335 emit_block_move (target, temp, expr_size (exp),
4336 (want_value & 2
4337 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4338 else
4339 emit_move_insn (target, temp);
4340 }
4341
4342 /* If we don't want a value, return NULL_RTX. */
4343 if ((want_value & 1) == 0)
4344 return NULL_RTX;
4345
4346 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4347 ??? The latter test doesn't seem to make sense. */
4348 else if (dont_return_target && GET_CODE (temp) != MEM)
4349 return temp;
4350
4351 /* Return TARGET itself if it is a hard register. */
4352 else if ((want_value & 1) != 0
4353 && GET_MODE (target) != BLKmode
4354 && ! (GET_CODE (target) == REG
4355 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4356 return copy_to_reg (target);
4357
4358 else
4359 return target;
4360 }
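
/* Note on WANT_VALUE (annotation, not part of the original GCC source):
   the argument is a bit mask, not a boolean.  Typical calls look like

       store_expr (exp, target, 0);   value of the store not needed
       store_expr (exp, target, 1);   caller wants the stored value back
       store_expr (exp, target, 2);   store into a call parameter on the
                                      stack, so block moves are done with
                                      BLOCK_OP_CALL_PARM and expansion with
                                      EXPAND_STACK_PARM

   The two bits are independent; the header comment above spells out the
   exact semantics of each.  */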
4361 \f
4362 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4363
4364 static int
4365 is_zeros_p (tree exp)
4366 {
4367 tree elt;
4368
4369 switch (TREE_CODE (exp))
4370 {
4371 case CONVERT_EXPR:
4372 case NOP_EXPR:
4373 case NON_LVALUE_EXPR:
4374 case VIEW_CONVERT_EXPR:
4375 return is_zeros_p (TREE_OPERAND (exp, 0));
4376
4377 case INTEGER_CST:
4378 return integer_zerop (exp);
4379
4380 case COMPLEX_CST:
4381 return
4382 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4383
4384 case REAL_CST:
4385 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4386
4387 case VECTOR_CST:
4388 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4389 elt = TREE_CHAIN (elt))
4390 if (!is_zeros_p (TREE_VALUE (elt)))
4391 return 0;
4392
4393 return 1;
4394
4395 case CONSTRUCTOR:
4396 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4397 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4398 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4399 if (! is_zeros_p (TREE_VALUE (elt)))
4400 return 0;
4401
4402 return 1;
4403
4404 default:
4405 return 0;
4406 }
4407 }
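
/* Illustrative example (annotation, not part of the original GCC source):
   given

       struct point { int x; double y; };
       struct point a = { 0, 0.0 };
       struct point b = { 0, 1.0 };

   is_zeros_p returns 1 for the CONSTRUCTOR of `a' and 0 for the
   CONSTRUCTOR of `b', since every element must itself satisfy
   is_zeros_p.  */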
4408
4409 /* Return 1 if EXP contains mostly (3/4) zeros. */
4410
4411 int
4412 mostly_zeros_p (tree exp)
4413 {
4414 if (TREE_CODE (exp) == CONSTRUCTOR)
4415 {
4416 int elts = 0, zeros = 0;
4417 tree elt = CONSTRUCTOR_ELTS (exp);
4418 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4419 {
4420 /* If there are no ranges of true bits, it is all zero. */
4421 return elt == NULL_TREE;
4422 }
4423 for (; elt; elt = TREE_CHAIN (elt))
4424 {
4425 /* We do not handle the case where the index is a RANGE_EXPR,
4426 so the statistic will be somewhat inaccurate.
4427 We do make a more accurate count in store_constructor itself,
4428	     and since this function is only used for nested array elements,
4429 this should be close enough. */
4430 if (mostly_zeros_p (TREE_VALUE (elt)))
4431 zeros++;
4432 elts++;
4433 }
4434
4435 return 4 * zeros >= 3 * elts;
4436 }
4437
4438 return is_zeros_p (exp);
4439 }
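
/* Illustrative worked example (annotation, not part of the original GCC
   source): for the initializer

       int v[8] = { 0, 0, 0, 0, 0, 0, 7, 9 };

   elts == 8 and zeros == 6, so the test 4 * zeros >= 3 * elts becomes
   24 >= 24 and mostly_zeros_p returns 1: at least three quarters of the
   elements are zero, which later leads store_constructor to clear the
   whole object first and then store only the nonzero entries.  */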
4440 \f
4441 /* Helper function for store_constructor.
4442 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4443 TYPE is the type of the CONSTRUCTOR, not the element type.
4444 CLEARED is as for store_constructor.
4445 ALIAS_SET is the alias set to use for any stores.
4446
4447 This provides a recursive shortcut back to store_constructor when it isn't
4448 necessary to go through store_field. This is so that we can pass through
4449 the cleared field to let store_constructor know that we may not have to
4450 clear a substructure if the outer structure has already been cleared. */
4451
4452 static void
4453 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4454 HOST_WIDE_INT bitpos, enum machine_mode mode,
4455 tree exp, tree type, int cleared, int alias_set)
4456 {
4457 if (TREE_CODE (exp) == CONSTRUCTOR
4458 && bitpos % BITS_PER_UNIT == 0
4459 /* If we have a nonzero bitpos for a register target, then we just
4460 let store_field do the bitfield handling. This is unlikely to
4461	 generate unnecessary clear instructions anyway.  */
4462 && (bitpos == 0 || GET_CODE (target) == MEM))
4463 {
4464 if (GET_CODE (target) == MEM)
4465 target
4466 = adjust_address (target,
4467 GET_MODE (target) == BLKmode
4468 || 0 != (bitpos
4469 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4470 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4471
4472
4473 /* Update the alias set, if required. */
4474 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4475 && MEM_ALIAS_SET (target) != 0)
4476 {
4477 target = copy_rtx (target);
4478 set_mem_alias_set (target, alias_set);
4479 }
4480
4481 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4482 }
4483 else
4484 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4485 alias_set);
4486 }
4487
4488 /* Store the value of constructor EXP into the rtx TARGET.
4489 TARGET is either a REG or a MEM; we know it cannot conflict, since
4490 safe_from_p has been called.
4491 CLEARED is true if TARGET is known to have been zero'd.
4492 SIZE is the number of bytes of TARGET we are allowed to modify: this
4493 may not be the same as the size of EXP if we are assigning to a field
4494 which has been packed to exclude padding bits. */
4495
4496 static void
4497 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4498 {
4499 tree type = TREE_TYPE (exp);
4500 #ifdef WORD_REGISTER_OPERATIONS
4501 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4502 #endif
4503
4504 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4505 || TREE_CODE (type) == QUAL_UNION_TYPE)
4506 {
4507 tree elt;
4508
4509 /* If size is zero or the target is already cleared, do nothing. */
4510 if (size == 0 || cleared)
4511 cleared = 1;
4512 /* We either clear the aggregate or indicate the value is dead. */
4513 else if ((TREE_CODE (type) == UNION_TYPE
4514 || TREE_CODE (type) == QUAL_UNION_TYPE)
4515 && ! CONSTRUCTOR_ELTS (exp))
4516 /* If the constructor is empty, clear the union. */
4517 {
4518 clear_storage (target, expr_size (exp));
4519 cleared = 1;
4520 }
4521
4522 /* If we are building a static constructor into a register,
4523 set the initial value as zero so we can fold the value into
4524 a constant. But if more than one register is involved,
4525 this probably loses. */
4526 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4527 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4528 {
4529 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4530 cleared = 1;
4531 }
4532
4533 /* If the constructor has fewer fields than the structure
4534 or if we are initializing the structure to mostly zeros,
4535 clear the whole structure first. Don't do this if TARGET is a
4536 register whose mode size isn't equal to SIZE since clear_storage
4537 can't handle this case. */
4538 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4539 || mostly_zeros_p (exp))
4540 && (GET_CODE (target) != REG
4541 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4542 == size)))
4543 {
4544 rtx xtarget = target;
4545
4546 if (readonly_fields_p (type))
4547 {
4548 xtarget = copy_rtx (xtarget);
4549 RTX_UNCHANGING_P (xtarget) = 1;
4550 }
4551
4552 clear_storage (xtarget, GEN_INT (size));
4553 cleared = 1;
4554 }
4555
4556 if (! cleared)
4557 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4558
4559 /* Store each element of the constructor into
4560 the corresponding field of TARGET. */
4561
4562 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4563 {
4564 tree field = TREE_PURPOSE (elt);
4565 tree value = TREE_VALUE (elt);
4566 enum machine_mode mode;
4567 HOST_WIDE_INT bitsize;
4568 HOST_WIDE_INT bitpos = 0;
4569 tree offset;
4570 rtx to_rtx = target;
4571
4572 /* Just ignore missing fields.
4573 We cleared the whole structure, above,
4574 if any fields are missing. */
4575 if (field == 0)
4576 continue;
4577
4578 if (cleared && is_zeros_p (value))
4579 continue;
4580
4581 if (host_integerp (DECL_SIZE (field), 1))
4582 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4583 else
4584 bitsize = -1;
4585
4586 mode = DECL_MODE (field);
4587 if (DECL_BIT_FIELD (field))
4588 mode = VOIDmode;
4589
4590 offset = DECL_FIELD_OFFSET (field);
4591 if (host_integerp (offset, 0)
4592 && host_integerp (bit_position (field), 0))
4593 {
4594 bitpos = int_bit_position (field);
4595 offset = 0;
4596 }
4597 else
4598 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4599
4600 if (offset)
4601 {
4602 rtx offset_rtx;
4603
4604 if (CONTAINS_PLACEHOLDER_P (offset))
4605 offset = build (WITH_RECORD_EXPR, sizetype,
4606 offset, make_tree (TREE_TYPE (exp), target));
4607
4608 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4609 if (GET_CODE (to_rtx) != MEM)
4610 abort ();
4611
4612 #ifdef POINTERS_EXTEND_UNSIGNED
4613 if (GET_MODE (offset_rtx) != Pmode)
4614 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4615 #else
4616 if (GET_MODE (offset_rtx) != ptr_mode)
4617 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4618 #endif
4619
4620 to_rtx = offset_address (to_rtx, offset_rtx,
4621 highest_pow2_factor (offset));
4622 }
4623
4624 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4625 on the MEM might lead to scheduling the clearing after the
4626 store. */
4627 if (TREE_READONLY (field) && !cleared)
4628 {
4629 if (GET_CODE (to_rtx) == MEM)
4630 to_rtx = copy_rtx (to_rtx);
4631
4632 RTX_UNCHANGING_P (to_rtx) = 1;
4633 }
4634
4635 #ifdef WORD_REGISTER_OPERATIONS
4636 /* If this initializes a field that is smaller than a word, at the
4637 start of a word, try to widen it to a full word.
4638 This special case allows us to output C++ member function
4639 initializations in a form that the optimizers can understand. */
4640 if (GET_CODE (target) == REG
4641 && bitsize < BITS_PER_WORD
4642 && bitpos % BITS_PER_WORD == 0
4643 && GET_MODE_CLASS (mode) == MODE_INT
4644 && TREE_CODE (value) == INTEGER_CST
4645 && exp_size >= 0
4646 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4647 {
4648 tree type = TREE_TYPE (value);
4649
4650 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4651 {
4652 type = (*lang_hooks.types.type_for_size)
4653 (BITS_PER_WORD, TREE_UNSIGNED (type));
4654 value = convert (type, value);
4655 }
4656
4657 if (BYTES_BIG_ENDIAN)
4658 value
4659 = fold (build (LSHIFT_EXPR, type, value,
4660 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4661 bitsize = BITS_PER_WORD;
4662 mode = word_mode;
4663 }
4664 #endif
4665
4666 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4667 && DECL_NONADDRESSABLE_P (field))
4668 {
4669 to_rtx = copy_rtx (to_rtx);
4670 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4671 }
4672
4673 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4674 value, type, cleared,
4675 get_alias_set (TREE_TYPE (field)));
4676 }
4677 }
4678 else if (TREE_CODE (type) == ARRAY_TYPE
4679 || TREE_CODE (type) == VECTOR_TYPE)
4680 {
4681 tree elt;
4682 int i;
4683 int need_to_clear;
4684 tree domain = TYPE_DOMAIN (type);
4685 tree elttype = TREE_TYPE (type);
4686 int const_bounds_p;
4687 HOST_WIDE_INT minelt = 0;
4688 HOST_WIDE_INT maxelt = 0;
4689 int icode = 0;
4690 rtx *vector = NULL;
4691 int elt_size = 0;
4692 unsigned n_elts = 0;
4693
4694 /* Vectors are like arrays, but the domain is stored via an array
4695 type indirectly. */
4696 if (TREE_CODE (type) == VECTOR_TYPE)
4697 {
4698 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4699 the same field as TYPE_DOMAIN, we are not guaranteed that
4700 it always will. */
4701 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4702 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4703 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4704 {
4705 enum machine_mode mode = GET_MODE (target);
4706
4707 icode = (int) vec_init_optab->handlers[mode].insn_code;
4708 if (icode != CODE_FOR_nothing)
4709 {
4710 unsigned int i;
4711
4712 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4713 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4714		  vector = alloca (n_elts * sizeof (rtx));
4715 for (i = 0; i < n_elts; i++)
4716 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4717 }
4718 }
4719 }
4720
4721 const_bounds_p = (TYPE_MIN_VALUE (domain)
4722 && TYPE_MAX_VALUE (domain)
4723 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4724 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4725
4726 /* If we have constant bounds for the range of the type, get them. */
4727 if (const_bounds_p)
4728 {
4729 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4730 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4731 }
4732
4733 /* If the constructor has fewer elements than the array,
4734 clear the whole array first. Similarly if this is
4735	 a static constructor of a non-BLKmode object.  */
4736 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4737 need_to_clear = 1;
4738 else
4739 {
4740 HOST_WIDE_INT count = 0, zero_count = 0;
4741 need_to_clear = ! const_bounds_p;
4742
4743 /* This loop is a more accurate version of the loop in
4744 mostly_zeros_p (it handles RANGE_EXPR in an index).
4745 It is also needed to check for missing elements. */
4746 for (elt = CONSTRUCTOR_ELTS (exp);
4747 elt != NULL_TREE && ! need_to_clear;
4748 elt = TREE_CHAIN (elt))
4749 {
4750 tree index = TREE_PURPOSE (elt);
4751 HOST_WIDE_INT this_node_count;
4752
4753 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4754 {
4755 tree lo_index = TREE_OPERAND (index, 0);
4756 tree hi_index = TREE_OPERAND (index, 1);
4757
4758 if (! host_integerp (lo_index, 1)
4759 || ! host_integerp (hi_index, 1))
4760 {
4761 need_to_clear = 1;
4762 break;
4763 }
4764
4765 this_node_count = (tree_low_cst (hi_index, 1)
4766 - tree_low_cst (lo_index, 1) + 1);
4767 }
4768 else
4769 this_node_count = 1;
4770
4771 count += this_node_count;
4772 if (mostly_zeros_p (TREE_VALUE (elt)))
4773 zero_count += this_node_count;
4774 }
4775
4776 /* Clear the entire array first if there are any missing elements,
4777 or if the incidence of zero elements is >= 75%. */
4778 if (! need_to_clear
4779 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4780 need_to_clear = 1;
4781 }
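
	  /* Illustrative worked example for the heuristic above (annotation,
	     not part of the original GCC source): for

		 int v[100] = { [3] = 1, [7] = 2 };

	     only two elements are present, so count == 2 while
	     maxelt - minelt + 1 == 100; the "missing elements" test fires,
	     need_to_clear is set, and the whole array is cleared before the
	     two explicit entries are stored.  */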
4782
4783 if (need_to_clear && size > 0 && !vector)
4784 {
4785 if (! cleared)
4786 {
4787 if (REG_P (target))
4788 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4789 else
4790 clear_storage (target, GEN_INT (size));
4791 }
4792 cleared = 1;
4793 }
4794 else if (REG_P (target))
4795 /* Inform later passes that the old value is dead. */
4796 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4797
4798 /* Store each element of the constructor into
4799 the corresponding element of TARGET, determined
4800 by counting the elements. */
4801 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4802 elt;
4803 elt = TREE_CHAIN (elt), i++)
4804 {
4805 enum machine_mode mode;
4806 HOST_WIDE_INT bitsize;
4807 HOST_WIDE_INT bitpos;
4808 int unsignedp;
4809 tree value = TREE_VALUE (elt);
4810 tree index = TREE_PURPOSE (elt);
4811 rtx xtarget = target;
4812
4813 if (cleared && is_zeros_p (value))
4814 continue;
4815
4816 unsignedp = TREE_UNSIGNED (elttype);
4817 mode = TYPE_MODE (elttype);
4818 if (mode == BLKmode)
4819 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4820 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4821 : -1);
4822 else
4823 bitsize = GET_MODE_BITSIZE (mode);
4824
4825 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4826 {
4827 tree lo_index = TREE_OPERAND (index, 0);
4828 tree hi_index = TREE_OPERAND (index, 1);
4829 rtx index_r, pos_rtx, loop_end;
4830 struct nesting *loop;
4831 HOST_WIDE_INT lo, hi, count;
4832 tree position;
4833
4834 if (vector)
4835 abort ();
4836
4837 /* If the range is constant and "small", unroll the loop. */
4838 if (const_bounds_p
4839 && host_integerp (lo_index, 0)
4840 && host_integerp (hi_index, 0)
4841 && (lo = tree_low_cst (lo_index, 0),
4842 hi = tree_low_cst (hi_index, 0),
4843 count = hi - lo + 1,
4844 (GET_CODE (target) != MEM
4845 || count <= 2
4846 || (host_integerp (TYPE_SIZE (elttype), 1)
4847 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4848 <= 40 * 8)))))
4849 {
4850 lo -= minelt; hi -= minelt;
4851 for (; lo <= hi; lo++)
4852 {
4853 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4854
4855 if (GET_CODE (target) == MEM
4856 && !MEM_KEEP_ALIAS_SET_P (target)
4857 && TREE_CODE (type) == ARRAY_TYPE
4858 && TYPE_NONALIASED_COMPONENT (type))
4859 {
4860 target = copy_rtx (target);
4861 MEM_KEEP_ALIAS_SET_P (target) = 1;
4862 }
4863
4864 store_constructor_field
4865 (target, bitsize, bitpos, mode, value, type, cleared,
4866 get_alias_set (elttype));
4867 }
4868 }
4869 else
4870 {
4871 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4872 loop_end = gen_label_rtx ();
4873
4874 unsignedp = TREE_UNSIGNED (domain);
4875
4876 index = build_decl (VAR_DECL, NULL_TREE, domain);
4877
4878 index_r
4879 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4880 &unsignedp, 0));
4881 SET_DECL_RTL (index, index_r);
4882 if (TREE_CODE (value) == SAVE_EXPR
4883 && SAVE_EXPR_RTL (value) == 0)
4884 {
4885 /* Make sure value gets expanded once before the
4886 loop. */
4887 expand_expr (value, const0_rtx, VOIDmode, 0);
4888 emit_queue ();
4889 }
4890 store_expr (lo_index, index_r, 0);
4891 loop = expand_start_loop (0);
4892
4893 /* Assign value to element index. */
4894 position
4895 = convert (ssizetype,
4896 fold (build (MINUS_EXPR, TREE_TYPE (index),
4897 index, TYPE_MIN_VALUE (domain))));
4898 position = size_binop (MULT_EXPR, position,
4899 convert (ssizetype,
4900 TYPE_SIZE_UNIT (elttype)));
4901
4902 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4903 xtarget = offset_address (target, pos_rtx,
4904 highest_pow2_factor (position));
4905 xtarget = adjust_address (xtarget, mode, 0);
4906 if (TREE_CODE (value) == CONSTRUCTOR)
4907 store_constructor (value, xtarget, cleared,
4908 bitsize / BITS_PER_UNIT);
4909 else
4910 store_expr (value, xtarget, 0);
4911
4912 expand_exit_loop_if_false (loop,
4913 build (LT_EXPR, integer_type_node,
4914 index, hi_index));
4915
4916 expand_increment (build (PREINCREMENT_EXPR,
4917 TREE_TYPE (index),
4918 index, integer_one_node), 0, 0);
4919 expand_end_loop ();
4920 emit_label (loop_end);
4921 }
4922 }
4923 else if ((index != 0 && ! host_integerp (index, 0))
4924 || ! host_integerp (TYPE_SIZE (elttype), 1))
4925 {
4926 tree position;
4927
4928 if (vector)
4929 abort ();
4930
4931 if (index == 0)
4932 index = ssize_int (1);
4933
4934 if (minelt)
4935 index = convert (ssizetype,
4936			     fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4937 TYPE_MIN_VALUE (domain))));
4938
4939 position = size_binop (MULT_EXPR, index,
4940 convert (ssizetype,
4941 TYPE_SIZE_UNIT (elttype)));
4942 xtarget = offset_address (target,
4943 expand_expr (position, 0, VOIDmode, 0),
4944 highest_pow2_factor (position));
4945 xtarget = adjust_address (xtarget, mode, 0);
4946 store_expr (value, xtarget, 0);
4947 }
4948 else if (vector)
4949 {
4950 int pos;
4951
4952 if (index != 0)
4953 pos = tree_low_cst (index, 0) - minelt;
4954 else
4955 pos = i;
4956 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4957 }
4958 else
4959 {
4960 if (index != 0)
4961 bitpos = ((tree_low_cst (index, 0) - minelt)
4962 * tree_low_cst (TYPE_SIZE (elttype), 1));
4963 else
4964 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4965
4966 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4967 && TREE_CODE (type) == ARRAY_TYPE
4968 && TYPE_NONALIASED_COMPONENT (type))
4969 {
4970 target = copy_rtx (target);
4971 MEM_KEEP_ALIAS_SET_P (target) = 1;
4972 }
4973 store_constructor_field (target, bitsize, bitpos, mode, value,
4974 type, cleared, get_alias_set (elttype));
4975 }
4976 }
4977 if (vector)
4978 {
4979 emit_insn (GEN_FCN (icode) (target,
4980 gen_rtx_PARALLEL (GET_MODE (target),
4981 gen_rtvec_v (n_elts, vector))));
4982 }
4983 }
4984
4985 /* Set constructor assignments. */
4986 else if (TREE_CODE (type) == SET_TYPE)
4987 {
4988 tree elt = CONSTRUCTOR_ELTS (exp);
4989 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4990 tree domain = TYPE_DOMAIN (type);
4991 tree domain_min, domain_max, bitlength;
4992
4993 /* The default implementation strategy is to extract the constant
4994 parts of the constructor, use that to initialize the target,
4995 and then "or" in whatever non-constant ranges we need in addition.
4996
4997 If a large set is all zero or all ones, it is
4998 probably better to set it using memset (if available) or bzero.
4999 Also, if a large set has just a single range, it may also be
5000	 better to first clear the whole set (using bzero/memset)
5001	 and then set just the bits we want. */
5002
5003 /* Check for all zeros. */
5004 if (elt == NULL_TREE && size > 0)
5005 {
5006 if (!cleared)
5007 clear_storage (target, GEN_INT (size));
5008 return;
5009 }
5010
5011 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5012 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5013 bitlength = size_binop (PLUS_EXPR,
5014 size_diffop (domain_max, domain_min),
5015 ssize_int (1));
5016
5017 nbits = tree_low_cst (bitlength, 1);
5018
5019 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5020 are "complicated" (more than one range), initialize (the
5021 constant parts) by copying from a constant. */
5022 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5023 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5024 {
5025 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5026 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5027 char *bit_buffer = alloca (nbits);
5028 HOST_WIDE_INT word = 0;
5029 unsigned int bit_pos = 0;
5030 unsigned int ibit = 0;
5031 unsigned int offset = 0; /* In bytes from beginning of set. */
5032
5033 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5034 for (;;)
5035 {
5036 if (bit_buffer[ibit])
5037 {
5038 if (BYTES_BIG_ENDIAN)
5039 word |= (1 << (set_word_size - 1 - bit_pos));
5040 else
5041 word |= 1 << bit_pos;
5042 }
5043
5044 bit_pos++; ibit++;
5045 if (bit_pos >= set_word_size || ibit == nbits)
5046 {
5047 if (word != 0 || ! cleared)
5048 {
5049 rtx datum = GEN_INT (word);
5050 rtx to_rtx;
5051
5052 /* The assumption here is that it is safe to use
5053 XEXP if the set is multi-word, but not if
5054 it's single-word. */
5055 if (GET_CODE (target) == MEM)
5056 to_rtx = adjust_address (target, mode, offset);
5057 else if (offset == 0)
5058 to_rtx = target;
5059 else
5060 abort ();
5061 emit_move_insn (to_rtx, datum);
5062 }
5063
5064 if (ibit == nbits)
5065 break;
5066 word = 0;
5067 bit_pos = 0;
5068 offset += set_word_size / BITS_PER_UNIT;
5069 }
5070 }
5071 }
5072 else if (!cleared)
5073 /* Don't bother clearing storage if the set is all ones. */
5074 if (TREE_CHAIN (elt) != NULL_TREE
5075 || (TREE_PURPOSE (elt) == NULL_TREE
5076 ? nbits != 1
5077 : ( ! host_integerp (TREE_VALUE (elt), 0)
5078 || ! host_integerp (TREE_PURPOSE (elt), 0)
5079 || (tree_low_cst (TREE_VALUE (elt), 0)
5080 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5081 != (HOST_WIDE_INT) nbits))))
5082 clear_storage (target, expr_size (exp));
5083
5084 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5085 {
5086 /* Start of range of element or NULL. */
5087 tree startbit = TREE_PURPOSE (elt);
5088 /* End of range of element, or element value. */
5089 tree endbit = TREE_VALUE (elt);
5090 HOST_WIDE_INT startb, endb;
5091 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5092
5093 bitlength_rtx = expand_expr (bitlength,
5094 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5095
5096 /* Handle non-range tuple element like [ expr ]. */
5097 if (startbit == NULL_TREE)
5098 {
5099 startbit = save_expr (endbit);
5100 endbit = startbit;
5101 }
5102
5103 startbit = convert (sizetype, startbit);
5104 endbit = convert (sizetype, endbit);
5105 if (! integer_zerop (domain_min))
5106 {
5107 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5108 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5109 }
5110 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5111 EXPAND_CONST_ADDRESS);
5112 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5113 EXPAND_CONST_ADDRESS);
5114
5115 if (REG_P (target))
5116 {
5117 targetx
5118 = assign_temp
5119 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5120 (GET_MODE (target), 0),
5121 TYPE_QUAL_CONST)),
5122 0, 1, 1);
5123 emit_move_insn (targetx, target);
5124 }
5125
5126 else if (GET_CODE (target) == MEM)
5127 targetx = target;
5128 else
5129 abort ();
5130
5131 /* Optimization: If startbit and endbit are constants divisible
5132 by BITS_PER_UNIT, call memset instead. */
5133 if (TARGET_MEM_FUNCTIONS
5134 && TREE_CODE (startbit) == INTEGER_CST
5135 && TREE_CODE (endbit) == INTEGER_CST
5136 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5137 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5138 {
5139 emit_library_call (memset_libfunc, LCT_NORMAL,
5140 VOIDmode, 3,
5141 plus_constant (XEXP (targetx, 0),
5142 startb / BITS_PER_UNIT),
5143 Pmode,
5144 constm1_rtx, TYPE_MODE (integer_type_node),
5145 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5146 TYPE_MODE (sizetype));
5147 }
5148 else
5149 emit_library_call (setbits_libfunc, LCT_NORMAL,
5150 VOIDmode, 4, XEXP (targetx, 0),
5151 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5152 startbit_rtx, TYPE_MODE (sizetype),
5153 endbit_rtx, TYPE_MODE (sizetype));
5154
5155 if (REG_P (target))
5156 emit_move_insn (target, targetx);
5157 }
5158 }
5159
5160 else
5161 abort ();
5162 }
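
/* Summary example (annotation, not part of the original GCC source): the
   three arms of store_constructor correspond to the kinds of aggregate
   initializer it can see, e.g.

       struct s x = { 1, 2 };          RECORD_TYPE / UNION_TYPE arm
       int v[4]   = { 1, 2, 3, 4 };    ARRAY_TYPE / VECTOR_TYPE arm
       Pascal/Chill-style set values   SET_TYPE arm

   Anything else reaches the final abort ().  */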
5163
5164 /* Store the value of EXP (an expression tree)
5165 into a subfield of TARGET which has mode MODE and occupies
5166 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5167 If MODE is VOIDmode, it means that we are storing into a bit-field.
5168
5169 If VALUE_MODE is VOIDmode, return nothing in particular.
5170 UNSIGNEDP is not used in this case.
5171
5172 Otherwise, return an rtx for the value stored. This rtx
5173 has mode VALUE_MODE if that is convenient to do.
5174 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5175
5176 TYPE is the type of the underlying object,
5177
5178 ALIAS_SET is the alias set for the destination. This value will
5179 (in general) be different from that for TARGET, since TARGET is a
5180 reference to the containing structure. */
5181
5182 static rtx
5183 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5184 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5185 int unsignedp, tree type, int alias_set)
5186 {
5187 HOST_WIDE_INT width_mask = 0;
5188
5189 if (TREE_CODE (exp) == ERROR_MARK)
5190 return const0_rtx;
5191
5192 /* If we have nothing to store, do nothing unless the expression has
5193 side-effects. */
5194 if (bitsize == 0)
5195 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5196 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5197 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5198
5199 /* If we are storing into an unaligned field of an aligned union that is
5200 in a register, we may have the mode of TARGET being an integer mode but
5201 MODE == BLKmode. In that case, get an aligned object whose size and
5202 alignment are the same as TARGET and store TARGET into it (we can avoid
5203 the store if the field being stored is the entire width of TARGET). Then
5204 call ourselves recursively to store the field into a BLKmode version of
5205 that object. Finally, load from the object into TARGET. This is not
5206 very efficient in general, but should only be slightly more expensive
5207 than the otherwise-required unaligned accesses. Perhaps this can be
5208 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5209 twice, once with emit_move_insn and once via store_field. */
5210
5211 if (mode == BLKmode
5212 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5213 {
5214 rtx object = assign_temp (type, 0, 1, 1);
5215 rtx blk_object = adjust_address (object, BLKmode, 0);
5216
5217 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5218 emit_move_insn (object, target);
5219
5220 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5221 alias_set);
5222
5223 emit_move_insn (target, object);
5224
5225 /* We want to return the BLKmode version of the data. */
5226 return blk_object;
5227 }
5228
5229 if (GET_CODE (target) == CONCAT)
5230 {
5231 /* We're storing into a struct containing a single __complex. */
5232
5233 if (bitpos != 0)
5234 abort ();
5235 return store_expr (exp, target, 0);
5236 }
5237
5238 /* If the structure is in a register or if the component
5239 is a bit field, we cannot use addressing to access it.
5240 Use bit-field techniques or SUBREG to store in it. */
5241
5242 if (mode == VOIDmode
5243 || (mode != BLKmode && ! direct_store[(int) mode]
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5245 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5246 || GET_CODE (target) == REG
5247 || GET_CODE (target) == SUBREG
5248 /* If the field isn't aligned enough to store as an ordinary memref,
5249 store it as a bit field. */
5250 || (mode != BLKmode
5251 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5252 || bitpos % GET_MODE_ALIGNMENT (mode))
5253 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5254 || (bitpos % BITS_PER_UNIT != 0)))
5255 /* If the RHS and field are a constant size and the size of the
5256 RHS isn't the same size as the bitfield, we must use bitfield
5257 operations. */
5258 || (bitsize >= 0
5259 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5260 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5261 {
5262 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5263
5264 /* If BITSIZE is narrower than the size of the type of EXP
5265 we will be narrowing TEMP. Normally, what's wanted are the
5266 low-order bits. However, if EXP's type is a record and this is
5267 a big-endian machine, we want the upper BITSIZE bits.  */
5268 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5269 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5270 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5271 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5272 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5273 - bitsize),
5274 NULL_RTX, 1);
5275
5276 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5277 MODE. */
5278 if (mode != VOIDmode && mode != BLKmode
5279 && mode != TYPE_MODE (TREE_TYPE (exp)))
5280 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5281
5282 /* If the modes of TARGET and TEMP are both BLKmode, both
5283 must be in memory and BITPOS must be aligned on a byte
5284 boundary. If so, we simply do a block copy. */
5285 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5286 {
5287 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5288 || bitpos % BITS_PER_UNIT != 0)
5289 abort ();
5290
5291 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5292 emit_block_move (target, temp,
5293 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5294 / BITS_PER_UNIT),
5295 BLOCK_OP_NORMAL);
5296
5297 return value_mode == VOIDmode ? const0_rtx : target;
5298 }
5299
5300 /* Store the value in the bitfield. */
5301 store_bit_field (target, bitsize, bitpos, mode, temp,
5302 int_size_in_bytes (type));
5303
5304 if (value_mode != VOIDmode)
5305 {
5306 /* The caller wants an rtx for the value.
5307 If possible, avoid refetching from the bitfield itself. */
5308 if (width_mask != 0
5309 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5310 {
5311 tree count;
5312 enum machine_mode tmode;
5313
5314 tmode = GET_MODE (temp);
5315 if (tmode == VOIDmode)
5316 tmode = value_mode;
5317
5318 if (unsignedp)
5319 return expand_and (tmode, temp,
5320 gen_int_mode (width_mask, tmode),
5321 NULL_RTX);
5322
5323 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5324 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5325 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5326 }
5327
5328 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5329 NULL_RTX, value_mode, VOIDmode,
5330 int_size_in_bytes (type));
5331 }
5332 return const0_rtx;
5333 }
5334 else
5335 {
5336 rtx addr = XEXP (target, 0);
5337 rtx to_rtx = target;
5338
5339 /* If a value is wanted, it must be the lhs;
5340 so make the address stable for multiple use. */
5341
5342 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5343 && ! CONSTANT_ADDRESS_P (addr)
5344 /* A frame-pointer reference is already stable. */
5345 && ! (GET_CODE (addr) == PLUS
5346 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5347 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5348 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5349 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5350
5351 /* Now build a reference to just the desired component. */
5352
5353 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5354
5355 if (to_rtx == target)
5356 to_rtx = copy_rtx (to_rtx);
5357
5358 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5359 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5360 set_mem_alias_set (to_rtx, alias_set);
5361
5362 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5363 }
5364 }
5365 \f
5366 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5367 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5368 codes and find the ultimate containing object, which we return.
5369
5370 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5371 bit position, and *PUNSIGNEDP to the signedness of the field.
5372 If the position of the field is variable, we store a tree
5373 giving the variable offset (in units) in *POFFSET.
5374 This offset is in addition to the bit position.
5375 If the position is not variable, we store 0 in *POFFSET.
5376
5377 If any of the extraction expressions is volatile,
5378 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5379
5380 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5381 is a mode that can be used to access the field. In that case, *PBITSIZE
5382 is redundant.
5383
5384 If the field describes a variable-sized object, *PMODE is set to
5385 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5386 this case, but the address of the object can be found. */
5387
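/* Illustrative usage, mirroring the normal_inner_ref case later in this
   file; the declarations are local to the example:

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				     &mode1, &unsignedp, &volatilep);

   TEM is the ultimate containing object; the caller expands it and then
   applies BITSIZE, BITPOS and OFFSET to reach the component itself.  */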
5388 tree
5389 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5390 HOST_WIDE_INT *pbitpos, tree *poffset,
5391 enum machine_mode *pmode, int *punsignedp,
5392 int *pvolatilep)
5393 {
5394 tree size_tree = 0;
5395 enum machine_mode mode = VOIDmode;
5396 tree offset = size_zero_node;
5397 tree bit_offset = bitsize_zero_node;
5398 tree placeholder_ptr = 0;
5399 tree tem;
5400
5401 /* First get the mode, signedness, and size. We do this from just the
5402 outermost expression. */
5403 if (TREE_CODE (exp) == COMPONENT_REF)
5404 {
5405 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5406 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5407 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5408
5409 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5410 }
5411 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5412 {
5413 size_tree = TREE_OPERAND (exp, 1);
5414 *punsignedp = TREE_UNSIGNED (exp);
5415 }
5416 else
5417 {
5418 mode = TYPE_MODE (TREE_TYPE (exp));
5419 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5420
5421 if (mode == BLKmode)
5422 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5423 else
5424 *pbitsize = GET_MODE_BITSIZE (mode);
5425 }
5426
5427 if (size_tree != 0)
5428 {
5429 if (! host_integerp (size_tree, 1))
5430 mode = BLKmode, *pbitsize = -1;
5431 else
5432 *pbitsize = tree_low_cst (size_tree, 1);
5433 }
5434
5435 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5436 and find the ultimate containing object. */
5437 while (1)
5438 {
5439 if (TREE_CODE (exp) == BIT_FIELD_REF)
5440 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5441 else if (TREE_CODE (exp) == COMPONENT_REF)
5442 {
5443 tree field = TREE_OPERAND (exp, 1);
5444 tree this_offset = DECL_FIELD_OFFSET (field);
5445
5446 /* If this field hasn't been filled in yet, don't go
5447 past it. This should only happen when folding expressions
5448 made during type construction. */
5449 if (this_offset == 0)
5450 break;
5451 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5452 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5453
5454 offset = size_binop (PLUS_EXPR, offset, this_offset);
5455 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5456 DECL_FIELD_BIT_OFFSET (field));
5457
5458 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5459 }
5460
5461 else if (TREE_CODE (exp) == ARRAY_REF
5462 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5463 {
5464 tree index = TREE_OPERAND (exp, 1);
5465 tree array = TREE_OPERAND (exp, 0);
5466 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5467 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5468 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5469
5470 /* We assume all arrays have sizes that are a multiple of a byte.
5471 First subtract the lower bound, if any, in the type of the
5472 index, then convert to sizetype and multiply by the size of the
5473 array element. */
5474 if (low_bound != 0 && ! integer_zerop (low_bound))
5475 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5476 index, low_bound));
5477
5478 /* If the index has a self-referential type, pass it to a
5479 WITH_RECORD_EXPR; if the component size is self-referential, pass
5480 our component to one.  */
5481 if (CONTAINS_PLACEHOLDER_P (index))
5482 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5483 if (CONTAINS_PLACEHOLDER_P (unit_size))
5484 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5485
5486 offset = size_binop (PLUS_EXPR, offset,
5487 size_binop (MULT_EXPR,
5488 convert (sizetype, index),
5489 unit_size));
5490 }
5491
5492 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5493 {
5494 tree new = find_placeholder (exp, &placeholder_ptr);
5495
5496 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5497 We might have been called from tree optimization where we
5498 haven't set up an object yet. */
5499 if (new == 0)
5500 break;
5501 else
5502 exp = new;
5503
5504 continue;
5505 }
5506
5507 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5508 conversions that don't change the mode, and all view conversions
5509 except those that need to "step up" the alignment. */
5510 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5511 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5512 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5513 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5514 && STRICT_ALIGNMENT
5515 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5516 < BIGGEST_ALIGNMENT)
5517 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5518 || TYPE_ALIGN_OK (TREE_TYPE
5519 (TREE_OPERAND (exp, 0))))))
5520 && ! ((TREE_CODE (exp) == NOP_EXPR
5521 || TREE_CODE (exp) == CONVERT_EXPR)
5522 && (TYPE_MODE (TREE_TYPE (exp))
5523 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5524 break;
5525
5526 /* If any reference in the chain is volatile, the effect is volatile. */
5527 if (TREE_THIS_VOLATILE (exp))
5528 *pvolatilep = 1;
5529
5530 exp = TREE_OPERAND (exp, 0);
5531 }
5532
5533 /* If OFFSET is constant, see if we can return the whole thing as a
5534 constant bit position. Otherwise, split it up. */
5535 if (host_integerp (offset, 0)
5536 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5537 bitsize_unit_node))
5538 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5539 && host_integerp (tem, 0))
5540 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5541 else
5542 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5543
5544 *pmode = mode;
5545 return exp;
5546 }
5547
5548 /* Return 1 if T is an expression that get_inner_reference handles. */
5549
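/* For example (illustrative), a COMPONENT_REF such as a.b satisfies this
   predicate, while a plain VAR_DECL or a PLUS_EXPR does not.  */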
5550 int
5551 handled_component_p (tree t)
5552 {
5553 switch (TREE_CODE (t))
5554 {
5555 case BIT_FIELD_REF:
5556 case COMPONENT_REF:
5557 case ARRAY_REF:
5558 case ARRAY_RANGE_REF:
5559 case NON_LVALUE_EXPR:
5560 case VIEW_CONVERT_EXPR:
5561 return 1;
5562
5563 /* ??? Sure they are handled, but get_inner_reference may return
5564 a different PBITSIZE, depending upon whether the expression is
5565 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5566 case NOP_EXPR:
5567 case CONVERT_EXPR:
5568 return (TYPE_MODE (TREE_TYPE (t))
5569 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5570
5571 default:
5572 return 0;
5573 }
5574 }
5575 \f
5576 /* Given an rtx VALUE that may contain additions and multiplications, return
5577 an equivalent value that just refers to a register, memory, or constant.
5578 This is done by generating instructions to perform the arithmetic and
5579 returning a pseudo-register containing the value.
5580
5581 The returned value may be a REG, SUBREG, MEM or constant. */
5582
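/* Sketch of a typical use (an assumption, not taken from this file): an
   address built up by pointer arithmetic can be legitimized with something
   like

     rtx addr = force_operand (gen_rtx_PLUS (Pmode, base, index), NULL_RTX);

   where BASE and INDEX stand for previously computed rtx values; the
   result is then suitable wherever a register, MEM or constant is.  */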
5583 rtx
5584 force_operand (rtx value, rtx target)
5585 {
5586 rtx op1, op2;
5587 /* Use subtarget as the target for operand 0 of a binary operation. */
5588 rtx subtarget = get_subtarget (target);
5589 enum rtx_code code = GET_CODE (value);
5590
5591 /* Check for a PIC address load. */
5592 if ((code == PLUS || code == MINUS)
5593 && XEXP (value, 0) == pic_offset_table_rtx
5594 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5595 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5596 || GET_CODE (XEXP (value, 1)) == CONST))
5597 {
5598 if (!subtarget)
5599 subtarget = gen_reg_rtx (GET_MODE (value));
5600 emit_move_insn (subtarget, value);
5601 return subtarget;
5602 }
5603
5604 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5605 {
5606 if (!target)
5607 target = gen_reg_rtx (GET_MODE (value));
5608 convert_move (target, force_operand (XEXP (value, 0), NULL),
5609 code == ZERO_EXTEND);
5610 return target;
5611 }
5612
5613 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5614 {
5615 op2 = XEXP (value, 1);
5616 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5617 subtarget = 0;
5618 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5619 {
5620 code = PLUS;
5621 op2 = negate_rtx (GET_MODE (value), op2);
5622 }
5623
5624 /* Check for an addition with OP2 a constant integer and our first
5625 operand a PLUS of a virtual register and something else. In that
5626 case, we want to emit the sum of the virtual register and the
5627 constant first and then add the other value. This allows virtual
5628 register instantiation to simply modify the constant rather than
5629 creating another one around this addition. */
5630 if (code == PLUS && GET_CODE (op2) == CONST_INT
5631 && GET_CODE (XEXP (value, 0)) == PLUS
5632 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5633 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5634 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5635 {
5636 rtx temp = expand_simple_binop (GET_MODE (value), code,
5637 XEXP (XEXP (value, 0), 0), op2,
5638 subtarget, 0, OPTAB_LIB_WIDEN);
5639 return expand_simple_binop (GET_MODE (value), code, temp,
5640 force_operand (XEXP (XEXP (value,
5641 0), 1), 0),
5642 target, 0, OPTAB_LIB_WIDEN);
5643 }
5644
5645 op1 = force_operand (XEXP (value, 0), subtarget);
5646 op2 = force_operand (op2, NULL_RTX);
5647 switch (code)
5648 {
5649 case MULT:
5650 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5651 case DIV:
5652 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5653 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5654 target, 1, OPTAB_LIB_WIDEN);
5655 else
5656 return expand_divmod (0,
5657 FLOAT_MODE_P (GET_MODE (value))
5658 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5659 GET_MODE (value), op1, op2, target, 0);
5660 break;
5661 case MOD:
5662 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5663 target, 0);
5664 break;
5665 case UDIV:
5666 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5667 target, 1);
5668 break;
5669 case UMOD:
5670 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5671 target, 1);
5672 break;
5673 case ASHIFTRT:
5674 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5675 target, 0, OPTAB_LIB_WIDEN);
5676 break;
5677 default:
5678 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5679 target, 1, OPTAB_LIB_WIDEN);
5680 }
5681 }
5682 if (GET_RTX_CLASS (code) == '1')
5683 {
5684 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5685 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5686 }
5687
5688 #ifdef INSN_SCHEDULING
5689 /* On machines that have insn scheduling, we want all memory references to be
5690 explicit, so we need to deal with paradoxical SUBREGs of MEMs here.  */
5691 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5692 && (GET_MODE_SIZE (GET_MODE (value))
5693 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5694 value
5695 = simplify_gen_subreg (GET_MODE (value),
5696 force_reg (GET_MODE (SUBREG_REG (value)),
5697 force_operand (SUBREG_REG (value),
5698 NULL_RTX)),
5699 GET_MODE (SUBREG_REG (value)),
5700 SUBREG_BYTE (value));
5701 #endif
5702
5703 return value;
5704 }
5705 \f
5706 /* Subroutine of expand_expr: return nonzero iff there is no way that
5707 EXP can reference X, which is being modified. TOP_P is nonzero if this
5708 call is going to be used to determine whether we need a temporary
5709 for EXP, as opposed to a recursive call to this function.
5710
5711 It is always safe for this routine to return zero since it merely
5712 searches for optimization opportunities. */
5713
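/* Illustrative usage, as in expand_operands further down in this file:

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. give up on reusing TARGET whenever this predicate cannot prove
   that expanding EXP1 leaves it untouched.  */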
5714 int
5715 safe_from_p (rtx x, tree exp, int top_p)
5716 {
5717 rtx exp_rtl = 0;
5718 int i, nops;
5719 static tree save_expr_list;
5720
5721 if (x == 0
5722 /* If EXP has varying size, we MUST use a target since we currently
5723 have no way of allocating temporaries of variable size
5724 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5725 So we assume here that something at a higher level has prevented a
5726 clash. This is somewhat bogus, but the best we can do. Only
5727 do this when X is BLKmode and when we are at the top level. */
5728 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5729 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5730 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5731 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5732 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5733 != INTEGER_CST)
5734 && GET_MODE (x) == BLKmode)
5735 /* If X is in the outgoing argument area, it is always safe. */
5736 || (GET_CODE (x) == MEM
5737 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5738 || (GET_CODE (XEXP (x, 0)) == PLUS
5739 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5740 return 1;
5741
5742 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5743 find the underlying pseudo. */
5744 if (GET_CODE (x) == SUBREG)
5745 {
5746 x = SUBREG_REG (x);
5747 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5748 return 0;
5749 }
5750
5751 /* A SAVE_EXPR might appear many times in the expression passed to the
5752 top-level safe_from_p call, and if it has a complex subexpression,
5753 examining it multiple times could result in a combinatorial explosion.
5754 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5755 with optimization took about 28 minutes to compile -- even though it was
5756 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5757 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5758 we have processed. Note that the only test of top_p was above. */
5759
5760 if (top_p)
5761 {
5762 int rtn;
5763 tree t;
5764
5765 save_expr_list = 0;
5766
5767 rtn = safe_from_p (x, exp, 0);
5768
5769 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5770 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5771
5772 return rtn;
5773 }
5774
5775 /* Now look at our tree code and possibly recurse. */
5776 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5777 {
5778 case 'd':
5779 exp_rtl = DECL_RTL_IF_SET (exp);
5780 break;
5781
5782 case 'c':
5783 return 1;
5784
5785 case 'x':
5786 if (TREE_CODE (exp) == TREE_LIST)
5787 {
5788 while (1)
5789 {
5790 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5791 return 0;
5792 exp = TREE_CHAIN (exp);
5793 if (!exp)
5794 return 1;
5795 if (TREE_CODE (exp) != TREE_LIST)
5796 return safe_from_p (x, exp, 0);
5797 }
5798 }
5799 else if (TREE_CODE (exp) == ERROR_MARK)
5800 return 1; /* An already-visited SAVE_EXPR? */
5801 else
5802 return 0;
5803
5804 case '2':
5805 case '<':
5806 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5807 return 0;
5808 /* Fall through. */
5809
5810 case '1':
5811 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5812
5813 case 'e':
5814 case 'r':
5815 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5816 the expression. If it is set, we conflict iff we are that rtx or
5817 both are in memory. Otherwise, we check all operands of the
5818 expression recursively. */
5819
5820 switch (TREE_CODE (exp))
5821 {
5822 case ADDR_EXPR:
5823 /* If the operand is static or we are static, we can't conflict.
5824 Likewise if we don't conflict with the operand at all. */
5825 if (staticp (TREE_OPERAND (exp, 0))
5826 || TREE_STATIC (exp)
5827 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5828 return 1;
5829
5830 /* Otherwise, the only way this can conflict is if we are taking
5831 the address of a DECL whose address is part of X, which is
5832 very rare. */
5833 exp = TREE_OPERAND (exp, 0);
5834 if (DECL_P (exp))
5835 {
5836 if (!DECL_RTL_SET_P (exp)
5837 || GET_CODE (DECL_RTL (exp)) != MEM)
5838 return 0;
5839 else
5840 exp_rtl = XEXP (DECL_RTL (exp), 0);
5841 }
5842 break;
5843
5844 case INDIRECT_REF:
5845 if (GET_CODE (x) == MEM
5846 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5847 get_alias_set (exp)))
5848 return 0;
5849 break;
5850
5851 case CALL_EXPR:
5852 /* Assume that the call will clobber all hard registers and
5853 all of memory. */
5854 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5855 || GET_CODE (x) == MEM)
5856 return 0;
5857 break;
5858
5859 case RTL_EXPR:
5860 /* If a sequence exists, we would have to scan every instruction
5861 in the sequence to see if it was safe. This is probably not
5862 worthwhile. */
5863 if (RTL_EXPR_SEQUENCE (exp))
5864 return 0;
5865
5866 exp_rtl = RTL_EXPR_RTL (exp);
5867 break;
5868
5869 case WITH_CLEANUP_EXPR:
5870 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5871 break;
5872
5873 case CLEANUP_POINT_EXPR:
5874 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5875
5876 case SAVE_EXPR:
5877 exp_rtl = SAVE_EXPR_RTL (exp);
5878 if (exp_rtl)
5879 break;
5880
5881 /* If we've already scanned this, don't do it again. Otherwise,
5882 show we've scanned it and record it so the flag can be cleared when
5883 we're done.  */
5884 if (TREE_PRIVATE (exp))
5885 return 1;
5886
5887 TREE_PRIVATE (exp) = 1;
5888 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5889 {
5890 TREE_PRIVATE (exp) = 0;
5891 return 0;
5892 }
5893
5894 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5895 return 1;
5896
5897 case BIND_EXPR:
5898 /* The only operand we look at is operand 1. The rest aren't
5899 part of the expression. */
5900 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5901
5902 default:
5903 break;
5904 }
5905
5906 /* If we have an rtx, we do not need to scan our operands. */
5907 if (exp_rtl)
5908 break;
5909
5910 nops = first_rtl_op (TREE_CODE (exp));
5911 for (i = 0; i < nops; i++)
5912 if (TREE_OPERAND (exp, i) != 0
5913 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5914 return 0;
5915
5916 /* If this is a language-specific tree code, it may require
5917 special handling. */
5918 if ((unsigned int) TREE_CODE (exp)
5919 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5920 && !(*lang_hooks.safe_from_p) (x, exp))
5921 return 0;
5922 }
5923
5924 /* If we have an rtl, find any enclosed object. Then see if we conflict
5925 with it. */
5926 if (exp_rtl)
5927 {
5928 if (GET_CODE (exp_rtl) == SUBREG)
5929 {
5930 exp_rtl = SUBREG_REG (exp_rtl);
5931 if (GET_CODE (exp_rtl) == REG
5932 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5933 return 0;
5934 }
5935
5936 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5937 are memory and they conflict. */
5938 return ! (rtx_equal_p (x, exp_rtl)
5939 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5940 && true_dependence (exp_rtl, VOIDmode, x,
5941 rtx_addr_varies_p)));
5942 }
5943
5944 /* If we reach here, it is safe. */
5945 return 1;
5946 }
5947
5948 /* Subroutine of expand_expr: return rtx if EXP is a
5949 variable or parameter; else return 0. */
5950
5951 static rtx
5952 var_rtx (tree exp)
5953 {
5954 STRIP_NOPS (exp);
5955 switch (TREE_CODE (exp))
5956 {
5957 case PARM_DECL:
5958 case VAR_DECL:
5959 return DECL_RTL (exp);
5960 default:
5961 return 0;
5962 }
5963 }
5964 \f
5965 /* Return the highest power of two that EXP is known to be a multiple of.
5966 This is used in updating alignment of MEMs in array references. */
5967
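/* Worked example (illustrative): for the tree (i * 8) + 4 with I unknown,
   the MULT_EXPR case yields 1 * 8 = 8 and the PLUS_EXPR case takes
   MIN (8, 4) = 4, so a MEM addressed by that expression may be assumed
   to be 4-byte aligned.  */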
5968 static unsigned HOST_WIDE_INT
5969 highest_pow2_factor (tree exp)
5970 {
5971 unsigned HOST_WIDE_INT c0, c1;
5972
5973 switch (TREE_CODE (exp))
5974 {
5975 case INTEGER_CST:
5976 /* We can find the lowest bit that's a one. If the low
5977 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5978 We need to handle this case since we can find it in a COND_EXPR,
5979 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5980 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5981 later ICE. */
5982 if (TREE_CONSTANT_OVERFLOW (exp))
5983 return BIGGEST_ALIGNMENT;
5984 else
5985 {
5986 /* Note: tree_low_cst is intentionally not used here;
5987 we don't care about the upper bits. */
5988 c0 = TREE_INT_CST_LOW (exp);
5989 c0 &= -c0;
5990 return c0 ? c0 : BIGGEST_ALIGNMENT;
5991 }
5992 break;
5993
5994 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5995 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5996 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5997 return MIN (c0, c1);
5998
5999 case MULT_EXPR:
6000 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6001 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6002 return c0 * c1;
6003
6004 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6005 case CEIL_DIV_EXPR:
6006 if (integer_pow2p (TREE_OPERAND (exp, 1))
6007 && host_integerp (TREE_OPERAND (exp, 1), 1))
6008 {
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6010 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6011 return MAX (1, c0 / c1);
6012 }
6013 break;
6014
6015 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6016 case SAVE_EXPR: case WITH_RECORD_EXPR:
6017 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6018
6019 case COMPOUND_EXPR:
6020 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6021
6022 case COND_EXPR:
6023 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6024 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6025 return MIN (c0, c1);
6026
6027 default:
6028 break;
6029 }
6030
6031 return 1;
6032 }
6033
6034 /* Similar, except that it is known that the expression must be a multiple
6035 of the alignment of TYPE. */
6036
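/* E.g. (illustrative) for a TYPE aligned to 64 bits and EXP == i * 2, the
   factor from the expression alone is 2, but the type alignment of 8 bytes
   wins, so the result is 8.  */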
6037 static unsigned HOST_WIDE_INT
6038 highest_pow2_factor_for_type (tree type, tree exp)
6039 {
6040 unsigned HOST_WIDE_INT type_align, factor;
6041
6042 factor = highest_pow2_factor (exp);
6043 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6044 return MAX (factor, type_align);
6045 }
6046 \f
6047 /* Return an object on the placeholder list that matches EXP, a
6048 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6049 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6050 tree.def.  If no such object is found, return 0.  If PLIST is nonzero, it
6051 points to a starting location in the placeholder list (zero meaning the
6052 start of the list); on return, a pointer into the placeholder list at
6053 which the object was found is stored there.  */
6054
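/* Illustrative usage, as in the PLACEHOLDER_EXPR case of expand_expr_real
   below:

     tree placeholder_expr = 0;
     exp = find_placeholder (exp, &placeholder_expr);
     if (exp == 0)
       abort ();

   On success PLACEHOLDER_EXPR names the placeholder-list entry that was
   matched, so the caller can temporarily pop it while expanding EXP.  */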
6055 tree
6056 find_placeholder (tree exp, tree *plist)
6057 {
6058 tree type = TREE_TYPE (exp);
6059 tree placeholder_expr;
6060
6061 for (placeholder_expr
6062 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6063 placeholder_expr != 0;
6064 placeholder_expr = TREE_CHAIN (placeholder_expr))
6065 {
6066 tree need_type = TYPE_MAIN_VARIANT (type);
6067 tree elt;
6068
6069 /* Find the outermost reference that is of the type we want. If none,
6070 see if any object has a type that is a pointer to the type we
6071 want. */
6072 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6073 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6074 || TREE_CODE (elt) == COND_EXPR)
6075 ? TREE_OPERAND (elt, 1)
6076 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6077 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6078 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6079 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6080 ? TREE_OPERAND (elt, 0) : 0))
6081 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6082 {
6083 if (plist)
6084 *plist = placeholder_expr;
6085 return elt;
6086 }
6087
6088 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6089 elt
6090 = ((TREE_CODE (elt) == COMPOUND_EXPR
6091 || TREE_CODE (elt) == COND_EXPR)
6092 ? TREE_OPERAND (elt, 1)
6093 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6095 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6096 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6097 ? TREE_OPERAND (elt, 0) : 0))
6098 if (POINTER_TYPE_P (TREE_TYPE (elt))
6099 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6100 == need_type))
6101 {
6102 if (plist)
6103 *plist = placeholder_expr;
6104 return build1 (INDIRECT_REF, need_type, elt);
6105 }
6106 }
6107
6108 return 0;
6109 }
6110
6111 /* Subroutine of expand_expr. Expand the two operands of a binary
6112 expressions EXP0 and EXP1, placing the results in *OP0 and *OP1.
6113 The value may be stored in TARGET if TARGET is nonzero. The
6114 MODIFIER argument is as documented by expand_expr. */
6115
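/* Illustrative sketch (hypothetical caller, not from this file): a binary
   operator case in expand_expr_real might fetch its operands with

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		      subtarget, &op0, &op1, 0);

   where 0 is the ordinary EXPAND_NORMAL modifier.  */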
6116 static void
6117 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6118 enum expand_modifier modifier)
6119 {
6120 if (! safe_from_p (target, exp1, 1))
6121 target = 0;
6122 if (operand_equal_p (exp0, exp1, 0))
6123 {
6124 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6125 *op1 = copy_rtx (*op0);
6126 }
6127 else
6128 {
6129 /* If we need to preserve evaluation order, copy exp0 into its own
6130 temporary variable so that it can't be clobbered by exp1. */
6131 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6132 exp0 = save_expr (exp0);
6133 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6134 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6135 }
6136 }
6137
6138 \f
6139 /* expand_expr: generate code for computing expression EXP.
6140 An rtx for the computed value is returned. The value is never null.
6141 In the case of a void EXP, const0_rtx is returned.
6142
6143 The value may be stored in TARGET if TARGET is nonzero.
6144 TARGET is just a suggestion; callers must assume that
6145 the rtx returned may not be the same as TARGET.
6146
6147 If TARGET is CONST0_RTX, it means that the value will be ignored.
6148
6149 If TMODE is not VOIDmode, it suggests generating the
6150 result in mode TMODE. But this is done only when convenient.
6151 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6152 TMODE is just a suggestion; callers must assume that
6153 the rtx returned may not have mode TMODE.
6154
6155 Note that TARGET may have neither TMODE nor MODE. In that case, it
6156 probably will not be used.
6157
6158 If MODIFIER is EXPAND_SUM then when EXP is an addition
6159 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6160 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6161 products as above, or REG or MEM, or constant.
6162 Ordinarily in such cases we would output mul or add instructions
6163 and then return a pseudo reg containing the sum.
6164
6165 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6166 it also marks a label as absolutely required (it can't be dead).
6167 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6168 This is used for outputting expressions used in initializers.
6169
6170 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6171 with a constant address even if that address is not normally legitimate.
6172 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6173
6174 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6175 a call parameter. Such targets require special care as we haven't yet
6176 marked TARGET so that it's safe from being trashed by libcalls. We
6177 don't want to use TARGET for anything but the final result;
6178 intermediate values must go elsewhere.  Additionally, calls to
6179 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6180
6181 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6182 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6183 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6184 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6185 recursively. */
6186
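/* Illustrative note (an assumption about the surrounding headers, not
   something defined in this file): most code calls the expand_expr
   wrapper declared in expr.h, e.g.

     rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   which simply forwards to expand_expr_real with a null ALT_RTL.  */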
6187 rtx
6188 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6189 enum expand_modifier modifier, rtx *alt_rtl)
6190 {
6191 rtx op0, op1, temp;
6192 tree type = TREE_TYPE (exp);
6193 int unsignedp = TREE_UNSIGNED (type);
6194 enum machine_mode mode;
6195 enum tree_code code = TREE_CODE (exp);
6196 optab this_optab;
6197 rtx subtarget, original_target;
6198 int ignore;
6199 tree context;
6200
6201 /* Handle ERROR_MARK before anybody tries to access its type. */
6202 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6203 {
6204 op0 = CONST0_RTX (tmode);
6205 if (op0 != 0)
6206 return op0;
6207 return const0_rtx;
6208 }
6209
6210 mode = TYPE_MODE (type);
6211 /* Use subtarget as the target for operand 0 of a binary operation. */
6212 subtarget = get_subtarget (target);
6213 original_target = target;
6214 ignore = (target == const0_rtx
6215 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6216 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6217 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6218 && TREE_CODE (type) == VOID_TYPE));
6219
6220 /* If we are going to ignore this result, we need only do something
6221 if there is a side-effect somewhere in the expression. If there
6222 is, short-circuit the most common cases here. Note that we must
6223 not call expand_expr with anything but const0_rtx in case this
6224 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6225
6226 if (ignore)
6227 {
6228 if (! TREE_SIDE_EFFECTS (exp))
6229 return const0_rtx;
6230
6231 /* Ensure we reference a volatile object even if value is ignored, but
6232 don't do this if all we are doing is taking its address. */
6233 if (TREE_THIS_VOLATILE (exp)
6234 && TREE_CODE (exp) != FUNCTION_DECL
6235 && mode != VOIDmode && mode != BLKmode
6236 && modifier != EXPAND_CONST_ADDRESS)
6237 {
6238 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6239 if (GET_CODE (temp) == MEM)
6240 temp = copy_to_reg (temp);
6241 return const0_rtx;
6242 }
6243
6244 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6245 || code == INDIRECT_REF || code == BUFFER_REF)
6246 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6247 modifier);
6248
6249 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6250 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6251 {
6252 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6253 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6254 return const0_rtx;
6255 }
6256 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6257 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6258 /* If the second operand has no side effects, just evaluate
6259 the first. */
6260 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6261 modifier);
6262 else if (code == BIT_FIELD_REF)
6263 {
6264 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6265 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6266 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6267 return const0_rtx;
6268 }
6269
6270 target = 0;
6271 }
6272
6273 /* If we will do cse, generate all results into pseudo registers
6274 since 1) that allows cse to find more things
6275 and 2) otherwise cse could produce an insn the machine
6276 cannot support. An exception is a CONSTRUCTOR into a multi-word
6277 MEM: that's much more likely to be most efficient into the MEM.
6278 Another is a CALL_EXPR which must return in memory. */
6279
6280 if (! cse_not_expected && mode != BLKmode && target
6281 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6282 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6283 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6284 target = 0;
6285
6286 switch (code)
6287 {
6288 case LABEL_DECL:
6289 {
6290 tree function = decl_function_context (exp);
6291 /* Labels in containing functions, or labels used from initializers,
6292 must be forced. */
6293 if (modifier == EXPAND_INITIALIZER
6294 || (function != current_function_decl
6295 && function != inline_function_decl
6296 && function != 0))
6297 temp = force_label_rtx (exp);
6298 else
6299 temp = label_rtx (exp);
6300
6301 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6302 if (function != current_function_decl
6303 && function != inline_function_decl && function != 0)
6304 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6305 return temp;
6306 }
6307
6308 case PARM_DECL:
6309 if (!DECL_RTL_SET_P (exp))
6310 {
6311 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6312 return CONST0_RTX (mode);
6313 }
6314
6315 /* ... fall through ... */
6316
6317 case VAR_DECL:
6318 /* If a static var's type was incomplete when the decl was written,
6319 but the type is complete now, lay out the decl now. */
6320 if (DECL_SIZE (exp) == 0
6321 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6322 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6323 layout_decl (exp, 0);
6324
6325 /* ... fall through ... */
6326
6327 case FUNCTION_DECL:
6328 case RESULT_DECL:
6329 if (DECL_RTL (exp) == 0)
6330 abort ();
6331
6332 /* Ensure the variable is marked as used even if it doesn't go through
6333 a parser.  If it hasn't been used yet, write out an external
6334 definition. */
6335 if (! TREE_USED (exp))
6336 {
6337 assemble_external (exp);
6338 TREE_USED (exp) = 1;
6339 }
6340
6341 /* Show we haven't gotten RTL for this yet. */
6342 temp = 0;
6343
6344 /* Handle variables inherited from containing functions. */
6345 context = decl_function_context (exp);
6346
6347 /* We treat inline_function_decl as an alias for the current function
6348 because that is the inline function whose vars, types, etc.
6349 are being merged into the current function.
6350 See expand_inline_function. */
6351
6352 if (context != 0 && context != current_function_decl
6353 && context != inline_function_decl
6354 /* If var is static, we don't need a static chain to access it. */
6355 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6356 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6357 {
6358 rtx addr;
6359
6360 /* Mark as non-local and addressable. */
6361 DECL_NONLOCAL (exp) = 1;
6362 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6363 abort ();
6364 (*lang_hooks.mark_addressable) (exp);
6365 if (GET_CODE (DECL_RTL (exp)) != MEM)
6366 abort ();
6367 addr = XEXP (DECL_RTL (exp), 0);
6368 if (GET_CODE (addr) == MEM)
6369 addr
6370 = replace_equiv_address (addr,
6371 fix_lexical_addr (XEXP (addr, 0), exp));
6372 else
6373 addr = fix_lexical_addr (addr, exp);
6374
6375 temp = replace_equiv_address (DECL_RTL (exp), addr);
6376 }
6377
6378 /* This is the case of an array whose size is to be determined
6379 from its initializer, while the initializer is still being parsed.
6380 See expand_decl. */
6381
6382 else if (GET_CODE (DECL_RTL (exp)) == MEM
6383 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6384 temp = validize_mem (DECL_RTL (exp));
6385
6386 /* If DECL_RTL is memory, we are in the normal case and either
6387 the address is not valid or it is not a register and -fforce-addr
6388 is specified, get the address into a register. */
6389
6390 else if (GET_CODE (DECL_RTL (exp)) == MEM
6391 && modifier != EXPAND_CONST_ADDRESS
6392 && modifier != EXPAND_SUM
6393 && modifier != EXPAND_INITIALIZER
6394 && (! memory_address_p (DECL_MODE (exp),
6395 XEXP (DECL_RTL (exp), 0))
6396 || (flag_force_addr
6397 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6398 {
6399 if (alt_rtl)
6400 *alt_rtl = DECL_RTL (exp);
6401 temp = replace_equiv_address (DECL_RTL (exp),
6402 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6403 }
6404
6405 /* If we got something, return it. But first, set the alignment
6406 if the address is a register. */
6407 if (temp != 0)
6408 {
6409 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6410 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6411
6412 return temp;
6413 }
6414
6415 /* If the mode of DECL_RTL does not match that of the decl, it
6416 must be a promoted value. We return a SUBREG of the wanted mode,
6417 but mark it so that we know that it was already extended. */
6418
6419 if (GET_CODE (DECL_RTL (exp)) == REG
6420 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6421 {
6422 /* Get the signedness used for this variable. Ensure we get the
6423 same mode we got when the variable was declared. */
6424 if (GET_MODE (DECL_RTL (exp))
6425 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6426 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6427 abort ();
6428
6429 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6430 SUBREG_PROMOTED_VAR_P (temp) = 1;
6431 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6432 return temp;
6433 }
6434
6435 return DECL_RTL (exp);
6436
6437 case INTEGER_CST:
6438 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6439 TREE_INT_CST_HIGH (exp), mode);
6440
6441 /* ??? If overflow is set, fold will have done an incomplete job,
6442 which can result in (plus xx (const_int 0)), which can get
6443 simplified by validate_replace_rtx during virtual register
6444 instantiation, which can result in unrecognizable insns.
6445 Avoid this by forcing all overflows into registers. */
6446 if (TREE_CONSTANT_OVERFLOW (exp)
6447 && modifier != EXPAND_INITIALIZER)
6448 temp = force_reg (mode, temp);
6449
6450 return temp;
6451
6452 case VECTOR_CST:
6453 return const_vector_from_tree (exp);
6454
6455 case CONST_DECL:
6456 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6457
6458 case REAL_CST:
6459 /* If optimized, generate immediate CONST_DOUBLE
6460 which will be turned into memory by reload if necessary.
6461
6462 We used to force a register so that loop.c could see it. But
6463 this does not allow gen_* patterns to perform optimizations with
6464 the constants. It also produces two insns in cases like "x = 1.0;".
6465 On most machines, floating-point constants are not permitted in
6466 many insns, so we'd end up copying it to a register in any case.
6467
6468 Now, we do the copying in expand_binop, if appropriate. */
6469 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6470 TYPE_MODE (TREE_TYPE (exp)));
6471
6472 case COMPLEX_CST:
6473 /* Handle evaluating a complex constant in a CONCAT target. */
6474 if (original_target && GET_CODE (original_target) == CONCAT)
6475 {
6476 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6477 rtx rtarg, itarg;
6478
6479 rtarg = XEXP (original_target, 0);
6480 itarg = XEXP (original_target, 1);
6481
6482 /* Move the real and imaginary parts separately. */
6483 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6484 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6485
6486 if (op0 != rtarg)
6487 emit_move_insn (rtarg, op0);
6488 if (op1 != itarg)
6489 emit_move_insn (itarg, op1);
6490
6491 return original_target;
6492 }
6493
6494 /* ... fall through ... */
6495
6496 case STRING_CST:
6497 temp = output_constant_def (exp, 1);
6498
6499 /* temp contains a constant address.
6500 On RISC machines where a constant address isn't valid,
6501 make some insns to get that address into a register. */
6502 if (modifier != EXPAND_CONST_ADDRESS
6503 && modifier != EXPAND_INITIALIZER
6504 && modifier != EXPAND_SUM
6505 && (! memory_address_p (mode, XEXP (temp, 0))
6506 || flag_force_addr))
6507 return replace_equiv_address (temp,
6508 copy_rtx (XEXP (temp, 0)));
6509 return temp;
6510
6511 case EXPR_WITH_FILE_LOCATION:
6512 {
6513 rtx to_return;
6514 struct file_stack fs;
6515
6516 fs.location = input_location;
6517 fs.next = expr_wfl_stack;
6518 input_filename = EXPR_WFL_FILENAME (exp);
6519 input_line = EXPR_WFL_LINENO (exp);
6520 expr_wfl_stack = &fs;
6521 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6522 emit_line_note (input_location);
6523 /* Possibly avoid switching back and forth here. */
6524 to_return = expand_expr (EXPR_WFL_NODE (exp),
6525 (ignore ? const0_rtx : target),
6526 tmode, modifier);
6527 if (expr_wfl_stack != &fs)
6528 abort ();
6529 input_location = fs.location;
6530 expr_wfl_stack = fs.next;
6531 return to_return;
6532 }
6533
6534 case SAVE_EXPR:
6535 context = decl_function_context (exp);
6536
6537 /* If this SAVE_EXPR was at global context, assume we are an
6538 initialization function and move it into our context. */
6539 if (context == 0)
6540 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6541
6542 /* We treat inline_function_decl as an alias for the current function
6543 because that is the inline function whose vars, types, etc.
6544 are being merged into the current function.
6545 See expand_inline_function. */
6546 if (context == current_function_decl || context == inline_function_decl)
6547 context = 0;
6548
6549 /* If this is non-local, handle it. */
6550 if (context)
6551 {
6552 /* The following call just exists to abort if the context is
6553 not of a containing function. */
6554 find_function_data (context);
6555
6556 temp = SAVE_EXPR_RTL (exp);
6557 if (temp && GET_CODE (temp) == REG)
6558 {
6559 put_var_into_stack (exp, /*rescan=*/true);
6560 temp = SAVE_EXPR_RTL (exp);
6561 }
6562 if (temp == 0 || GET_CODE (temp) != MEM)
6563 abort ();
6564 return
6565 replace_equiv_address (temp,
6566 fix_lexical_addr (XEXP (temp, 0), exp));
6567 }
6568 if (SAVE_EXPR_RTL (exp) == 0)
6569 {
6570 if (mode == VOIDmode)
6571 temp = const0_rtx;
6572 else
6573 temp = assign_temp (build_qualified_type (type,
6574 (TYPE_QUALS (type)
6575 | TYPE_QUAL_CONST)),
6576 3, 0, 0);
6577
6578 SAVE_EXPR_RTL (exp) = temp;
6579 if (!optimize && GET_CODE (temp) == REG)
6580 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6581 save_expr_regs);
6582
6583 /* If the mode of TEMP does not match that of the expression, it
6584 must be a promoted value. We pass store_expr a SUBREG of the
6585 wanted mode but mark it so that we know that it was already
6586 extended. */
6587
6588 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6589 {
6590 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6591 promote_mode (type, mode, &unsignedp, 0);
6592 SUBREG_PROMOTED_VAR_P (temp) = 1;
6593 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6594 }
6595
6596 if (temp == const0_rtx)
6597 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6598 else
6599 store_expr (TREE_OPERAND (exp, 0), temp,
6600 modifier == EXPAND_STACK_PARM ? 2 : 0);
6601
6602 TREE_USED (exp) = 1;
6603 }
6604
6605 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6606 must be a promoted value. We return a SUBREG of the wanted mode,
6607 but mark it so that we know that it was already extended. */
6608
6609 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6610 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6611 {
6612 /* Compute the signedness and make the proper SUBREG. */
6613 promote_mode (type, mode, &unsignedp, 0);
6614 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6615 SUBREG_PROMOTED_VAR_P (temp) = 1;
6616 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6617 return temp;
6618 }
6619
6620 return SAVE_EXPR_RTL (exp);
6621
6622 case UNSAVE_EXPR:
6623 {
6624 rtx temp;
6625 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6626 TREE_OPERAND (exp, 0)
6627 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6628 return temp;
6629 }
6630
6631 case PLACEHOLDER_EXPR:
6632 {
6633 tree old_list = placeholder_list;
6634 tree placeholder_expr = 0;
6635
6636 exp = find_placeholder (exp, &placeholder_expr);
6637 if (exp == 0)
6638 abort ();
6639
6640 placeholder_list = TREE_CHAIN (placeholder_expr);
6641 temp = expand_expr (exp, original_target, tmode, modifier);
6642 placeholder_list = old_list;
6643 return temp;
6644 }
6645
6646 case WITH_RECORD_EXPR:
6647 /* Put the object on the placeholder list, expand our first operand,
6648 and pop the list. */
6649 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6650 placeholder_list);
6651 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6652 modifier);
6653 placeholder_list = TREE_CHAIN (placeholder_list);
6654 return target;
6655
6656 case GOTO_EXPR:
6657 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6658 expand_goto (TREE_OPERAND (exp, 0));
6659 else
6660 expand_computed_goto (TREE_OPERAND (exp, 0));
6661 return const0_rtx;
6662
6663 case EXIT_EXPR:
6664 expand_exit_loop_if_false (NULL,
6665 invert_truthvalue (TREE_OPERAND (exp, 0)));
6666 return const0_rtx;
6667
6668 case LABELED_BLOCK_EXPR:
6669 if (LABELED_BLOCK_BODY (exp))
6670 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6671 /* Should perhaps use expand_label, but this is simpler and safer. */
6672 do_pending_stack_adjust ();
6673 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6674 return const0_rtx;
6675
6676 case EXIT_BLOCK_EXPR:
6677 if (EXIT_BLOCK_RETURN (exp))
6678 sorry ("returned value in block_exit_expr");
6679 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6680 return const0_rtx;
6681
6682 case LOOP_EXPR:
6683 push_temp_slots ();
6684 expand_start_loop (1);
6685 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6686 expand_end_loop ();
6687 pop_temp_slots ();
6688
6689 return const0_rtx;
6690
6691 case BIND_EXPR:
6692 {
6693 tree vars = TREE_OPERAND (exp, 0);
6694
6695 /* Need to open a binding contour here because
6696 if there are any cleanups they must be contained here. */
6697 expand_start_bindings (2);
6698
6699 /* Mark the corresponding BLOCK for output in its proper place. */
6700 if (TREE_OPERAND (exp, 2) != 0
6701 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6702 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6703
6704 /* If VARS have not yet been expanded, expand them now. */
6705 while (vars)
6706 {
6707 if (!DECL_RTL_SET_P (vars))
6708 expand_decl (vars);
6709 expand_decl_init (vars);
6710 vars = TREE_CHAIN (vars);
6711 }
6712
6713 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6714
6715 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6716
6717 return temp;
6718 }
6719
6720 case RTL_EXPR:
6721 if (RTL_EXPR_SEQUENCE (exp))
6722 {
6723 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6724 abort ();
6725 emit_insn (RTL_EXPR_SEQUENCE (exp));
6726 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6727 }
6728 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6729 free_temps_for_rtl_expr (exp);
6730 if (alt_rtl)
6731 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6732 return RTL_EXPR_RTL (exp);
6733
6734 case CONSTRUCTOR:
6735 /* If we don't need the result, just ensure we evaluate any
6736 subexpressions. */
6737 if (ignore)
6738 {
6739 tree elt;
6740
6741 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6742 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6743
6744 return const0_rtx;
6745 }
6746
6747 /* All elts simple constants => refer to a constant in memory. But
6748 if this is a non-BLKmode mode, let it store a field at a time
6749 since that should make a CONST_INT or CONST_DOUBLE when we
6750 fold. Likewise, if we have a target we can use, it is best to
6751 store directly into the target unless the type is large enough
6752 that memcpy will be used. If we are making an initializer and
6753 all operands are constant, put it in memory as well.
6754
6755 FIXME: Avoid trying to fill vector constructors piece-meal.
6756 Output them with output_constant_def below unless we're sure
6757 they're zeros. This should go away when vector initializers
6758 are treated like VECTOR_CST instead of arrays.
6759 */
6760 else if ((TREE_STATIC (exp)
6761 && ((mode == BLKmode
6762 && ! (target != 0 && safe_from_p (target, exp, 1)))
6763 || TREE_ADDRESSABLE (exp)
6764 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6765 && (! MOVE_BY_PIECES_P
6766 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6767 TYPE_ALIGN (type)))
6768 && ((TREE_CODE (type) == VECTOR_TYPE
6769 && !is_zeros_p (exp))
6770 || ! mostly_zeros_p (exp)))))
6771 || ((modifier == EXPAND_INITIALIZER
6772 || modifier == EXPAND_CONST_ADDRESS)
6773 && TREE_CONSTANT (exp)))
6774 {
6775 rtx constructor = output_constant_def (exp, 1);
6776
6777 if (modifier != EXPAND_CONST_ADDRESS
6778 && modifier != EXPAND_INITIALIZER
6779 && modifier != EXPAND_SUM)
6780 constructor = validize_mem (constructor);
6781
6782 return constructor;
6783 }
6784 else
6785 {
6786 /* Handle calls that pass values in multiple non-contiguous
6787 locations. The Irix 6 ABI has examples of this. */
6788 if (target == 0 || ! safe_from_p (target, exp, 1)
6789 || GET_CODE (target) == PARALLEL
6790 || modifier == EXPAND_STACK_PARM)
6791 target
6792 = assign_temp (build_qualified_type (type,
6793 (TYPE_QUALS (type)
6794 | (TREE_READONLY (exp)
6795 * TYPE_QUAL_CONST))),
6796 0, TREE_ADDRESSABLE (exp), 1);
6797
6798 store_constructor (exp, target, 0, int_expr_size (exp));
6799 return target;
6800 }
6801
6802 case INDIRECT_REF:
6803 {
6804 tree exp1 = TREE_OPERAND (exp, 0);
6805 tree index;
6806 tree string = string_constant (exp1, &index);
6807
6808 /* Try to optimize reads from const strings. */
6809 if (string
6810 && TREE_CODE (string) == STRING_CST
6811 && TREE_CODE (index) == INTEGER_CST
6812 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6813 && GET_MODE_CLASS (mode) == MODE_INT
6814 && GET_MODE_SIZE (mode) == 1
6815 && modifier != EXPAND_WRITE)
6816 return gen_int_mode (TREE_STRING_POINTER (string)
6817 [TREE_INT_CST_LOW (index)], mode);
6818
6819 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6820 op0 = memory_address (mode, op0);
6821 temp = gen_rtx_MEM (mode, op0);
6822 set_mem_attributes (temp, exp, 0);
6823
6824 /* If we are writing to this object and its type is a record with
6825 readonly fields, we must mark it as readonly so it will
6826 conflict with readonly references to those fields. */
6827 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6828 RTX_UNCHANGING_P (temp) = 1;
6829
6830 return temp;
6831 }
6832
6833 case ARRAY_REF:
6834 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6835 abort ();
6836
6837 {
6838 tree array = TREE_OPERAND (exp, 0);
6839 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6840 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6841 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6842 HOST_WIDE_INT i;
6843
6844 /* Optimize the special-case of a zero lower bound.
6845
6846 We convert the low_bound to sizetype to avoid some problems
6847 with constant folding. (E.g. suppose the lower bound is 1,
6848 and its mode is QI. Without the conversion, (ARRAY
6849 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6850 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6851
6852 if (! integer_zerop (low_bound))
6853 index = size_diffop (index, convert (sizetype, low_bound));
6854
6855 /* Fold an expression like: "foo"[2].
6856 This is not done in fold so it won't happen inside &.
6857 Don't fold if this is for wide characters since it's too
6858 difficult to do correctly and this is a very rare case. */
6859
6860 if (modifier != EXPAND_CONST_ADDRESS
6861 && modifier != EXPAND_INITIALIZER
6862 && modifier != EXPAND_MEMORY
6863 && TREE_CODE (array) == STRING_CST
6864 && TREE_CODE (index) == INTEGER_CST
6865 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6866 && GET_MODE_CLASS (mode) == MODE_INT
6867 && GET_MODE_SIZE (mode) == 1)
6868 return gen_int_mode (TREE_STRING_POINTER (array)
6869 [TREE_INT_CST_LOW (index)], mode);
6870
6871 /* If this is a constant index into a constant array,
6872 just get the value from the array. Handle both the cases when
6873 we have an explicit constructor and when our operand is a variable
6874 that was declared const. */
6875
6876 if (modifier != EXPAND_CONST_ADDRESS
6877 && modifier != EXPAND_INITIALIZER
6878 && modifier != EXPAND_MEMORY
6879 && TREE_CODE (array) == CONSTRUCTOR
6880 && ! TREE_SIDE_EFFECTS (array)
6881 && TREE_CODE (index) == INTEGER_CST
6882 && 0 > compare_tree_int (index,
6883 list_length (CONSTRUCTOR_ELTS
6884 (TREE_OPERAND (exp, 0)))))
6885 {
6886 tree elem;
6887
6888 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6889 i = TREE_INT_CST_LOW (index);
6890 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6891 ;
6892
6893 if (elem)
6894 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6895 modifier);
6896 }
6897
6898 else if (optimize >= 1
6899 && modifier != EXPAND_CONST_ADDRESS
6900 && modifier != EXPAND_INITIALIZER
6901 && modifier != EXPAND_MEMORY
6902 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6903 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6904 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6905 && targetm.binds_local_p (array))
6906 {
6907 if (TREE_CODE (index) == INTEGER_CST)
6908 {
6909 tree init = DECL_INITIAL (array);
6910
6911 if (TREE_CODE (init) == CONSTRUCTOR)
6912 {
6913 tree elem;
6914
6915 for (elem = CONSTRUCTOR_ELTS (init);
6916 (elem
6917 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6918 elem = TREE_CHAIN (elem))
6919 ;
6920
6921 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6922 return expand_expr (fold (TREE_VALUE (elem)), target,
6923 tmode, modifier);
6924 }
6925 else if (TREE_CODE (init) == STRING_CST
6926 && 0 > compare_tree_int (index,
6927 TREE_STRING_LENGTH (init)))
6928 {
6929 tree type = TREE_TYPE (TREE_TYPE (init));
6930 enum machine_mode mode = TYPE_MODE (type);
6931
6932 if (GET_MODE_CLASS (mode) == MODE_INT
6933 && GET_MODE_SIZE (mode) == 1)
6934 return gen_int_mode (TREE_STRING_POINTER (init)
6935 [TREE_INT_CST_LOW (index)], mode);
6936 }
6937 }
6938 }
6939 }
6940 goto normal_inner_ref;
6941
6942 case COMPONENT_REF:
6943 /* If the operand is a CONSTRUCTOR, we can just extract the
6944 appropriate field if it is present. */
6945 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6946 {
6947 tree elt;
6948
6949 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6950 elt = TREE_CHAIN (elt))
6951 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6952 /* We can normally use the value of the field in the
6953 CONSTRUCTOR. However, if this is a bitfield in
6954 an integral mode that we can fit in a HOST_WIDE_INT,
6955 we must mask only the number of bits in the bitfield,
6956 since this is done implicitly by the constructor. If
6957 the bitfield does not meet either of those conditions,
6958 we can't do this optimization. */
6959 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6960 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6961 == MODE_INT)
6962 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6963 <= HOST_BITS_PER_WIDE_INT))))
6964 {
6965 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6966 && modifier == EXPAND_STACK_PARM)
6967 target = 0;
6968 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6969 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6970 {
6971 HOST_WIDE_INT bitsize
6972 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6973 enum machine_mode imode
6974 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6975
6976 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6977 {
6978 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6979 op0 = expand_and (imode, op0, op1, target);
6980 }
6981 else
6982 {
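                          /* Sign-extend the BITSIZE low-order bits: shift them
                             up to the top of IMODE, then arithmetic-shift them
                             back down so the field's sign bit is replicated.  */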
6983 tree count
6984 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6985 0);
6986
6987 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6988 target, 0);
6989 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6990 target, 0);
6991 }
6992 }
6993
6994 return op0;
6995 }
6996 }
6997 goto normal_inner_ref;
6998
6999 case BIT_FIELD_REF:
7000 case ARRAY_RANGE_REF:
7001 normal_inner_ref:
7002 {
7003 enum machine_mode mode1;
7004 HOST_WIDE_INT bitsize, bitpos;
7005 tree offset;
7006 int volatilep = 0;
7007 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7008 &mode1, &unsignedp, &volatilep);
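              /* get_inner_reference peels the reference down to its base object.
                 E.g. for s.f, with F a 5-bit field at bit offset 3 within S, it
                 returns S in TEM with BITSIZE 5, BITPOS 3 and a null OFFSET; the
                 code below then extracts the field from the expanded base.  */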
7009 rtx orig_op0;
7010
7011 /* If we got back the original object, something is wrong. Perhaps
7012 we are evaluating an expression too early. In any event, don't
7013 infinitely recurse. */
7014 if (tem == exp)
7015 abort ();
7016
 7017         /* If TEM's type is a union of variable size, pass TARGET to the inner
 7018            computation, since it will need a temporary and TARGET is known
 7019            to be suitable as one.  This occurs in unchecked conversion in Ada.  */
7020
7021 orig_op0 = op0
7022 = expand_expr (tem,
7023 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7024 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7025 != INTEGER_CST)
7026 && modifier != EXPAND_STACK_PARM
7027 ? target : NULL_RTX),
7028 VOIDmode,
7029 (modifier == EXPAND_INITIALIZER
7030 || modifier == EXPAND_CONST_ADDRESS
7031 || modifier == EXPAND_STACK_PARM)
7032 ? modifier : EXPAND_NORMAL);
7033
 7034         /* If this is a constant, put it into a register if it is a
 7035            legitimate constant and OFFSET is 0, and into memory if it isn't.  */
7036 if (CONSTANT_P (op0))
7037 {
7038 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7039 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7040 && offset == 0)
7041 op0 = force_reg (mode, op0);
7042 else
7043 op0 = validize_mem (force_const_mem (mode, op0));
7044 }
7045
 7046         /* Otherwise, if this object is not in memory and we either have an
7047 offset or a BLKmode result, put it there. This case can't occur in
7048 C, but can in Ada if we have unchecked conversion of an expression
7049 from a scalar type to an array or record type or for an
7050 ARRAY_RANGE_REF whose type is BLKmode. */
7051 else if (GET_CODE (op0) != MEM
7052 && (offset != 0
7053 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7054 {
7055 /* If the operand is a SAVE_EXPR, we can deal with this by
7056 forcing the SAVE_EXPR into memory. */
7057 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7058 {
7059 put_var_into_stack (TREE_OPERAND (exp, 0),
7060 /*rescan=*/true);
7061 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7062 }
7063 else
7064 {
7065 tree nt
7066 = build_qualified_type (TREE_TYPE (tem),
7067 (TYPE_QUALS (TREE_TYPE (tem))
7068 | TYPE_QUAL_CONST));
7069 rtx memloc = assign_temp (nt, 1, 1, 1);
7070
7071 emit_move_insn (memloc, op0);
7072 op0 = memloc;
7073 }
7074 }
7075
7076 if (offset != 0)
7077 {
7078 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7079 EXPAND_SUM);
7080
7081 if (GET_CODE (op0) != MEM)
7082 abort ();
7083
7084 #ifdef POINTERS_EXTEND_UNSIGNED
7085 if (GET_MODE (offset_rtx) != Pmode)
7086 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7087 #else
7088 if (GET_MODE (offset_rtx) != ptr_mode)
7089 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7090 #endif
7091
7092 if (GET_MODE (op0) == BLKmode
 7093               /* A constant address in OP0 can have VOIDmode; we must
 7094                  not try to call force_reg in that case.  */
7095 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7096 && bitsize != 0
7097 && (bitpos % bitsize) == 0
7098 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7099 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7100 {
7101 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7102 bitpos = 0;
7103 }
7104
7105 op0 = offset_address (op0, offset_rtx,
7106 highest_pow2_factor (offset));
7107 }
7108
7109 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7110 record its alignment as BIGGEST_ALIGNMENT. */
7111 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7112 && is_aligning_offset (offset, tem))
7113 set_mem_align (op0, BIGGEST_ALIGNMENT);
7114
7115 /* Don't forget about volatility even if this is a bitfield. */
7116 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7117 {
7118 if (op0 == orig_op0)
7119 op0 = copy_rtx (op0);
7120
7121 MEM_VOLATILE_P (op0) = 1;
7122 }
7123
 7124         /* The following code doesn't handle CONCAT.
 7125            Assume only bitpos == 0 can be used for CONCAT, since
 7126            one-element arrays have the same mode as their element.  */
7127 if (GET_CODE (op0) == CONCAT)
7128 {
7129 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7130 abort ();
7131 return op0;
7132 }
7133
7134 /* In cases where an aligned union has an unaligned object
7135 as a field, we might be extracting a BLKmode value from
7136 an integer-mode (e.g., SImode) object. Handle this case
7137 by doing the extract into an object as wide as the field
7138 (which we know to be the width of a basic mode), then
7139 storing into memory, and changing the mode to BLKmode. */
7140 if (mode1 == VOIDmode
7141 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7142 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7143 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7144 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7145 && modifier != EXPAND_CONST_ADDRESS
7146 && modifier != EXPAND_INITIALIZER)
7147 /* If the field isn't aligned enough to fetch as a memref,
7148 fetch it as a bit field. */
7149 || (mode1 != BLKmode
7150 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7151 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7152 || (GET_CODE (op0) == MEM
7153 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7154 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7155 && ((modifier == EXPAND_CONST_ADDRESS
7156 || modifier == EXPAND_INITIALIZER)
7157 ? STRICT_ALIGNMENT
7158 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7159 || (bitpos % BITS_PER_UNIT != 0)))
7160 /* If the type and the field are a constant size and the
7161 size of the type isn't the same size as the bitfield,
7162 we must use bitfield operations. */
7163 || (bitsize >= 0
7164 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7165 == INTEGER_CST)
7166 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7167 bitsize)))
7168 {
7169 enum machine_mode ext_mode = mode;
7170
7171 if (ext_mode == BLKmode
7172 && ! (target != 0 && GET_CODE (op0) == MEM
7173 && GET_CODE (target) == MEM
7174 && bitpos % BITS_PER_UNIT == 0))
7175 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7176
7177 if (ext_mode == BLKmode)
7178 {
7179 if (target == 0)
7180 target = assign_temp (type, 0, 1, 1);
7181
7182 if (bitsize == 0)
7183 return target;
7184
7185 /* In this case, BITPOS must start at a byte boundary and
7186 TARGET, if specified, must be a MEM. */
7187 if (GET_CODE (op0) != MEM
7188 || (target != 0 && GET_CODE (target) != MEM)
7189 || bitpos % BITS_PER_UNIT != 0)
7190 abort ();
7191
7192 emit_block_move (target,
7193 adjust_address (op0, VOIDmode,
7194 bitpos / BITS_PER_UNIT),
7195 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7196 / BITS_PER_UNIT),
7197 (modifier == EXPAND_STACK_PARM
7198 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7199
7200 return target;
7201 }
7202
7203 op0 = validize_mem (op0);
7204
7205 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7206 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7207
7208 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7209 (modifier == EXPAND_STACK_PARM
7210 ? NULL_RTX : target),
7211 ext_mode, ext_mode,
7212 int_size_in_bytes (TREE_TYPE (tem)));
7213
7214 /* If the result is a record type and BITSIZE is narrower than
7215 the mode of OP0, an integral mode, and this is a big endian
7216 machine, we must put the field into the high-order bits. */
7217 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7218 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7219 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7220 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7221 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7222 - bitsize),
7223 op0, 1);
7224
7225 if (mode == BLKmode)
7226 {
7227 rtx new = assign_temp (build_qualified_type
7228 ((*lang_hooks.types.type_for_mode)
7229 (ext_mode, 0),
7230 TYPE_QUAL_CONST), 0, 1, 1);
7231
7232 emit_move_insn (new, op0);
7233 op0 = copy_rtx (new);
7234 PUT_MODE (op0, BLKmode);
7235 set_mem_attributes (op0, exp, 1);
7236 }
7237
7238 return op0;
7239 }
7240
7241 /* If the result is BLKmode, use that to access the object
7242 now as well. */
7243 if (mode == BLKmode)
7244 mode1 = BLKmode;
7245
7246 /* Get a reference to just this component. */
7247 if (modifier == EXPAND_CONST_ADDRESS
7248 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7249 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7250 else
7251 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7252
7253 if (op0 == orig_op0)
7254 op0 = copy_rtx (op0);
7255
7256 set_mem_attributes (op0, exp, 0);
7257 if (GET_CODE (XEXP (op0, 0)) == REG)
7258 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7259
7260 MEM_VOLATILE_P (op0) |= volatilep;
7261 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7262 || modifier == EXPAND_CONST_ADDRESS
7263 || modifier == EXPAND_INITIALIZER)
7264 return op0;
7265 else if (target == 0)
7266 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7267
7268 convert_move (target, op0, unsignedp);
7269 return target;
7270 }
7271
7272 case VTABLE_REF:
7273 {
7274 rtx insn, before = get_last_insn (), vtbl_ref;
7275
7276 /* Evaluate the interior expression. */
7277 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7278 tmode, modifier);
7279
7280 /* Get or create an instruction off which to hang a note. */
7281 if (REG_P (subtarget))
7282 {
7283 target = subtarget;
7284 insn = get_last_insn ();
7285 if (insn == before)
7286 abort ();
7287 if (! INSN_P (insn))
7288 insn = prev_nonnote_insn (insn);
7289 }
7290 else
7291 {
7292 target = gen_reg_rtx (GET_MODE (subtarget));
7293 insn = emit_move_insn (target, subtarget);
7294 }
7295
7296 /* Collect the data for the note. */
7297 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7298 vtbl_ref = plus_constant (vtbl_ref,
7299 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7300 /* Discard the initial CONST that was added. */
7301 vtbl_ref = XEXP (vtbl_ref, 0);
7302
7303 REG_NOTES (insn)
7304 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7305
7306 return target;
7307 }
7308
7309 /* Intended for a reference to a buffer of a file-object in Pascal.
7310 But it's not certain that a special tree code will really be
7311 necessary for these. INDIRECT_REF might work for them. */
7312 case BUFFER_REF:
7313 abort ();
7314
7315 case IN_EXPR:
7316 {
7317 /* Pascal set IN expression.
7318
7319 Algorithm:
7320 rlo = set_low - (set_low%bits_per_word);
7321 the_word = set [ (index - rlo)/bits_per_word ];
7322 bit_index = index % bits_per_word;
7323 bitmask = 1 << bit_index;
7324 return !!(the_word & bitmask); */
7325
7326 tree set = TREE_OPERAND (exp, 0);
7327 tree index = TREE_OPERAND (exp, 1);
7328 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7329 tree set_type = TREE_TYPE (set);
7330 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7331 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7332 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7333 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7334 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7335 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7336 rtx setaddr = XEXP (setval, 0);
7337 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7338 rtx rlow;
7339 rtx diff, quo, rem, addr, bit, result;
7340
7341 /* If domain is empty, answer is no. Likewise if index is constant
7342 and out of bounds. */
7343 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7344 && TREE_CODE (set_low_bound) == INTEGER_CST
7345 && tree_int_cst_lt (set_high_bound, set_low_bound))
7346 || (TREE_CODE (index) == INTEGER_CST
7347 && TREE_CODE (set_low_bound) == INTEGER_CST
7348 && tree_int_cst_lt (index, set_low_bound))
7349 || (TREE_CODE (set_high_bound) == INTEGER_CST
7350 && TREE_CODE (index) == INTEGER_CST
7351 && tree_int_cst_lt (set_high_bound, index))))
7352 return const0_rtx;
7353
7354 if (target == 0)
7355 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7356
7357 /* If we get here, we have to generate the code for both cases
7358 (in range and out of range). */
7359
7360 op0 = gen_label_rtx ();
7361 op1 = gen_label_rtx ();
7362
7363 if (! (GET_CODE (index_val) == CONST_INT
7364 && GET_CODE (lo_r) == CONST_INT))
7365 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7366 GET_MODE (index_val), iunsignedp, op1);
7367
7368 if (! (GET_CODE (index_val) == CONST_INT
7369 && GET_CODE (hi_r) == CONST_INT))
7370 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7371 GET_MODE (index_val), iunsignedp, op1);
7372
7373 /* Calculate the element number of bit zero in the first word
7374 of the set. */
7375 if (GET_CODE (lo_r) == CONST_INT)
7376 rlow = GEN_INT (INTVAL (lo_r)
7377 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7378 else
7379 rlow = expand_binop (index_mode, and_optab, lo_r,
7380 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7381 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7382
7383 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7384 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7385
7386 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7387 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7388 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7389 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7390
7391 addr = memory_address (byte_mode,
7392 expand_binop (index_mode, add_optab, diff,
7393 setaddr, NULL_RTX, iunsignedp,
7394 OPTAB_LIB_WIDEN));
7395
7396 /* Extract the bit we want to examine. */
7397 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7398 gen_rtx_MEM (byte_mode, addr),
7399 make_tree (TREE_TYPE (index), rem),
7400 NULL_RTX, 1);
7401 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7402 GET_MODE (target) == byte_mode ? target : 0,
7403 1, OPTAB_LIB_WIDEN);
7404
7405 if (result != target)
7406 convert_move (target, result, 1);
7407
7408 /* Output the code to handle the out-of-range case. */
7409 emit_jump (op0);
7410 emit_label (op1);
7411 emit_move_insn (target, const0_rtx);
7412 emit_label (op0);
7413 return target;
7414 }
7415
7416 case WITH_CLEANUP_EXPR:
7417 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7418 {
7419 WITH_CLEANUP_EXPR_RTL (exp)
7420 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7421 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7422 CLEANUP_EH_ONLY (exp));
7423
7424 /* That's it for this cleanup. */
7425 TREE_OPERAND (exp, 1) = 0;
7426 }
7427 return WITH_CLEANUP_EXPR_RTL (exp);
7428
7429 case CLEANUP_POINT_EXPR:
7430 {
7431 /* Start a new binding layer that will keep track of all cleanup
7432 actions to be performed. */
7433 expand_start_bindings (2);
7434
7435 target_temp_slot_level = temp_slot_level;
7436
7437 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7438 /* If we're going to use this value, load it up now. */
7439 if (! ignore)
7440 op0 = force_not_mem (op0);
7441 preserve_temp_slots (op0);
7442 expand_end_bindings (NULL_TREE, 0, 0);
7443 }
7444 return op0;
7445
7446 case CALL_EXPR:
7447 /* Check for a built-in function. */
7448 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7449 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7450 == FUNCTION_DECL)
7451 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7452 {
7453 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7454 == BUILT_IN_FRONTEND)
7455 return (*lang_hooks.expand_expr) (exp, original_target,
7456 tmode, modifier,
7457 alt_rtl);
7458 else
7459 return expand_builtin (exp, target, subtarget, tmode, ignore);
7460 }
7461
7462 return expand_call (exp, target, ignore);
7463
7464 case NON_LVALUE_EXPR:
7465 case NOP_EXPR:
7466 case CONVERT_EXPR:
7467 case REFERENCE_EXPR:
7468 if (TREE_OPERAND (exp, 0) == error_mark_node)
7469 return const0_rtx;
7470
7471 if (TREE_CODE (type) == UNION_TYPE)
7472 {
7473 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7474
7475 /* If both input and output are BLKmode, this conversion isn't doing
 7476            anything except possibly changing memory attributes.  */
7477 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7478 {
7479 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7480 modifier);
7481
7482 result = copy_rtx (result);
7483 set_mem_attributes (result, exp, 0);
7484 return result;
7485 }
7486
7487 if (target == 0)
7488 {
7489 if (TYPE_MODE (type) != BLKmode)
7490 target = gen_reg_rtx (TYPE_MODE (type));
7491 else
7492 target = assign_temp (type, 0, 1, 1);
7493 }
7494
7495 if (GET_CODE (target) == MEM)
7496 /* Store data into beginning of memory target. */
7497 store_expr (TREE_OPERAND (exp, 0),
7498 adjust_address (target, TYPE_MODE (valtype), 0),
7499 modifier == EXPAND_STACK_PARM ? 2 : 0);
7500
7501 else if (GET_CODE (target) == REG)
7502 /* Store this field into a union of the proper type. */
7503 store_field (target,
7504 MIN ((int_size_in_bytes (TREE_TYPE
7505 (TREE_OPERAND (exp, 0)))
7506 * BITS_PER_UNIT),
7507 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7508 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7509 VOIDmode, 0, type, 0);
7510 else
7511 abort ();
7512
7513 /* Return the entire union. */
7514 return target;
7515 }
7516
7517 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7518 {
7519 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7520 modifier);
7521
7522 /* If the signedness of the conversion differs and OP0 is
7523 a promoted SUBREG, clear that indication since we now
7524 have to do the proper extension. */
7525 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7526 && GET_CODE (op0) == SUBREG)
7527 SUBREG_PROMOTED_VAR_P (op0) = 0;
7528
7529 return op0;
7530 }
7531
7532 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7533 if (GET_MODE (op0) == mode)
7534 return op0;
7535
7536 /* If OP0 is a constant, just convert it into the proper mode. */
7537 if (CONSTANT_P (op0))
7538 {
7539 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7540 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7541
7542 if (modifier == EXPAND_INITIALIZER)
7543 return simplify_gen_subreg (mode, op0, inner_mode,
7544 subreg_lowpart_offset (mode,
7545 inner_mode));
7546 else
7547 return convert_modes (mode, inner_mode, op0,
7548 TREE_UNSIGNED (inner_type));
7549 }
7550
7551 if (modifier == EXPAND_INITIALIZER)
7552 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7553
7554 if (target == 0)
7555 return
7556 convert_to_mode (mode, op0,
7557 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7558 else
7559 convert_move (target, op0,
7560 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7561 return target;
7562
7563 case VIEW_CONVERT_EXPR:
7564 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7565
7566 /* If the input and output modes are both the same, we are done.
7567 Otherwise, if neither mode is BLKmode and both are integral and within
7568 a word, we can use gen_lowpart. If neither is true, make sure the
7569 operand is in memory and convert the MEM to the new mode. */
7570 if (TYPE_MODE (type) == GET_MODE (op0))
7571 ;
7572 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7573 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7574 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7575 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7576 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7577 op0 = gen_lowpart (TYPE_MODE (type), op0);
7578 else if (GET_CODE (op0) != MEM)
7579 {
7580 /* If the operand is not a MEM, force it into memory. Since we
 7581            are going to be changing the mode of the MEM, don't call
7582 force_const_mem for constants because we don't allow pool
7583 constants to change mode. */
7584 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7585
7586 if (TREE_ADDRESSABLE (exp))
7587 abort ();
7588
7589 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7590 target
7591 = assign_stack_temp_for_type
7592 (TYPE_MODE (inner_type),
7593 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7594
7595 emit_move_insn (target, op0);
7596 op0 = target;
7597 }
7598
7599 /* At this point, OP0 is in the correct mode. If the output type is such
7600 that the operand is known to be aligned, indicate that it is.
7601 Otherwise, we need only be concerned about alignment for non-BLKmode
7602 results. */
7603 if (GET_CODE (op0) == MEM)
7604 {
7605 op0 = copy_rtx (op0);
7606
7607 if (TYPE_ALIGN_OK (type))
7608 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7609 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7610 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7611 {
7612 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7613 HOST_WIDE_INT temp_size
7614 = MAX (int_size_in_bytes (inner_type),
7615 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7616 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7617 temp_size, 0, type);
7618 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7619
7620 if (TREE_ADDRESSABLE (exp))
7621 abort ();
7622
7623 if (GET_MODE (op0) == BLKmode)
7624 emit_block_move (new_with_op0_mode, op0,
7625 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7626 (modifier == EXPAND_STACK_PARM
7627 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7628 else
7629 emit_move_insn (new_with_op0_mode, op0);
7630
7631 op0 = new;
7632 }
7633
7634 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7635 }
7636
7637 return op0;
7638
7639 case PLUS_EXPR:
7640 this_optab = ! unsignedp && flag_trapv
7641 && (GET_MODE_CLASS (mode) == MODE_INT)
7642 ? addv_optab : add_optab;
7643
7644 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7645 something else, make sure we add the register to the constant and
7646 then to the other thing. This case can occur during strength
7647 reduction and doing it this way will produce better code if the
7648 frame pointer or argument pointer is eliminated.
7649
7650 fold-const.c will ensure that the constant is always in the inner
7651 PLUS_EXPR, so the only case we need to do anything about is if
7652 sp, ap, or fp is our second argument, in which case we must swap
7653 the innermost first argument and our second argument. */
7654
7655 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7656 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7657 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7658 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7659 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7660 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7661 {
7662 tree t = TREE_OPERAND (exp, 1);
7663
7664 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7665 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7666 }
7667
7668 /* If the result is to be ptr_mode and we are adding an integer to
7669 something, we might be forming a constant. So try to use
7670 plus_constant. If it produces a sum and we can't accept it,
7671 use force_operand. This allows P = &ARR[const] to generate
7672 efficient code on machines where a SYMBOL_REF is not a valid
7673 address.
7674
7675 If this is an EXPAND_SUM call, always return the sum. */
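      /* For instance, when expanding &arr[3] for a global array ARR of 4-byte
         ints, the address reaches here as ARR + 12 and plus_constant can return
         the single constant (const (plus (symbol_ref "arr") (const_int 12)))
         instead of emitting a run-time addition.  */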
7676 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7677 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7678 {
7679 if (modifier == EXPAND_STACK_PARM)
7680 target = 0;
7681 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7682 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7683 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7684 {
7685 rtx constant_part;
7686
7687 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7688 EXPAND_SUM);
7689 /* Use immed_double_const to ensure that the constant is
7690 truncated according to the mode of OP1, then sign extended
7691 to a HOST_WIDE_INT. Using the constant directly can result
7692 in non-canonical RTL in a 64x32 cross compile. */
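            /* E.g. on a 64-bit host compiling for a 32-bit target, an SImode
               value with all 32 bits set must be represented as (const_int -1);
               a CONST_INT of 0xffffffff would not be canonical RTL.  */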
7693 constant_part
7694 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7695 (HOST_WIDE_INT) 0,
7696 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7697 op1 = plus_constant (op1, INTVAL (constant_part));
7698 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7699 op1 = force_operand (op1, target);
7700 return op1;
7701 }
7702
7703 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7704 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7705 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7706 {
7707 rtx constant_part;
7708
7709 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7710 (modifier == EXPAND_INITIALIZER
7711 ? EXPAND_INITIALIZER : EXPAND_SUM));
7712 if (! CONSTANT_P (op0))
7713 {
7714 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7715 VOIDmode, modifier);
7716 /* Return a PLUS if modifier says it's OK. */
7717 if (modifier == EXPAND_SUM
7718 || modifier == EXPAND_INITIALIZER)
7719 return simplify_gen_binary (PLUS, mode, op0, op1);
7720 goto binop2;
7721 }
7722 /* Use immed_double_const to ensure that the constant is
 7723                truncated according to the mode of OP0, then sign extended
7724 to a HOST_WIDE_INT. Using the constant directly can result
7725 in non-canonical RTL in a 64x32 cross compile. */
7726 constant_part
7727 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7728 (HOST_WIDE_INT) 0,
7729 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7730 op0 = plus_constant (op0, INTVAL (constant_part));
7731 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7732 op0 = force_operand (op0, target);
7733 return op0;
7734 }
7735 }
7736
7737 /* No sense saving up arithmetic to be done
7738 if it's all in the wrong mode to form part of an address.
7739 And force_operand won't know whether to sign-extend or
7740 zero-extend. */
7741 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7742 || mode != ptr_mode)
7743 {
7744 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7745 subtarget, &op0, &op1, 0);
7746 if (op0 == const0_rtx)
7747 return op1;
7748 if (op1 == const0_rtx)
7749 return op0;
7750 goto binop2;
7751 }
7752
7753 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7754 subtarget, &op0, &op1, modifier);
7755 return simplify_gen_binary (PLUS, mode, op0, op1);
7756
7757 case MINUS_EXPR:
7758 /* For initializers, we are allowed to return a MINUS of two
7759 symbolic constants. Here we handle all cases when both operands
7760 are constant. */
7761 /* Handle difference of two symbolic constants,
7762 for the sake of an initializer. */
7763 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7764 && really_constant_p (TREE_OPERAND (exp, 0))
7765 && really_constant_p (TREE_OPERAND (exp, 1)))
7766 {
7767 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7768 NULL_RTX, &op0, &op1, modifier);
7769
7770 /* If the last operand is a CONST_INT, use plus_constant of
7771 the negated constant. Else make the MINUS. */
7772 if (GET_CODE (op1) == CONST_INT)
7773 return plus_constant (op0, - INTVAL (op1));
7774 else
7775 return gen_rtx_MINUS (mode, op0, op1);
7776 }
7777
7778 this_optab = ! unsignedp && flag_trapv
7779 && (GET_MODE_CLASS(mode) == MODE_INT)
7780 ? subv_optab : sub_optab;
7781
7782 /* No sense saving up arithmetic to be done
7783 if it's all in the wrong mode to form part of an address.
7784 And force_operand won't know whether to sign-extend or
7785 zero-extend. */
7786 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7787 || mode != ptr_mode)
7788 goto binop;
7789
7790 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7791 subtarget, &op0, &op1, modifier);
7792
7793 /* Convert A - const to A + (-const). */
7794 if (GET_CODE (op1) == CONST_INT)
7795 {
7796 op1 = negate_rtx (mode, op1);
7797 return simplify_gen_binary (PLUS, mode, op0, op1);
7798 }
7799
7800 goto binop2;
7801
7802 case MULT_EXPR:
7803 /* If first operand is constant, swap them.
7804 Thus the following special case checks need only
7805 check the second operand. */
7806 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7807 {
7808 tree t1 = TREE_OPERAND (exp, 0);
7809 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7810 TREE_OPERAND (exp, 1) = t1;
7811 }
7812
7813 /* Attempt to return something suitable for generating an
7814 indexed address, for machines that support that. */
7815
7816 if (modifier == EXPAND_SUM && mode == ptr_mode
7817 && host_integerp (TREE_OPERAND (exp, 1), 0))
7818 {
7819 tree exp1 = TREE_OPERAND (exp, 1);
7820
7821 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7822 EXPAND_SUM);
7823
7824 if (GET_CODE (op0) != REG)
7825 op0 = force_operand (op0, NULL_RTX);
7826 if (GET_CODE (op0) != REG)
7827 op0 = copy_to_mode_reg (mode, op0);
7828
7829 return gen_rtx_MULT (mode, op0,
7830 gen_int_mode (tree_low_cst (exp1, 0),
7831 TYPE_MODE (TREE_TYPE (exp1))));
7832 }
7833
7834 if (modifier == EXPAND_STACK_PARM)
7835 target = 0;
7836
7837 /* Check for multiplying things that have been extended
7838 from a narrower type. If this machine supports multiplying
7839 in that narrower type with a result in the desired type,
7840 do it that way, and avoid the explicit type-conversion. */
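      /* For instance, (int) h1 * (int) h2 with H1 and H2 of a 16-bit type can
         use a 16x16->32 widening multiply (a mulhisi3-style pattern) when the
         target provides one, instead of widening both operands and doing a
         full 32-bit multiply.  */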
7841 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7842 && TREE_CODE (type) == INTEGER_TYPE
7843 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7844 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7845 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7846 && int_fits_type_p (TREE_OPERAND (exp, 1),
7847 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7848 /* Don't use a widening multiply if a shift will do. */
7849 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7850 > HOST_BITS_PER_WIDE_INT)
7851 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7852 ||
7853 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7854 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7855 ==
7856 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7857 /* If both operands are extended, they must either both
7858 be zero-extended or both be sign-extended. */
7859 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7860 ==
7861 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7862 {
7863 enum machine_mode innermode
7864 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7865 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7866 ? smul_widen_optab : umul_widen_optab);
7867 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7868 ? umul_widen_optab : smul_widen_optab);
7869 if (mode == GET_MODE_WIDER_MODE (innermode))
7870 {
7871 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7872 {
7873 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7874 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7875 TREE_OPERAND (exp, 1),
7876 NULL_RTX, &op0, &op1, 0);
7877 else
7878 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7879 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7880 NULL_RTX, &op0, &op1, 0);
7881 goto binop2;
7882 }
7883 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7884 && innermode == word_mode)
7885 {
7886 rtx htem;
7887 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7888 NULL_RTX, VOIDmode, 0);
7889 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7890 op1 = convert_modes (innermode, mode,
7891 expand_expr (TREE_OPERAND (exp, 1),
7892 NULL_RTX, VOIDmode, 0),
7893 unsignedp);
7894 else
7895 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7896 NULL_RTX, VOIDmode, 0);
7897 temp = expand_binop (mode, other_optab, op0, op1, target,
7898 unsignedp, OPTAB_LIB_WIDEN);
7899 htem = expand_mult_highpart_adjust (innermode,
7900 gen_highpart (innermode, temp),
7901 op0, op1,
7902 gen_highpart (innermode, temp),
7903 unsignedp);
7904 emit_move_insn (gen_highpart (innermode, temp), htem);
7905 return temp;
7906 }
7907 }
7908 }
7909 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7910 subtarget, &op0, &op1, 0);
7911 return expand_mult (mode, op0, op1, target, unsignedp);
7912
7913 case TRUNC_DIV_EXPR:
7914 case FLOOR_DIV_EXPR:
7915 case CEIL_DIV_EXPR:
7916 case ROUND_DIV_EXPR:
7917 case EXACT_DIV_EXPR:
7918 if (modifier == EXPAND_STACK_PARM)
7919 target = 0;
7920 /* Possible optimization: compute the dividend with EXPAND_SUM
7921 then if the divisor is constant can optimize the case
7922 where some terms of the dividend have coeffs divisible by it. */
7923 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7924 subtarget, &op0, &op1, 0);
7925 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7926
7927 case RDIV_EXPR:
 7928       /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
 7929          saving an expensive divide.  If not, combine will rebuild the
 7930          original computation.  */
7931 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7932 && TREE_CODE (type) == REAL_TYPE
7933 && !real_onep (TREE_OPERAND (exp, 0)))
7934 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7935 build (RDIV_EXPR, type,
7936 build_real (type, dconst1),
7937 TREE_OPERAND (exp, 1))),
7938 target, tmode, modifier);
7939 this_optab = sdiv_optab;
7940 goto binop;
7941
7942 case TRUNC_MOD_EXPR:
7943 case FLOOR_MOD_EXPR:
7944 case CEIL_MOD_EXPR:
7945 case ROUND_MOD_EXPR:
7946 if (modifier == EXPAND_STACK_PARM)
7947 target = 0;
7948 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7949 subtarget, &op0, &op1, 0);
7950 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7951
7952 case FIX_ROUND_EXPR:
7953 case FIX_FLOOR_EXPR:
7954 case FIX_CEIL_EXPR:
7955 abort (); /* Not used for C. */
7956
7957 case FIX_TRUNC_EXPR:
7958 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7959 if (target == 0 || modifier == EXPAND_STACK_PARM)
7960 target = gen_reg_rtx (mode);
7961 expand_fix (target, op0, unsignedp);
7962 return target;
7963
7964 case FLOAT_EXPR:
7965 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7966 if (target == 0 || modifier == EXPAND_STACK_PARM)
7967 target = gen_reg_rtx (mode);
7968 /* expand_float can't figure out what to do if FROM has VOIDmode.
7969 So give it the correct mode. With -O, cse will optimize this. */
7970 if (GET_MODE (op0) == VOIDmode)
7971 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7972 op0);
7973 expand_float (target, op0,
7974 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7975 return target;
7976
7977 case NEGATE_EXPR:
7978 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7979 if (modifier == EXPAND_STACK_PARM)
7980 target = 0;
7981 temp = expand_unop (mode,
7982 ! unsignedp && flag_trapv
7983 && (GET_MODE_CLASS(mode) == MODE_INT)
7984 ? negv_optab : neg_optab, op0, target, 0);
7985 if (temp == 0)
7986 abort ();
7987 return temp;
7988
7989 case ABS_EXPR:
7990 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7991 if (modifier == EXPAND_STACK_PARM)
7992 target = 0;
7993
7994 /* ABS_EXPR is not valid for complex arguments. */
7995 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7996 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7997 abort ();
7998
7999 /* Unsigned abs is simply the operand. Testing here means we don't
8000 risk generating incorrect code below. */
8001 if (TREE_UNSIGNED (type))
8002 return op0;
8003
8004 return expand_abs (mode, op0, target, unsignedp,
8005 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8006
8007 case MAX_EXPR:
8008 case MIN_EXPR:
8009 target = original_target;
8010 if (target == 0
8011 || modifier == EXPAND_STACK_PARM
8012 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8013 || GET_MODE (target) != mode
8014 || (GET_CODE (target) == REG
8015 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8016 target = gen_reg_rtx (mode);
8017 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8018 target, &op0, &op1, 0);
8019
8020 /* First try to do it with a special MIN or MAX instruction.
8021 If that does not win, use a conditional jump to select the proper
8022 value. */
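      /* Failing that, MAX (A, B) is emitted roughly as
             target = A;  if (A >= B) goto done;  target = B;  done:
         and likewise for MIN with the comparison reversed.  */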
8023 this_optab = (TREE_UNSIGNED (type)
8024 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8025 : (code == MIN_EXPR ? smin_optab : smax_optab));
8026
8027 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8028 OPTAB_WIDEN);
8029 if (temp != 0)
8030 return temp;
8031
8032 /* At this point, a MEM target is no longer useful; we will get better
8033 code without it. */
8034
8035 if (GET_CODE (target) == MEM)
8036 target = gen_reg_rtx (mode);
8037
8038 /* If op1 was placed in target, swap op0 and op1. */
8039 if (target != op0 && target == op1)
8040 {
8041 rtx tem = op0;
8042 op0 = op1;
8043 op1 = tem;
8044 }
8045
8046 if (target != op0)
8047 emit_move_insn (target, op0);
8048
8049 op0 = gen_label_rtx ();
8050
8051 /* If this mode is an integer too wide to compare properly,
8052 compare word by word. Rely on cse to optimize constant cases. */
8053 if (GET_MODE_CLASS (mode) == MODE_INT
8054 && ! can_compare_p (GE, mode, ccp_jump))
8055 {
8056 if (code == MAX_EXPR)
8057 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8058 target, op1, NULL_RTX, op0);
8059 else
8060 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8061 op1, target, NULL_RTX, op0);
8062 }
8063 else
8064 {
8065 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8066 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8067 unsignedp, mode, NULL_RTX, NULL_RTX,
8068 op0);
8069 }
8070 emit_move_insn (target, op1);
8071 emit_label (op0);
8072 return target;
8073
8074 case BIT_NOT_EXPR:
8075 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8076 if (modifier == EXPAND_STACK_PARM)
8077 target = 0;
8078 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8079 if (temp == 0)
8080 abort ();
8081 return temp;
8082
8083 /* ??? Can optimize bitwise operations with one arg constant.
8084 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8085 and (a bitwise1 b) bitwise2 b (etc)
8086 but that is probably not worth while. */
8087
8088 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8089 boolean values when we want in all cases to compute both of them. In
8090 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8091 as actual zero-or-1 values and then bitwise anding. In cases where
8092 there cannot be any side effects, better code would be made by
8093 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8094 how to recognize those cases. */
8095
8096 case TRUTH_AND_EXPR:
8097 case BIT_AND_EXPR:
8098 this_optab = and_optab;
8099 goto binop;
8100
8101 case TRUTH_OR_EXPR:
8102 case BIT_IOR_EXPR:
8103 this_optab = ior_optab;
8104 goto binop;
8105
8106 case TRUTH_XOR_EXPR:
8107 case BIT_XOR_EXPR:
8108 this_optab = xor_optab;
8109 goto binop;
8110
8111 case LSHIFT_EXPR:
8112 case RSHIFT_EXPR:
8113 case LROTATE_EXPR:
8114 case RROTATE_EXPR:
8115 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8116 subtarget = 0;
8117 if (modifier == EXPAND_STACK_PARM)
8118 target = 0;
8119 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8120 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8121 unsignedp);
8122
8123 /* Could determine the answer when only additive constants differ. Also,
8124 the addition of one can be handled by changing the condition. */
8125 case LT_EXPR:
8126 case LE_EXPR:
8127 case GT_EXPR:
8128 case GE_EXPR:
8129 case EQ_EXPR:
8130 case NE_EXPR:
8131 case UNORDERED_EXPR:
8132 case ORDERED_EXPR:
8133 case UNLT_EXPR:
8134 case UNLE_EXPR:
8135 case UNGT_EXPR:
8136 case UNGE_EXPR:
8137 case UNEQ_EXPR:
8138 temp = do_store_flag (exp,
8139 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8140 tmode != VOIDmode ? tmode : mode, 0);
8141 if (temp != 0)
8142 return temp;
8143
8144 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
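      /* The sequence generated below is roughly
             temp = foo;  if (temp == 0) goto done;  temp = 1;  done:
         so no store-flag instruction is needed.  */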
8145 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8146 && original_target
8147 && GET_CODE (original_target) == REG
8148 && (GET_MODE (original_target)
8149 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8150 {
8151 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8152 VOIDmode, 0);
8153
8154 /* If temp is constant, we can just compute the result. */
8155 if (GET_CODE (temp) == CONST_INT)
8156 {
8157 if (INTVAL (temp) != 0)
8158 emit_move_insn (target, const1_rtx);
8159 else
8160 emit_move_insn (target, const0_rtx);
8161
8162 return target;
8163 }
8164
8165 if (temp != original_target)
8166 {
8167 enum machine_mode mode1 = GET_MODE (temp);
8168 if (mode1 == VOIDmode)
8169 mode1 = tmode != VOIDmode ? tmode : mode;
8170
8171 temp = copy_to_mode_reg (mode1, temp);
8172 }
8173
8174 op1 = gen_label_rtx ();
8175 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8176 GET_MODE (temp), unsignedp, op1);
8177 emit_move_insn (temp, const1_rtx);
8178 emit_label (op1);
8179 return temp;
8180 }
8181
8182 /* If no set-flag instruction, must generate a conditional
8183 store into a temporary variable. Drop through
8184 and handle this like && and ||. */
8185
8186 case TRUTH_ANDIF_EXPR:
8187 case TRUTH_ORIF_EXPR:
8188 if (! ignore
8189 && (target == 0
8190 || modifier == EXPAND_STACK_PARM
8191 || ! safe_from_p (target, exp, 1)
 8192                /* Make sure we don't have a hard reg (such as the function's
 8193                   return value) live across basic blocks, if not optimizing.  */
8194 || (!optimize && GET_CODE (target) == REG
8195 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8196 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8197
8198 if (target)
8199 emit_clr_insn (target);
8200
8201 op1 = gen_label_rtx ();
8202 jumpifnot (exp, op1);
8203
8204 if (target)
8205 emit_0_to_1_insn (target);
8206
8207 emit_label (op1);
8208 return ignore ? const0_rtx : target;
8209
8210 case TRUTH_NOT_EXPR:
8211 if (modifier == EXPAND_STACK_PARM)
8212 target = 0;
8213 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8214 /* The parser is careful to generate TRUTH_NOT_EXPR
8215 only with operands that are always zero or one. */
8216 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8217 target, 1, OPTAB_LIB_WIDEN);
8218 if (temp == 0)
8219 abort ();
8220 return temp;
8221
8222 case COMPOUND_EXPR:
8223 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8224 emit_queue ();
8225 return expand_expr_real (TREE_OPERAND (exp, 1),
8226 (ignore ? const0_rtx : target),
8227 VOIDmode, modifier, alt_rtl);
8228
8229 case COND_EXPR:
8230 /* If we would have a "singleton" (see below) were it not for a
8231 conversion in each arm, bring that conversion back out. */
8232 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8233 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8234 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8235 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8236 {
8237 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8238 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8239
8240 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8241 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8242 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8243 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8244 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8245 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8246 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8247 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8248 return expand_expr (build1 (NOP_EXPR, type,
8249 build (COND_EXPR, TREE_TYPE (iftrue),
8250 TREE_OPERAND (exp, 0),
8251 iftrue, iffalse)),
8252 target, tmode, modifier);
8253 }
8254
8255 {
8256 /* Note that COND_EXPRs whose type is a structure or union
8257 are required to be constructed to contain assignments of
8258 a temporary variable, so that we can evaluate them here
8259 for side effect only. If type is void, we must do likewise. */
8260
8261 /* If an arm of the branch requires a cleanup,
8262 only that cleanup is performed. */
8263
8264 tree singleton = 0;
8265 tree binary_op = 0, unary_op = 0;
8266
8267 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8268 convert it to our mode, if necessary. */
8269 if (integer_onep (TREE_OPERAND (exp, 1))
8270 && integer_zerop (TREE_OPERAND (exp, 2))
8271 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8272 {
8273 if (ignore)
8274 {
8275 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8276 modifier);
8277 return const0_rtx;
8278 }
8279
8280 if (modifier == EXPAND_STACK_PARM)
8281 target = 0;
8282 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8283 if (GET_MODE (op0) == mode)
8284 return op0;
8285
8286 if (target == 0)
8287 target = gen_reg_rtx (mode);
8288 convert_move (target, op0, unsignedp);
8289 return target;
8290 }
8291
8292 /* Check for X ? A + B : A. If we have this, we can copy A to the
8293 output and conditionally add B. Similarly for unary operations.
8294 Don't do this if X has side-effects because those side effects
8295 might affect A or B and the "?" operation is a sequence point in
8296 ANSI. (operand_equal_p tests for side effects.) */
8297
8298 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8299 && operand_equal_p (TREE_OPERAND (exp, 2),
8300 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8301 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8302 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8303 && operand_equal_p (TREE_OPERAND (exp, 1),
8304 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8305 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8306 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8307 && operand_equal_p (TREE_OPERAND (exp, 2),
8308 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8309 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8310 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8311 && operand_equal_p (TREE_OPERAND (exp, 1),
8312 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8313 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8314
8315 /* If we are not to produce a result, we have no target. Otherwise,
8316 if a target was specified use it; it will not be used as an
8317 intermediate target unless it is safe. If no target, use a
8318 temporary. */
8319
8320 if (ignore)
8321 temp = 0;
8322 else if (modifier == EXPAND_STACK_PARM)
8323 temp = assign_temp (type, 0, 0, 1);
8324 else if (original_target
8325 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8326 || (singleton && GET_CODE (original_target) == REG
8327 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8328 && original_target == var_rtx (singleton)))
8329 && GET_MODE (original_target) == mode
8330 #ifdef HAVE_conditional_move
8331 && (! can_conditionally_move_p (mode)
8332 || GET_CODE (original_target) == REG
8333 || TREE_ADDRESSABLE (type))
8334 #endif
8335 && (GET_CODE (original_target) != MEM
8336 || TREE_ADDRESSABLE (type)))
8337 temp = original_target;
8338 else if (TREE_ADDRESSABLE (type))
8339 abort ();
8340 else
8341 temp = assign_temp (type, 0, 0, 1);
8342
8343 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8344 do the test of X as a store-flag operation, do this as
8345 A + ((X != 0) << log C). Similarly for other simple binary
8346 operators. Only do for C == 1 if BRANCH_COST is low. */
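        /* E.g. X ? A + 4 : A can be computed as A + ((X != 0) << 2), trading
           the conditional branch for a store-flag and a shift.  */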
8347 if (temp && singleton && binary_op
8348 && (TREE_CODE (binary_op) == PLUS_EXPR
8349 || TREE_CODE (binary_op) == MINUS_EXPR
8350 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8351 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8352 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8353 : integer_onep (TREE_OPERAND (binary_op, 1)))
8354 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8355 {
8356 rtx result;
8357 tree cond;
8358 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8359 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8360 ? addv_optab : add_optab)
8361 : TREE_CODE (binary_op) == MINUS_EXPR
8362 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8363 ? subv_optab : sub_optab)
8364 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8365 : xor_optab);
8366
8367 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8368 if (singleton == TREE_OPERAND (exp, 1))
8369 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8370 else
8371 cond = TREE_OPERAND (exp, 0);
8372
8373 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8374 ? temp : NULL_RTX),
8375 mode, BRANCH_COST <= 1);
8376
8377 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8378 result = expand_shift (LSHIFT_EXPR, mode, result,
8379 build_int_2 (tree_log2
8380 (TREE_OPERAND
8381 (binary_op, 1)),
8382 0),
8383 (safe_from_p (temp, singleton, 1)
8384 ? temp : NULL_RTX), 0);
8385
8386 if (result)
8387 {
8388 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8389 return expand_binop (mode, boptab, op1, result, temp,
8390 unsignedp, OPTAB_LIB_WIDEN);
8391 }
8392 }
8393
8394 do_pending_stack_adjust ();
8395 NO_DEFER_POP;
8396 op0 = gen_label_rtx ();
8397
8398 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8399 {
8400 if (temp != 0)
8401 {
8402 /* If the target conflicts with the other operand of the
8403 binary op, we can't use it. Also, we can't use the target
8404 if it is a hard register, because evaluating the condition
8405 might clobber it. */
8406 if ((binary_op
8407 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8408 || (GET_CODE (temp) == REG
8409 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8410 temp = gen_reg_rtx (mode);
8411 store_expr (singleton, temp,
8412 modifier == EXPAND_STACK_PARM ? 2 : 0);
8413 }
8414 else
8415 expand_expr (singleton,
8416 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8417 if (singleton == TREE_OPERAND (exp, 1))
8418 jumpif (TREE_OPERAND (exp, 0), op0);
8419 else
8420 jumpifnot (TREE_OPERAND (exp, 0), op0);
8421
8422 start_cleanup_deferral ();
8423 if (binary_op && temp == 0)
8424 /* Just touch the other operand. */
8425 expand_expr (TREE_OPERAND (binary_op, 1),
8426 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8427 else if (binary_op)
8428 store_expr (build (TREE_CODE (binary_op), type,
8429 make_tree (type, temp),
8430 TREE_OPERAND (binary_op, 1)),
8431 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8432 else
8433 store_expr (build1 (TREE_CODE (unary_op), type,
8434 make_tree (type, temp)),
8435 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8436 op1 = op0;
8437 }
8438 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8439 comparison operator. If we have one of these cases, set the
8440 output to A, branch on A (cse will merge these two references),
8441 then set the output to FOO. */
8442 else if (temp
8443 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8444 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8445 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8446 TREE_OPERAND (exp, 1), 0)
8447 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8448 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8449 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8450 {
8451 if (GET_CODE (temp) == REG
8452 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8453 temp = gen_reg_rtx (mode);
8454 store_expr (TREE_OPERAND (exp, 1), temp,
8455 modifier == EXPAND_STACK_PARM ? 2 : 0);
8456 jumpif (TREE_OPERAND (exp, 0), op0);
8457
8458 start_cleanup_deferral ();
8459 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8460 store_expr (TREE_OPERAND (exp, 2), temp,
8461 modifier == EXPAND_STACK_PARM ? 2 : 0);
8462 else
8463 expand_expr (TREE_OPERAND (exp, 2),
8464 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8465 op1 = op0;
8466 }
8467 else if (temp
8468 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8469 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8470 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8471 TREE_OPERAND (exp, 2), 0)
8472 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8473 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8474 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8475 {
8476 if (GET_CODE (temp) == REG
8477 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8478 temp = gen_reg_rtx (mode);
8479 store_expr (TREE_OPERAND (exp, 2), temp,
8480 modifier == EXPAND_STACK_PARM ? 2 : 0);
8481 jumpifnot (TREE_OPERAND (exp, 0), op0);
8482
8483 start_cleanup_deferral ();
8484 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8485 store_expr (TREE_OPERAND (exp, 1), temp,
8486 modifier == EXPAND_STACK_PARM ? 2 : 0);
8487 else
8488 expand_expr (TREE_OPERAND (exp, 1),
8489 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8490 op1 = op0;
8491 }
8492 else
8493 {
8494 op1 = gen_label_rtx ();
8495 jumpifnot (TREE_OPERAND (exp, 0), op0);
8496
8497 start_cleanup_deferral ();
8498
8499 /* One branch of the cond can be void, if it never returns. For
8500 example A ? throw : E */
8501 if (temp != 0
8502 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8503 store_expr (TREE_OPERAND (exp, 1), temp,
8504 modifier == EXPAND_STACK_PARM ? 2 : 0);
8505 else
8506 expand_expr (TREE_OPERAND (exp, 1),
8507 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8508 end_cleanup_deferral ();
8509 emit_queue ();
8510 emit_jump_insn (gen_jump (op1));
8511 emit_barrier ();
8512 emit_label (op0);
8513 start_cleanup_deferral ();
8514 if (temp != 0
8515 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8516 store_expr (TREE_OPERAND (exp, 2), temp,
8517 modifier == EXPAND_STACK_PARM ? 2 : 0);
8518 else
8519 expand_expr (TREE_OPERAND (exp, 2),
8520 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8521 }
8522
8523 end_cleanup_deferral ();
8524
8525 emit_queue ();
8526 emit_label (op1);
8527 OK_DEFER_POP;
8528
8529 return temp;
8530 }
8531
8532 case TARGET_EXPR:
8533 {
8534 /* Something needs to be initialized, but we didn't know
8535 where that thing was when building the tree. For example,
8536 it could be the return value of a function, or a parameter
8537 	   to a function which is laid down on the stack, or a temporary
8538 variable which must be passed by reference.
8539
8540 We guarantee that the expression will either be constructed
8541 or copied into our original target. */
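	 /* For example (illustrative): a C++ front end may represent the
	    initialization "S s = f ();" with a TARGET_EXPR, so that the
	    value returned by f can be constructed directly in its final
	    location.  */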
8542
8543 tree slot = TREE_OPERAND (exp, 0);
8544 tree cleanups = NULL_TREE;
8545 tree exp1;
8546
8547 if (TREE_CODE (slot) != VAR_DECL)
8548 abort ();
8549
8550 if (! ignore)
8551 target = original_target;
8552
8553 /* Set this here so that if we get a target that refers to a
8554 register variable that's already been used, put_reg_into_stack
8555 knows that it should fix up those uses. */
8556 TREE_USED (slot) = 1;
8557
8558 if (target == 0)
8559 {
8560 if (DECL_RTL_SET_P (slot))
8561 {
8562 target = DECL_RTL (slot);
8563 		  /* If we have already expanded the slot, don't do
8564 		     it again.  (mrs)  */
8565 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8566 return target;
8567 }
8568 else
8569 {
8570 target = assign_temp (type, 2, 0, 1);
8571 /* All temp slots at this level must not conflict. */
8572 preserve_temp_slots (target);
8573 SET_DECL_RTL (slot, target);
8574 if (TREE_ADDRESSABLE (slot))
8575 put_var_into_stack (slot, /*rescan=*/false);
8576
8577 /* Since SLOT is not known to the called function
8578 to belong to its stack frame, we must build an explicit
8579 cleanup. This case occurs when we must build up a reference
8580 to pass the reference as an argument. In this case,
8581 it is very likely that such a reference need not be
8582 built here. */
8583
8584 if (TREE_OPERAND (exp, 2) == 0)
8585 TREE_OPERAND (exp, 2)
8586 = (*lang_hooks.maybe_build_cleanup) (slot);
8587 cleanups = TREE_OPERAND (exp, 2);
8588 }
8589 }
8590 else
8591 {
8592 	  /* This case does occur when expanding a parameter which
8593 needs to be constructed on the stack. The target
8594 is the actual stack address that we want to initialize.
8595 The function we call will perform the cleanup in this case. */
8596
8597 /* If we have already assigned it space, use that space,
8598 	       not the target that we were passed in, as our target
8599 parameter is only a hint. */
8600 if (DECL_RTL_SET_P (slot))
8601 {
8602 target = DECL_RTL (slot);
8603 		/* If we have already expanded the slot, don't do
8604 		   it again.  (mrs)  */
8605 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8606 return target;
8607 }
8608 else
8609 {
8610 SET_DECL_RTL (slot, target);
8611 /* If we must have an addressable slot, then make sure that
8612 the RTL that we just stored in slot is OK. */
8613 if (TREE_ADDRESSABLE (slot))
8614 put_var_into_stack (slot, /*rescan=*/true);
8615 }
8616 }
8617
8618 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8619 /* Mark it as expanded. */
8620 TREE_OPERAND (exp, 1) = NULL_TREE;
8621
8622 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8623
8624 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8625
8626 return target;
8627 }
8628
8629 case INIT_EXPR:
8630 {
8631 tree lhs = TREE_OPERAND (exp, 0);
8632 tree rhs = TREE_OPERAND (exp, 1);
8633
8634 temp = expand_assignment (lhs, rhs, ! ignore);
8635 return temp;
8636 }
8637
8638 case MODIFY_EXPR:
8639 {
8640 /* If lhs is complex, expand calls in rhs before computing it.
8641 That's so we don't compute a pointer and save it over a
8642 call. If lhs is simple, compute it first so we can give it
8643 as a target if the rhs is just a call. This avoids an
8644 extra temp and copy and that prevents a partial-subsumption
8645 which makes bad code. Actually we could treat
8646 component_ref's of vars like vars. */
8647
8648 tree lhs = TREE_OPERAND (exp, 0);
8649 tree rhs = TREE_OPERAND (exp, 1);
8650
8651 temp = 0;
8652
8653 /* Check for |= or &= of a bitfield of size one into another bitfield
8654 of size 1. In this case, (unless we need the result of the
8655 assignment) we can do this more efficiently with a
8656 test followed by an assignment, if necessary.
8657
8658 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8659 things change so we do, this code should be enhanced to
8660 support it. */
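	/* As an illustration (not from the source), for one-bit fields
	   "s.a |= s.b" is emitted as "if (s.b) s.a = 1;" and
	   "s.a &= s.b" as "if (! s.b) s.a = 0;", since the stored value
	   only changes in those cases.  */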
8661 if (ignore
8662 && TREE_CODE (lhs) == COMPONENT_REF
8663 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8664 || TREE_CODE (rhs) == BIT_AND_EXPR)
8665 && TREE_OPERAND (rhs, 0) == lhs
8666 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8667 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8668 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8669 {
8670 rtx label = gen_label_rtx ();
8671
8672 do_jump (TREE_OPERAND (rhs, 1),
8673 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8674 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8675 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8676 (TREE_CODE (rhs) == BIT_IOR_EXPR
8677 ? integer_one_node
8678 : integer_zero_node)),
8679 0);
8680 do_pending_stack_adjust ();
8681 emit_label (label);
8682 return const0_rtx;
8683 }
8684
8685 temp = expand_assignment (lhs, rhs, ! ignore);
8686
8687 return temp;
8688 }
8689
8690 case RETURN_EXPR:
8691 if (!TREE_OPERAND (exp, 0))
8692 expand_null_return ();
8693 else
8694 expand_return (TREE_OPERAND (exp, 0));
8695 return const0_rtx;
8696
8697 case PREINCREMENT_EXPR:
8698 case PREDECREMENT_EXPR:
8699 return expand_increment (exp, 0, ignore);
8700
8701 case POSTINCREMENT_EXPR:
8702 case POSTDECREMENT_EXPR:
8703 /* Faster to treat as pre-increment if result is not used. */
8704 return expand_increment (exp, ! ignore, ignore);
8705
8706 case ADDR_EXPR:
8707 if (modifier == EXPAND_STACK_PARM)
8708 target = 0;
8709 /* Are we taking the address of a nested function? */
8710 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8711 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8712 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8713 && ! TREE_STATIC (exp))
8714 {
8715 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8716 op0 = force_operand (op0, target);
8717 }
8718 /* If we are taking the address of something erroneous, just
8719 return a zero. */
8720 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8721 return const0_rtx;
8722 /* If we are taking the address of a constant and are at the
8723 top level, we have to use output_constant_def since we can't
8724 call force_const_mem at top level. */
8725 else if (cfun == 0
8726 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8727 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8728 == 'c')))
8729 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8730 else
8731 {
8732 /* We make sure to pass const0_rtx down if we came in with
8733 	     ignore set, to avoid doing the cleanups twice.  */
8734 op0 = expand_expr (TREE_OPERAND (exp, 0),
8735 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8736 (modifier == EXPAND_INITIALIZER
8737 ? modifier : EXPAND_CONST_ADDRESS));
8738
8739 /* If we are going to ignore the result, OP0 will have been set
8740 to const0_rtx, so just return it. Don't get confused and
8741 think we are taking the address of the constant. */
8742 if (ignore)
8743 return op0;
8744
8745 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8746 	     clever and return a REG when given a MEM.  */
8747 op0 = protect_from_queue (op0, 1);
8748
8749 /* We would like the object in memory. If it is a constant, we can
8750 have it be statically allocated into memory. For a non-constant,
8751 we need to allocate some memory and store the value into it. */
8752
8753 if (CONSTANT_P (op0))
8754 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8755 op0);
8756 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8757 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8758 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8759 {
8760 /* If the operand is a SAVE_EXPR, we can deal with this by
8761 forcing the SAVE_EXPR into memory. */
8762 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8763 {
8764 put_var_into_stack (TREE_OPERAND (exp, 0),
8765 /*rescan=*/true);
8766 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8767 }
8768 else
8769 {
8770 /* If this object is in a register, it can't be BLKmode. */
8771 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8772 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8773
8774 if (GET_CODE (op0) == PARALLEL)
8775 /* Handle calls that pass values in multiple
8776 non-contiguous locations. The Irix 6 ABI has examples
8777 of this. */
8778 emit_group_store (memloc, op0, inner_type,
8779 int_size_in_bytes (inner_type));
8780 else
8781 emit_move_insn (memloc, op0);
8782
8783 op0 = memloc;
8784 }
8785 }
8786
8787 if (GET_CODE (op0) != MEM)
8788 abort ();
8789
8790 mark_temp_addr_taken (op0);
8791 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8792 {
8793 op0 = XEXP (op0, 0);
8794 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8795 op0 = convert_memory_address (ptr_mode, op0);
8796 return op0;
8797 }
8798
8799 	      /* If OP0 is not aligned at least as much as the type requires, we
8800 need to make a temporary, copy OP0 to it, and take the address of
8801 the temporary. We want to use the alignment of the type, not of
8802 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8803 the test for BLKmode means that can't happen. The test for
8804 		 BLKmode is needed because we never make mis-aligned MEMs with a
8805 		 mode other than BLKmode.
8806
8807 We don't need to do this at all if the machine doesn't have
8808 strict alignment. */
8809 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8810 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8811 > MEM_ALIGN (op0))
8812 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8813 {
8814 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8815 rtx new;
8816
8817 if (TYPE_ALIGN_OK (inner_type))
8818 abort ();
8819
8820 if (TREE_ADDRESSABLE (inner_type))
8821 {
8822 /* We can't make a bitwise copy of this object, so fail. */
8823 error ("cannot take the address of an unaligned member");
8824 return const0_rtx;
8825 }
8826
8827 new = assign_stack_temp_for_type
8828 (TYPE_MODE (inner_type),
8829 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8830 : int_size_in_bytes (inner_type),
8831 1, build_qualified_type (inner_type,
8832 (TYPE_QUALS (inner_type)
8833 | TYPE_QUAL_CONST)));
8834
8835 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8836 (modifier == EXPAND_STACK_PARM
8837 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8838
8839 op0 = new;
8840 }
8841
8842 op0 = force_operand (XEXP (op0, 0), target);
8843 }
8844
8845 if (flag_force_addr
8846 && GET_CODE (op0) != REG
8847 && modifier != EXPAND_CONST_ADDRESS
8848 && modifier != EXPAND_INITIALIZER
8849 && modifier != EXPAND_SUM)
8850 op0 = force_reg (Pmode, op0);
8851
8852 if (GET_CODE (op0) == REG
8853 && ! REG_USERVAR_P (op0))
8854 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8855
8856 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8857 op0 = convert_memory_address (ptr_mode, op0);
8858
8859 return op0;
8860
8861 case ENTRY_VALUE_EXPR:
8862 abort ();
8863
8864 /* COMPLEX type for Extended Pascal & Fortran */
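      /* The two operands of a COMPLEX_EXPR are the real and imaginary
	 parts; e.g. a front end may build one for `a + b * 1.0i'
	 (illustrative).  */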
8865 case COMPLEX_EXPR:
8866 {
8867 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8868 rtx insns;
8869
8870 /* Get the rtx code of the operands. */
8871 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8872 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8873
8874 if (! target)
8875 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8876
8877 start_sequence ();
8878
8879 /* Move the real (op0) and imaginary (op1) parts to their location. */
8880 emit_move_insn (gen_realpart (mode, target), op0);
8881 emit_move_insn (gen_imagpart (mode, target), op1);
8882
8883 insns = get_insns ();
8884 end_sequence ();
8885
8886 /* Complex construction should appear as a single unit. */
8887 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8888 each with a separate pseudo as destination.
8889 It's not correct for flow to treat them as a unit. */
8890 if (GET_CODE (target) != CONCAT)
8891 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8892 else
8893 emit_insn (insns);
8894
8895 return target;
8896 }
8897
8898 case REALPART_EXPR:
8899 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8900 return gen_realpart (mode, op0);
8901
8902 case IMAGPART_EXPR:
8903 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8904 return gen_imagpart (mode, op0);
8905
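    /* Complex conjugate: for a value a + bi, compute a - bi.  */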
8906 case CONJ_EXPR:
8907 {
8908 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8909 rtx imag_t;
8910 rtx insns;
8911
8912 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8913
8914 if (! target)
8915 target = gen_reg_rtx (mode);
8916
8917 start_sequence ();
8918
8919 /* Store the realpart and the negated imagpart to target. */
8920 emit_move_insn (gen_realpart (partmode, target),
8921 gen_realpart (partmode, op0));
8922
8923 imag_t = gen_imagpart (partmode, target);
8924 temp = expand_unop (partmode,
8925 ! unsignedp && flag_trapv
8926 && (GET_MODE_CLASS(partmode) == MODE_INT)
8927 ? negv_optab : neg_optab,
8928 gen_imagpart (partmode, op0), imag_t, 0);
8929 if (temp != imag_t)
8930 emit_move_insn (imag_t, temp);
8931
8932 insns = get_insns ();
8933 end_sequence ();
8934
8935       /* Conjugate should appear as a single unit.
8936 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8937 each with a separate pseudo as destination.
8938 It's not correct for flow to treat them as a unit. */
8939 if (GET_CODE (target) != CONCAT)
8940 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8941 else
8942 emit_insn (insns);
8943
8944 return target;
8945 }
8946
8947 case TRY_CATCH_EXPR:
8948 {
8949 tree handler = TREE_OPERAND (exp, 1);
8950
8951 expand_eh_region_start ();
8952
8953 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8954
8955 expand_eh_region_end_cleanup (handler);
8956
8957 return op0;
8958 }
8959
8960 case TRY_FINALLY_EXPR:
8961 {
8962 tree try_block = TREE_OPERAND (exp, 0);
8963 tree finally_block = TREE_OPERAND (exp, 1);
8964
8965 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8966 {
8967 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8968 is not sufficient, so we cannot expand the block twice.
8969 So we play games with GOTO_SUBROUTINE_EXPR to let us
8970 expand the thing only once. */
8971 /* When not optimizing, we go ahead with this form since
8972 (1) user breakpoints operate more predictably without
8973 code duplication, and
8974 (2) we're not running any of the global optimizers
8975 that would explode in time/space with the highly
8976 connected CFG created by the indirect branching. */
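	    /* An illustrative sketch of the code emitted on this path:

		   <try block>
		   return_link = &&resume; goto finally;   (from the cleanup)
		 resume:
		   goto done;
		 finally:
		   <finally block>
		   goto *return_link;
		 done:                                                      */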
8977
8978 rtx finally_label = gen_label_rtx ();
8979 rtx done_label = gen_label_rtx ();
8980 rtx return_link = gen_reg_rtx (Pmode);
8981 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8982 (tree) finally_label, (tree) return_link);
8983 TREE_SIDE_EFFECTS (cleanup) = 1;
8984
8985 /* Start a new binding layer that will keep track of all cleanup
8986 actions to be performed. */
8987 expand_start_bindings (2);
8988 target_temp_slot_level = temp_slot_level;
8989
8990 expand_decl_cleanup (NULL_TREE, cleanup);
8991 op0 = expand_expr (try_block, target, tmode, modifier);
8992
8993 preserve_temp_slots (op0);
8994 expand_end_bindings (NULL_TREE, 0, 0);
8995 emit_jump (done_label);
8996 emit_label (finally_label);
8997 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8998 emit_indirect_jump (return_link);
8999 emit_label (done_label);
9000 }
9001 else
9002 {
9003 expand_start_bindings (2);
9004 target_temp_slot_level = temp_slot_level;
9005
9006 expand_decl_cleanup (NULL_TREE, finally_block);
9007 op0 = expand_expr (try_block, target, tmode, modifier);
9008
9009 preserve_temp_slots (op0);
9010 expand_end_bindings (NULL_TREE, 0, 0);
9011 }
9012
9013 return op0;
9014 }
9015
9016 case GOTO_SUBROUTINE_EXPR:
9017 {
9018 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9019 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9020 rtx return_address = gen_label_rtx ();
9021 emit_move_insn (return_link,
9022 gen_rtx_LABEL_REF (Pmode, return_address));
9023 emit_jump (subr);
9024 emit_label (return_address);
9025 return const0_rtx;
9026 }
9027
9028 case VA_ARG_EXPR:
9029 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9030
9031 case EXC_PTR_EXPR:
9032 return get_exception_pointer (cfun);
9033
9034 case FDESC_EXPR:
9035 /* Function descriptors are not valid except for as
9036 initialization constants, and should not be expanded. */
9037 abort ();
9038
9039 default:
9040 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9041 alt_rtl);
9042 }
9043
9044 /* Here to do an ordinary binary operator, generating an instruction
9045 from the optab already placed in `this_optab'. */
9046 binop:
9047 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9048 subtarget, &op0, &op1, 0);
9049 binop2:
9050 if (modifier == EXPAND_STACK_PARM)
9051 target = 0;
9052 temp = expand_binop (mode, this_optab, op0, op1, target,
9053 unsignedp, OPTAB_LIB_WIDEN);
9054 if (temp == 0)
9055 abort ();
9056 return temp;
9057 }
9058 \f
9059 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9060 when applied to the address of EXP produces an address known to be
9061 aligned more than BIGGEST_ALIGNMENT. */
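/* Concretely (an informal sketch of the pattern matched below), OFFSET must
   have the form

       (- (address of EXP, or of a PLACEHOLDER_EXPR of the same type))
       & (ALIGN - 1)

   where ALIGN is a power of 2 and ALIGN - 1 is larger than BIGGEST_ALIGNMENT,
   possibly with conversions wrapped around the subexpressions.  */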
9062
9063 static int
9064 is_aligning_offset (tree offset, tree exp)
9065 {
9066 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9067 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9068 || TREE_CODE (offset) == NOP_EXPR
9069 || TREE_CODE (offset) == CONVERT_EXPR
9070 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9071 offset = TREE_OPERAND (offset, 0);
9072
9073 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9074      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9075 if (TREE_CODE (offset) != BIT_AND_EXPR
9076 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9077 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9078       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9079 return 0;
9080
9081 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9082 It must be NEGATE_EXPR. Then strip any more conversions. */
9083 offset = TREE_OPERAND (offset, 0);
9084 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9085 || TREE_CODE (offset) == NOP_EXPR
9086 || TREE_CODE (offset) == CONVERT_EXPR)
9087 offset = TREE_OPERAND (offset, 0);
9088
9089 if (TREE_CODE (offset) != NEGATE_EXPR)
9090 return 0;
9091
9092 offset = TREE_OPERAND (offset, 0);
9093 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9094 || TREE_CODE (offset) == NOP_EXPR
9095 || TREE_CODE (offset) == CONVERT_EXPR)
9096 offset = TREE_OPERAND (offset, 0);
9097
9098 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9099 whose type is the same as EXP. */
9100 return (TREE_CODE (offset) == ADDR_EXPR
9101 && (TREE_OPERAND (offset, 0) == exp
9102 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9103 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9104 == TREE_TYPE (exp)))));
9105 }
9106 \f
9107 /* Return the tree node if ARG corresponds to a string constant, or zero
9108 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9109 in bytes within the string that ARG is accessing. The type of the
9110 offset will be `sizetype'. */
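/* For example (illustrative), if ARG is the tree for &"hello" + 3, this
   returns the STRING_CST "hello" and sets *PTR_OFFSET to 3.  */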
9111
9112 tree
9113 string_constant (tree arg, tree *ptr_offset)
9114 {
9115 STRIP_NOPS (arg);
9116
9117 if (TREE_CODE (arg) == ADDR_EXPR
9118 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9119 {
9120 *ptr_offset = size_zero_node;
9121 return TREE_OPERAND (arg, 0);
9122 }
9123 else if (TREE_CODE (arg) == PLUS_EXPR)
9124 {
9125 tree arg0 = TREE_OPERAND (arg, 0);
9126 tree arg1 = TREE_OPERAND (arg, 1);
9127
9128 STRIP_NOPS (arg0);
9129 STRIP_NOPS (arg1);
9130
9131 if (TREE_CODE (arg0) == ADDR_EXPR
9132 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9133 {
9134 *ptr_offset = convert (sizetype, arg1);
9135 return TREE_OPERAND (arg0, 0);
9136 }
9137 else if (TREE_CODE (arg1) == ADDR_EXPR
9138 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9139 {
9140 *ptr_offset = convert (sizetype, arg0);
9141 return TREE_OPERAND (arg1, 0);
9142 }
9143 }
9144
9145 return 0;
9146 }
9147 \f
9148 /* Expand code for a post- or pre- increment or decrement
9149 and return the RTX for the result.
9150 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9151
9152 static rtx
9153 expand_increment (tree exp, int post, int ignore)
9154 {
9155 rtx op0, op1;
9156 rtx temp, value;
9157 tree incremented = TREE_OPERAND (exp, 0);
9158 optab this_optab = add_optab;
9159 int icode;
9160 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9161 int op0_is_copy = 0;
9162 int single_insn = 0;
9163 /* 1 means we can't store into OP0 directly,
9164 because it is a subreg narrower than a word,
9165 and we don't dare clobber the rest of the word. */
9166 int bad_subreg = 0;
9167
9168 /* Stabilize any component ref that might need to be
9169 evaluated more than once below. */
9170 if (!post
9171 || TREE_CODE (incremented) == BIT_FIELD_REF
9172 || (TREE_CODE (incremented) == COMPONENT_REF
9173 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9174 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9175 incremented = stabilize_reference (incremented);
9176 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9177 ones into save exprs so that they don't accidentally get evaluated
9178 more than once by the code below. */
9179 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9180 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9181 incremented = save_expr (incremented);
9182
9183 /* Compute the operands as RTX.
9184 Note whether OP0 is the actual lvalue or a copy of it:
9185 I believe it is a copy iff it is a register or subreg
9186 and insns were generated in computing it. */
9187
9188 temp = get_last_insn ();
9189 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9190
9191 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9192 in place but instead must do sign- or zero-extension during assignment,
9193 so we copy it into a new register and let the code below use it as
9194 a copy.
9195
9196      Note that we can safely modify this SUBREG since it is known not to be
9197 shared (it was made by the expand_expr call above). */
9198
9199 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9200 {
9201 if (post)
9202 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9203 else
9204 bad_subreg = 1;
9205 }
9206 else if (GET_CODE (op0) == SUBREG
9207 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9208 {
9209 /* We cannot increment this SUBREG in place. If we are
9210 post-incrementing, get a copy of the old value. Otherwise,
9211 just mark that we cannot increment in place. */
9212 if (post)
9213 op0 = copy_to_reg (op0);
9214 else
9215 bad_subreg = 1;
9216 }
9217
9218 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9219 && temp != get_last_insn ());
9220 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9221
9222 /* Decide whether incrementing or decrementing. */
9223 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9224 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9225 this_optab = sub_optab;
9226
9227 /* Convert decrement by a constant into a negative increment. */
9228 if (this_optab == sub_optab
9229 && GET_CODE (op1) == CONST_INT)
9230 {
9231 op1 = GEN_INT (-INTVAL (op1));
9232 this_optab = add_optab;
9233 }
9234
9235 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9236 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9237
9238 /* For a preincrement, see if we can do this with a single instruction. */
9239 if (!post)
9240 {
9241 icode = (int) this_optab->handlers[(int) mode].insn_code;
9242 if (icode != (int) CODE_FOR_nothing
9243 /* Make sure that OP0 is valid for operands 0 and 1
9244 of the insn we want to queue. */
9245 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9246 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9247 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9248 single_insn = 1;
9249 }
9250
9251 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9252 then we cannot just increment OP0. We must therefore contrive to
9253 increment the original value. Then, for postincrement, we can return
9254 OP0 since it is a copy of the old value. For preincrement, expand here
9255 unless we can do it with a single insn.
9256
9257 Likewise if storing directly into OP0 would clobber high bits
9258 we need to preserve (bad_subreg). */
9259 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9260 {
9261 /* This is the easiest way to increment the value wherever it is.
9262 Problems with multiple evaluation of INCREMENTED are prevented
9263 because either (1) it is a component_ref or preincrement,
9264 in which case it was stabilized above, or (2) it is an array_ref
9265 with constant index in an array in a register, which is
9266 safe to reevaluate. */
9267 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9268 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9269 ? MINUS_EXPR : PLUS_EXPR),
9270 TREE_TYPE (exp),
9271 incremented,
9272 TREE_OPERAND (exp, 1));
9273
9274 while (TREE_CODE (incremented) == NOP_EXPR
9275 || TREE_CODE (incremented) == CONVERT_EXPR)
9276 {
9277 newexp = convert (TREE_TYPE (incremented), newexp);
9278 incremented = TREE_OPERAND (incremented, 0);
9279 }
9280
9281 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9282 return post ? op0 : temp;
9283 }
9284
9285 if (post)
9286 {
9287 /* We have a true reference to the value in OP0.
9288 If there is an insn to add or subtract in this mode, queue it.
9289 Queuing the increment insn avoids the register shuffling
9290 that often results if we must increment now and first save
9291 the old value for subsequent use. */
9292
9293 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9294 op0 = stabilize (op0);
9295 #endif
9296
9297 icode = (int) this_optab->handlers[(int) mode].insn_code;
9298 if (icode != (int) CODE_FOR_nothing
9299 /* Make sure that OP0 is valid for operands 0 and 1
9300 of the insn we want to queue. */
9301 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9302 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9303 {
9304 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9305 op1 = force_reg (mode, op1);
9306
9307 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9308 }
9309 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9310 {
9311 rtx addr = (general_operand (XEXP (op0, 0), mode)
9312 ? force_reg (Pmode, XEXP (op0, 0))
9313 : copy_to_reg (XEXP (op0, 0)));
9314 rtx temp, result;
9315
9316 op0 = replace_equiv_address (op0, addr);
9317 temp = force_reg (GET_MODE (op0), op0);
9318 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9319 op1 = force_reg (mode, op1);
9320
9321 /* The increment queue is LIFO, thus we have to `queue'
9322 the instructions in reverse order. */
9323 enqueue_insn (op0, gen_move_insn (op0, temp));
9324 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9325 return result;
9326 }
9327 }
9328
9329 /* Preincrement, or we can't increment with one simple insn. */
9330 if (post)
9331 /* Save a copy of the value before inc or dec, to return it later. */
9332 temp = value = copy_to_reg (op0);
9333 else
9334 /* Arrange to return the incremented value. */
9335 /* Copy the rtx because expand_binop will protect from the queue,
9336 and the results of that would be invalid for us to return
9337 if our caller does emit_queue before using our result. */
9338 temp = copy_rtx (value = op0);
9339
9340 /* Increment however we can. */
9341 op1 = expand_binop (mode, this_optab, value, op1, op0,
9342 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9343
9344 /* Make sure the value is stored into OP0. */
9345 if (op1 != op0)
9346 emit_move_insn (op0, op1);
9347
9348 return temp;
9349 }
9350 \f
9351 /* Generate code to calculate EXP using a store-flag instruction
9352 and return an rtx for the result. EXP is either a comparison
9353 or a TRUTH_NOT_EXPR whose operand is a comparison.
9354
9355 If TARGET is nonzero, store the result there if convenient.
9356
9357 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9358 cheap.
9359
9360 Return zero if there is no suitable set-flag instruction
9361 available on this machine.
9362
9363 Once expand_expr has been called on the arguments of the comparison,
9364 we are committed to doing the store flag, since it is not safe to
9365 re-evaluate the expression. We emit the store-flag insn by calling
9366 emit_store_flag, but only expand the arguments if we have a reason
9367 to believe that emit_store_flag will be successful. If we think that
9368 it will, but it isn't, we have to simulate the store-flag with a
9369 set/jump/set sequence. */
9370
9371 static rtx
9372 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9373 {
9374 enum rtx_code code;
9375 tree arg0, arg1, type;
9376 tree tem;
9377 enum machine_mode operand_mode;
9378 int invert = 0;
9379 int unsignedp;
9380 rtx op0, op1;
9381 enum insn_code icode;
9382 rtx subtarget = target;
9383 rtx result, label;
9384
9385 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9386 result at the end. We can't simply invert the test since it would
9387 have already been inverted if it were valid. This case occurs for
9388 some floating-point comparisons. */
9389
9390 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9391 invert = 1, exp = TREE_OPERAND (exp, 0);
9392
9393 arg0 = TREE_OPERAND (exp, 0);
9394 arg1 = TREE_OPERAND (exp, 1);
9395
9396 /* Don't crash if the comparison was erroneous. */
9397 if (arg0 == error_mark_node || arg1 == error_mark_node)
9398 return const0_rtx;
9399
9400 type = TREE_TYPE (arg0);
9401 operand_mode = TYPE_MODE (type);
9402 unsignedp = TREE_UNSIGNED (type);
9403
9404 /* We won't bother with BLKmode store-flag operations because it would mean
9405 passing a lot of information to emit_store_flag. */
9406 if (operand_mode == BLKmode)
9407 return 0;
9408
9409 /* We won't bother with store-flag operations involving function pointers
9410 when function pointers must be canonicalized before comparisons. */
9411 #ifdef HAVE_canonicalize_funcptr_for_compare
9412 if (HAVE_canonicalize_funcptr_for_compare
9413 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9414 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9415 == FUNCTION_TYPE))
9416 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9417 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9418 == FUNCTION_TYPE))))
9419 return 0;
9420 #endif
9421
9422 STRIP_NOPS (arg0);
9423 STRIP_NOPS (arg1);
9424
9425 /* Get the rtx comparison code to use. We know that EXP is a comparison
9426 operation of some type. Some comparisons against 1 and -1 can be
9427 converted to comparisons with zero. Do so here so that the tests
9428 below will be aware that we have a comparison with zero. These
9429 tests will not catch constants in the first operand, but constants
9430 are rarely passed as the first operand. */
9431
9432 switch (TREE_CODE (exp))
9433 {
9434 case EQ_EXPR:
9435 code = EQ;
9436 break;
9437 case NE_EXPR:
9438 code = NE;
9439 break;
9440 case LT_EXPR:
9441 if (integer_onep (arg1))
9442 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9443 else
9444 code = unsignedp ? LTU : LT;
9445 break;
9446 case LE_EXPR:
9447 if (! unsignedp && integer_all_onesp (arg1))
9448 arg1 = integer_zero_node, code = LT;
9449 else
9450 code = unsignedp ? LEU : LE;
9451 break;
9452 case GT_EXPR:
9453 if (! unsignedp && integer_all_onesp (arg1))
9454 arg1 = integer_zero_node, code = GE;
9455 else
9456 code = unsignedp ? GTU : GT;
9457 break;
9458 case GE_EXPR:
9459 if (integer_onep (arg1))
9460 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9461 else
9462 code = unsignedp ? GEU : GE;
9463 break;
9464
9465 case UNORDERED_EXPR:
9466 code = UNORDERED;
9467 break;
9468 case ORDERED_EXPR:
9469 code = ORDERED;
9470 break;
9471 case UNLT_EXPR:
9472 code = UNLT;
9473 break;
9474 case UNLE_EXPR:
9475 code = UNLE;
9476 break;
9477 case UNGT_EXPR:
9478 code = UNGT;
9479 break;
9480 case UNGE_EXPR:
9481 code = UNGE;
9482 break;
9483 case UNEQ_EXPR:
9484 code = UNEQ;
9485 break;
9486
9487 default:
9488 abort ();
9489 }
9490
9491 /* Put a constant second. */
9492 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9493 {
9494 tem = arg0; arg0 = arg1; arg1 = tem;
9495 code = swap_condition (code);
9496 }
9497
9498 /* If this is an equality or inequality test of a single bit, we can
9499 do this by shifting the bit being tested to the low-order bit and
9500 masking the result with the constant 1. If the condition was EQ,
9501 we xor it with 1. This does not require an scc insn and is faster
9502 than an scc insn even if we have it.
9503
9504 The code to make this transformation was moved into fold_single_bit_test,
9505 so we just call into the folder and expand its result. */
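  /* For example (illustrative), "(x & 8) != 0" becomes "(x >> 3) & 1",
     and "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1".  */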
9506
9507 if ((code == NE || code == EQ)
9508 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9509 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9510 {
9511 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9512 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9513 arg0, arg1, type),
9514 target, VOIDmode, EXPAND_NORMAL);
9515 }
9516
9517 /* Now see if we are likely to be able to do this. Return if not. */
9518 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9519 return 0;
9520
9521 icode = setcc_gen_code[(int) code];
9522 if (icode == CODE_FOR_nothing
9523 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9524 {
9525 /* We can only do this if it is one of the special cases that
9526 can be handled without an scc insn. */
9527 if ((code == LT && integer_zerop (arg1))
9528 || (! only_cheap && code == GE && integer_zerop (arg1)))
9529 ;
9530 else if (BRANCH_COST >= 0
9531 && ! only_cheap && (code == NE || code == EQ)
9532 && TREE_CODE (type) != REAL_TYPE
9533 && ((abs_optab->handlers[(int) operand_mode].insn_code
9534 != CODE_FOR_nothing)
9535 || (ffs_optab->handlers[(int) operand_mode].insn_code
9536 != CODE_FOR_nothing)))
9537 ;
9538 else
9539 return 0;
9540 }
9541
9542 if (! get_subtarget (target)
9543 || GET_MODE (subtarget) != operand_mode)
9544 subtarget = 0;
9545
9546 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9547
9548 if (target == 0)
9549 target = gen_reg_rtx (mode);
9550
9551 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9552      because, if emit_store_flag does anything, it will succeed and
9553 OP0 and OP1 will not be used subsequently. */
9554
9555 result = emit_store_flag (target, code,
9556 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9557 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9558 operand_mode, unsignedp, 1);
9559
9560 if (result)
9561 {
9562 if (invert)
9563 result = expand_binop (mode, xor_optab, result, const1_rtx,
9564 result, 0, OPTAB_LIB_WIDEN);
9565 return result;
9566 }
9567
9568 /* If this failed, we have to do this with set/compare/jump/set code. */
9569 if (GET_CODE (target) != REG
9570 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9571 target = gen_reg_rtx (GET_MODE (target));
9572
9573 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9574 result = compare_from_rtx (op0, op1, code, unsignedp,
9575 operand_mode, NULL_RTX);
9576 if (GET_CODE (result) == CONST_INT)
9577 return (((result == const0_rtx && ! invert)
9578 || (result != const0_rtx && invert))
9579 ? const0_rtx : const1_rtx);
9580
9581 /* The code of RESULT may not match CODE if compare_from_rtx
9582 decided to swap its operands and reverse the original code.
9583
9584 We know that compare_from_rtx returns either a CONST_INT or
9585 a new comparison code, so it is safe to just extract the
9586 code from RESULT. */
9587 code = GET_CODE (result);
9588
9589 label = gen_label_rtx ();
9590 if (bcc_gen_fctn[(int) code] == 0)
9591 abort ();
9592
9593 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9594 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9595 emit_label (label);
9596
9597 return target;
9598 }
9599 \f
9600
9601 /* Stubs in case we haven't got a casesi insn. */
9602 #ifndef HAVE_casesi
9603 # define HAVE_casesi 0
9604 # define gen_casesi(a, b, c, d, e) (0)
9605 # define CODE_FOR_casesi CODE_FOR_nothing
9606 #endif
9607
9608 /* If the machine does not have a case insn that compares the bounds,
9609 this means extra overhead for dispatch tables, which raises the
9610 threshold for using them. */
9611 #ifndef CASE_VALUES_THRESHOLD
9612 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9613 #endif /* CASE_VALUES_THRESHOLD */
9614
9615 unsigned int
9616 case_values_threshold (void)
9617 {
9618 return CASE_VALUES_THRESHOLD;
9619 }
9620
9621 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9622 0 otherwise (i.e. if there is no casesi instruction). */
9623 int
9624 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9625 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9626 {
9627 enum machine_mode index_mode = SImode;
9628 int index_bits = GET_MODE_BITSIZE (index_mode);
9629 rtx op1, op2, index;
9630 enum machine_mode op_mode;
9631
9632 if (! HAVE_casesi)
9633 return 0;
9634
9635 /* Convert the index to SImode. */
9636 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9637 {
9638 enum machine_mode omode = TYPE_MODE (index_type);
9639 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9640
9641 /* We must handle the endpoints in the original mode. */
9642 index_expr = build (MINUS_EXPR, index_type,
9643 index_expr, minval);
9644 minval = integer_zero_node;
9645 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9646 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9647 omode, 1, default_label);
9648 /* Now we can safely truncate. */
9649 index = convert_to_mode (index_mode, index, 0);
9650 }
9651 else
9652 {
9653 if (TYPE_MODE (index_type) != index_mode)
9654 {
9655 index_expr = convert ((*lang_hooks.types.type_for_size)
9656 (index_bits, 0), index_expr);
9657 index_type = TREE_TYPE (index_expr);
9658 }
9659
9660 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9661 }
9662 emit_queue ();
9663 index = protect_from_queue (index, 0);
9664 do_pending_stack_adjust ();
9665
9666 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9667 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9668 (index, op_mode))
9669 index = copy_to_mode_reg (op_mode, index);
9670
9671 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9672
9673 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9674 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9675 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9676 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9677 (op1, op_mode))
9678 op1 = copy_to_mode_reg (op_mode, op1);
9679
9680 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9681
9682 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9683 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9684 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9685 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9686 (op2, op_mode))
9687 op2 = copy_to_mode_reg (op_mode, op2);
9688
9689 emit_jump_insn (gen_casesi (index, op1, op2,
9690 table_label, default_label));
9691 return 1;
9692 }
9693
9694 /* Attempt to generate a tablejump instruction; same concept. */
9695 #ifndef HAVE_tablejump
9696 #define HAVE_tablejump 0
9697 #define gen_tablejump(x, y) (0)
9698 #endif
9699
9700 /* Subroutine of the next function.
9701
9702 INDEX is the value being switched on, with the lowest value
9703 in the table already subtracted.
9704 MODE is its expected mode (needed if INDEX is constant).
9705 RANGE is the length of the jump table.
9706 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9707
9708 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9709 index value is out of range. */
9710
9711 static void
9712 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9713 rtx default_label)
9714 {
9715 rtx temp, vector;
9716
9717 if (INTVAL (range) > cfun->max_jumptable_ents)
9718 cfun->max_jumptable_ents = INTVAL (range);
9719
9720 /* Do an unsigned comparison (in the proper mode) between the index
9721 expression and the value which represents the length of the range.
9722 Since we just finished subtracting the lower bound of the range
9723 from the index expression, this comparison allows us to simultaneously
9724 check that the original index expression value is both greater than
9725 or equal to the minimum value of the range and less than or equal to
9726 the maximum value of the range. */
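  /* For instance (illustrative), for case labels spanning 10 through 20,
     INDEX has already had 10 subtracted and RANGE is 10; the single
     unsigned test INDEX > RANGE catches original values below 10 (which
     wrapped around to large unsigned values) as well as values above 20.  */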
9727
9728 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9729 default_label);
9730
9731 /* If index is in range, it must fit in Pmode.
9732 Convert to Pmode so we can index with it. */
9733 if (mode != Pmode)
9734 index = convert_to_mode (Pmode, index, 1);
9735
9736 /* Don't let a MEM slip through, because then INDEX that comes
9737 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9738 and break_out_memory_refs will go to work on it and mess it up. */
9739 #ifdef PIC_CASE_VECTOR_ADDRESS
9740 if (flag_pic && GET_CODE (index) != REG)
9741 index = copy_to_mode_reg (Pmode, index);
9742 #endif
9743
9744 /* If flag_force_addr were to affect this address
9745 it could interfere with the tricky assumptions made
9746 about addresses that contain label-refs,
9747 which may be valid only very near the tablejump itself. */
9748 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9749 GET_MODE_SIZE, because this indicates how large insns are. The other
9750 uses should all be Pmode, because they are addresses. This code
9751 could fail if addresses and insns are not the same size. */
9752 index = gen_rtx_PLUS (Pmode,
9753 gen_rtx_MULT (Pmode, index,
9754 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9755 gen_rtx_LABEL_REF (Pmode, table_label));
9756 #ifdef PIC_CASE_VECTOR_ADDRESS
9757 if (flag_pic)
9758 index = PIC_CASE_VECTOR_ADDRESS (index);
9759 else
9760 #endif
9761 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9762 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9763 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9764 RTX_UNCHANGING_P (vector) = 1;
9765 MEM_NOTRAP_P (vector) = 1;
9766 convert_move (temp, vector, 0);
9767
9768 emit_jump_insn (gen_tablejump (temp, table_label));
9769
9770 /* If we are generating PIC code or if the table is PC-relative, the
9771 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9772 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9773 emit_barrier ();
9774 }
9775
9776 int
9777 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9778 rtx table_label, rtx default_label)
9779 {
9780 rtx index;
9781
9782 if (! HAVE_tablejump)
9783 return 0;
9784
9785 index_expr = fold (build (MINUS_EXPR, index_type,
9786 convert (index_type, index_expr),
9787 convert (index_type, minval)));
9788 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9789 emit_queue ();
9790 index = protect_from_queue (index, 0);
9791 do_pending_stack_adjust ();
9792
9793 do_tablejump (index, TYPE_MODE (index_type),
9794 convert_modes (TYPE_MODE (index_type),
9795 TYPE_MODE (TREE_TYPE (range)),
9796 expand_expr (range, NULL_RTX,
9797 VOIDmode, 0),
9798 TREE_UNSIGNED (TREE_TYPE (range))),
9799 table_label, default_label);
9800 return 1;
9801 }
9802
9803 /* Nonzero if the mode is a valid vector mode for this architecture.
9804 This returns nonzero even if there is no hardware support for the
9805 vector mode, but we can emulate with narrower modes. */
9806
9807 int
9808 vector_mode_valid_p (enum machine_mode mode)
9809 {
9810 enum mode_class class = GET_MODE_CLASS (mode);
9811 enum machine_mode innermode;
9812
9813 /* Doh! What's going on? */
9814 if (class != MODE_VECTOR_INT
9815 && class != MODE_VECTOR_FLOAT)
9816 return 0;
9817
9818 /* Hardware support. Woo hoo! */
9819 if (VECTOR_MODE_SUPPORTED_P (mode))
9820 return 1;
9821
9822 innermode = GET_MODE_INNER (mode);
9823
9824   /* We should probably return 1 if requesting V4DI and we have no DI,
9825      but do have V2DI; however, that case is probably very unlikely.  */
9826
9827 /* If we have support for the inner mode, we can safely emulate it.
9828      We may not have V2DI, but we can emulate with a pair of DIs.  */
9829 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9830 }
9831
9832 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9833 static rtx
9834 const_vector_from_tree (tree exp)
9835 {
9836 rtvec v;
9837 int units, i;
9838 tree link, elt;
9839 enum machine_mode inner, mode;
9840
9841 mode = TYPE_MODE (TREE_TYPE (exp));
9842
9843 if (is_zeros_p (exp))
9844 return CONST0_RTX (mode);
9845
9846 units = GET_MODE_NUNITS (mode);
9847 inner = GET_MODE_INNER (mode);
9848
9849 v = rtvec_alloc (units);
9850
9851 link = TREE_VECTOR_CST_ELTS (exp);
9852 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9853 {
9854 elt = TREE_VALUE (link);
9855
9856 if (TREE_CODE (elt) == REAL_CST)
9857 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9858 inner);
9859 else
9860 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9861 TREE_INT_CST_HIGH (elt),
9862 inner);
9863 }
9864
9865 /* Initialize remaining elements to 0. */
9866 for (; i < units; ++i)
9867 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9868
9869 return gen_rtx_raw_CONST_VECTOR (mode, v);
9870 }
9871
9872 #include "gt-expr.h"