[gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "target.h"
51
52 /* Decide whether a function's arguments should be processed
53 from first to last or from last to first.
54
55 They should be processed from last to first if the stack and args
56 grow in opposite directions, but only if we have push insns. */
57
58 #ifdef PUSH_ROUNDING
59
60 #ifndef PUSH_ARGS_REVERSED
61 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
62 #define PUSH_ARGS_REVERSED /* If it's last to first. */
63 #endif
64 #endif
65
66 #endif
67
68 #ifndef STACK_PUSH_CODE
69 #ifdef STACK_GROWS_DOWNWARD
70 #define STACK_PUSH_CODE PRE_DEC
71 #else
72 #define STACK_PUSH_CODE PRE_INC
73 #endif
74 #endif
75
76 /* Assume that case vectors are not pc-relative. */
77 #ifndef CASE_VECTOR_PC_RELATIVE
78 #define CASE_VECTOR_PC_RELATIVE 0
79 #endif
80
81 /* Convert defined/undefined to boolean. */
82 #ifdef TARGET_MEM_FUNCTIONS
83 #undef TARGET_MEM_FUNCTIONS
84 #define TARGET_MEM_FUNCTIONS 1
85 #else
86 #define TARGET_MEM_FUNCTIONS 0
87 #endif
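/* Normalizing TARGET_MEM_FUNCTIONS to a 0/1 value lets the code below
   test it with an ordinary C conditional instead of preprocessor
   blocks, so both branches are always compiled and type-checked.
   A rough sketch of the idiom as it is used later in this file:

     if (TARGET_MEM_FUNCTIONS)
       size_mode = TYPE_MODE (sizetype);            (memcpy-style call)
     else
       size_mode = TYPE_MODE (unsigned_type_node);  (bcopy-style call)  */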
88
89
90 /* If this is nonzero, we do not bother generating VOLATILE
91 around volatile memory references, and we are willing to
92 output indirect addresses. If cse is to follow, we reject
93 indirect addresses so a useful potential cse is generated;
94 if it is used only once, instruction combination will produce
95 the same indirect address eventually. */
96 int cse_not_expected;
97
98 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
99 tree placeholder_list = 0;
100
101 /* This structure is used by move_by_pieces to describe the move to
102 be performed. */
103 struct move_by_pieces
104 {
105 rtx to;
106 rtx to_addr;
107 int autinc_to;
108 int explicit_inc_to;
109 rtx from;
110 rtx from_addr;
111 int autinc_from;
112 int explicit_inc_from;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 int reverse;
116 };
117
118 /* This structure is used by store_by_pieces to describe the store to
119 be performed. */
120
121 struct store_by_pieces
122 {
123 rtx to;
124 rtx to_addr;
125 int autinc_to;
126 int explicit_inc_to;
127 unsigned HOST_WIDE_INT len;
128 HOST_WIDE_INT offset;
129 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
130 void *constfundata;
131 int reverse;
132 };
133
134 static rtx enqueue_insn (rtx, rtx);
135 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
136 unsigned int);
137 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
141 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
142 static tree emit_block_move_libcall_fn (int);
143 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
144 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
145 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
146 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
147 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
148 struct store_by_pieces *);
149 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
150 static rtx clear_storage_via_libcall (rtx, rtx);
151 static tree clear_storage_libcall_fn (int);
152 static rtx compress_float_constant (rtx, rtx);
153 static rtx get_subtarget (rtx);
154 static int is_zeros_p (tree);
155 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int);
158 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
159 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int, tree, int);
161 static rtx var_rtx (tree);
162
163 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
164 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
165
166 static int is_aligning_offset (tree, tree);
167 static rtx expand_increment (tree, int, int);
168 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
169 enum expand_modifier);
170 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
171 #ifdef PUSH_ROUNDING
172 static void emit_single_push_insn (enum machine_mode, rtx, tree);
173 #endif
174 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
175 static rtx const_vector_from_tree (tree);
176
177 /* Record for each mode whether we can move a register directly to or
178 from an object of that mode in memory. If we can't, we won't try
179 to use that mode directly when accessing a field of that mode. */
180
181 static char direct_load[NUM_MACHINE_MODES];
182 static char direct_store[NUM_MACHINE_MODES];
183
184 /* Record for each mode whether we can float-extend from memory. */
185
186 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
187
188 /* If a memory-to-memory move would take MOVE_RATIO or more simple
189 move-instruction sequences, we will do a movstr or libcall instead. */
190
191 #ifndef MOVE_RATIO
192 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
193 #define MOVE_RATIO 2
194 #else
195 /* If we are optimizing for space (-Os), cut down the default move ratio. */
196 #define MOVE_RATIO (optimize_size ? 3 : 15)
197 #endif
198 #endif
199
200 /* This macro is used to determine whether move_by_pieces should be called
201 to perform a structure copy. */
202 #ifndef MOVE_BY_PIECES_P
203 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
204 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
205 #endif
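/* A rough worked example of this heuristic, on a hypothetical 32-bit
   target with MOVE_MAX 4 and the default MOVE_RATIO of 15: a 32-byte
   word-aligned copy needs 32 / 4 = 8 SImode moves, and 8 < 15, so
   move_by_pieces is used; a 256-byte copy would need 64 moves and
   falls through to a movstr pattern or a library call instead.  */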
206
207 /* If a clear memory operation would take CLEAR_RATIO or more simple
208 move-instruction sequences, we will do a clrstr or libcall instead. */
209
210 #ifndef CLEAR_RATIO
211 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
212 #define CLEAR_RATIO 2
213 #else
214 /* If we are optimizing for space, cut down the default clear ratio. */
215 #define CLEAR_RATIO (optimize_size ? 3 : 15)
216 #endif
217 #endif
218
219 /* This macro is used to determine whether clear_by_pieces should be
220 called to clear storage. */
221 #ifndef CLEAR_BY_PIECES_P
222 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
223 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
224 #endif
225
226 /* This macro is used to determine whether store_by_pieces should be
227 called to "memset" storage with byte values other than zero, or
228 to "memcpy" storage when the source is a constant string. */
229 #ifndef STORE_BY_PIECES_P
230 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
231 #endif
232
233 /* This array records the insn_code of insns to perform block moves. */
234 enum insn_code movstr_optab[NUM_MACHINE_MODES];
235
236 /* This array records the insn_code of insns to perform block clears. */
237 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
238
239 /* These arrays record the insn_code of two different kinds of insns
240 to perform block compares. */
241 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
242 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
243
244 /* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
245 struct file_stack *expr_wfl_stack;
246
247 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
248
249 #ifndef SLOW_UNALIGNED_ACCESS
250 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
251 #endif
252 \f
253 /* This is run once per compilation to set up which modes can be used
254 directly in memory and to initialize the block move optab. */
255
256 void
257 init_expr_once (void)
258 {
259 rtx insn, pat;
260 enum machine_mode mode;
261 int num_clobbers;
262 rtx mem, mem1;
263 rtx reg;
264
265 /* Try indexing by frame ptr and try by stack ptr.
266 It is known that on the Convex the stack ptr isn't a valid index.
267 With luck, one or the other is valid on any machine. */
268 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
269 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
270
271 /* A scratch register we can modify in-place below to avoid
272 useless RTL allocations. */
273 reg = gen_rtx_REG (VOIDmode, -1);
274
275 insn = rtx_alloc (INSN);
276 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
277 PATTERN (insn) = pat;
278
279 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
280 mode = (enum machine_mode) ((int) mode + 1))
281 {
282 int regno;
283
284 direct_load[(int) mode] = direct_store[(int) mode] = 0;
285 PUT_MODE (mem, mode);
286 PUT_MODE (mem1, mode);
287 PUT_MODE (reg, mode);
288
289 /* See if there is some register that can be used in this mode and
290 directly loaded or stored from memory. */
291
292 if (mode != VOIDmode && mode != BLKmode)
293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
294 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
295 regno++)
296 {
297 if (! HARD_REGNO_MODE_OK (regno, mode))
298 continue;
299
300 REGNO (reg) = regno;
301
302 SET_SRC (pat) = mem;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
306
307 SET_SRC (pat) = mem1;
308 SET_DEST (pat) = reg;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_load[(int) mode] = 1;
311
312 SET_SRC (pat) = reg;
313 SET_DEST (pat) = mem;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
316
317 SET_SRC (pat) = reg;
318 SET_DEST (pat) = mem1;
319 if (recog (pat, insn, &num_clobbers) >= 0)
320 direct_store[(int) mode] = 1;
321 }
322 }
323
324 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
325
326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
327 mode = GET_MODE_WIDER_MODE (mode))
328 {
329 enum machine_mode srcmode;
330 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
331 srcmode = GET_MODE_WIDER_MODE (srcmode))
332 {
333 enum insn_code ic;
334
335 ic = can_extend_p (mode, srcmode, 0);
336 if (ic == CODE_FOR_nothing)
337 continue;
338
339 PUT_MODE (mem, srcmode);
340
341 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
342 float_extend_from_mem[mode][srcmode] = true;
343 }
344 }
345 }
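/* The direct_load and direct_store tables filled in above are consulted
   later (e.g. in convert_move and convert_modes): when truncating, the
   code refers to a MEM in the narrower mode only if direct_load for
   that mode is nonzero, and otherwise first copies the value into a
   register.  This is only a summary of the checks made below.  */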
346
347 /* This is run at the start of compiling a function. */
348
349 void
350 init_expr (void)
351 {
352 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
353 }
354
355 /* Small sanity check that the queue is empty at the end of a function. */
356
357 void
358 finish_expr_for_function (void)
359 {
360 if (pending_chain)
361 abort ();
362 }
363 \f
364 /* Manage the queue of increment instructions to be output
365 for POSTINCREMENT_EXPR expressions, etc. */
366
367 /* Queue up to increment (or change) VAR later. BODY says how:
368 BODY should be the same thing you would pass to emit_insn
369 to increment right away. It will go to emit_insn later on.
370
371 The value is a QUEUED expression to be used in place of VAR
372 where you want to guarantee the pre-incrementation value of VAR. */
373
374 static rtx
375 enqueue_insn (rtx var, rtx body)
376 {
377 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
378 body, pending_chain);
379 return pending_chain;
380 }
381
382 /* Use protect_from_queue to convert a QUEUED expression
383 into something that you can put immediately into an instruction.
384 If the queued incrementation has not happened yet,
385 protect_from_queue returns the variable itself.
386 If the incrementation has happened, protect_from_queue returns a temp
387 that contains a copy of the old value of the variable.
388
389 Any time an rtx which might possibly be a QUEUED is to be put
390 into an instruction, it must be passed through protect_from_queue first.
391 QUEUED expressions are not meaningful in instructions.
392
393 Do not pass a value through protect_from_queue and then hold
394 on to it for a while before putting it in an instruction!
395 If the queue is flushed in between, incorrect code will result. */
396
397 rtx
398 protect_from_queue (rtx x, int modify)
399 {
400 RTX_CODE code = GET_CODE (x);
401
402 #if 0 /* A QUEUED can hang around after the queue is forced out. */
403 /* Shortcut for most common case. */
404 if (pending_chain == 0)
405 return x;
406 #endif
407
408 if (code != QUEUED)
409 {
410 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
411 use of autoincrement. Make a copy of the contents of the memory
412 location rather than a copy of the address, but not if the value is
413 of mode BLKmode. Don't modify X in place since it might be
414 shared. */
415 if (code == MEM && GET_MODE (x) != BLKmode
416 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
417 {
418 rtx y = XEXP (x, 0);
419 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
420
421 if (QUEUED_INSN (y))
422 {
423 rtx temp = gen_reg_rtx (GET_MODE (x));
424
425 emit_insn_before (gen_move_insn (temp, new),
426 QUEUED_INSN (y));
427 return temp;
428 }
429
430 /* Copy the address into a pseudo, so that the returned value
431 remains correct across calls to emit_queue. */
432 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
433 }
434
435 /* Otherwise, recursively protect the subexpressions of all
436 the kinds of rtx's that can contain a QUEUED. */
437 if (code == MEM)
438 {
439 rtx tem = protect_from_queue (XEXP (x, 0), 0);
440 if (tem != XEXP (x, 0))
441 {
442 x = copy_rtx (x);
443 XEXP (x, 0) = tem;
444 }
445 }
446 else if (code == PLUS || code == MULT)
447 {
448 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
449 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
450 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
451 {
452 x = copy_rtx (x);
453 XEXP (x, 0) = new0;
454 XEXP (x, 1) = new1;
455 }
456 }
457 return x;
458 }
459 /* If the increment has not happened, use the variable itself. Copy it
460 into a new pseudo so that the value remains correct across calls to
461 emit_queue. */
462 if (QUEUED_INSN (x) == 0)
463 return copy_to_reg (QUEUED_VAR (x));
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
474 }
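/* A rough sketch of the queue lifecycle for a C postincrement such as
   "b = i++" (names and insn shapes are illustrative only):

     q = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));
     x = protect_from_queue (q, 0);   yields the pre-increment value
     ... use X in the insns that need the old value of I ...
     emit_queue ();                   emits the queued increment

   The real expansion is done by expand_increment and expand_expr;
   this only illustrates the ordering rules described above.  */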
475
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
480
481 int
482 queued_subexp_p (rtx x)
483 {
484 enum rtx_code code = GET_CODE (x);
485 switch (code)
486 {
487 case QUEUED:
488 return 1;
489 case MEM:
490 return queued_subexp_p (XEXP (x, 0));
491 case MULT:
492 case PLUS:
493 case MINUS:
494 return (queued_subexp_p (XEXP (x, 0))
495 || queued_subexp_p (XEXP (x, 1)));
496 default:
497 return 0;
498 }
499 }
500
501 /* Perform all the pending incrementations. */
502
503 void
504 emit_queue (void)
505 {
506 rtx p;
507 while ((p = pending_chain))
508 {
509 rtx body = QUEUED_BODY (p);
510
511 switch (GET_CODE (body))
512 {
513 case INSN:
514 case JUMP_INSN:
515 case CALL_INSN:
516 case CODE_LABEL:
517 case BARRIER:
518 case NOTE:
519 QUEUED_INSN (p) = body;
520 emit_insn (body);
521 break;
522
523 #ifdef ENABLE_CHECKING
524 case SEQUENCE:
525 abort ();
526 break;
527 #endif
528
529 default:
530 QUEUED_INSN (p) = emit_insn (body);
531 break;
532 }
533
534 pending_chain = QUEUED_NEXT (p);
535 }
536 }
537 \f
538 /* Copy data from FROM to TO, where the machine modes are not the same.
539 Both modes may be integer, or both may be floating.
540 UNSIGNEDP should be nonzero if FROM is an unsigned type.
541 This causes zero-extension instead of sign-extension. */
542
543 void
544 convert_move (rtx to, rtx from, int unsignedp)
545 {
546 enum machine_mode to_mode = GET_MODE (to);
547 enum machine_mode from_mode = GET_MODE (from);
548 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
549 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
550 enum insn_code code;
551 rtx libcall;
552
553 /* rtx code for making an equivalent value. */
554 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
555 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
556
557 to = protect_from_queue (to, 1);
558 from = protect_from_queue (from, 0);
559
560 if (to_real != from_real)
561 abort ();
562
563 /* If FROM is a SUBREG that indicates that we have already done at least
564 the required extension, strip it. We don't handle such SUBREGs as
565 TO here. */
566
567 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
568 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
569 >= GET_MODE_SIZE (to_mode))
570 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
571 from = gen_lowpart (to_mode, from), from_mode = to_mode;
572
573 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
574 abort ();
575
576 if (to_mode == from_mode
577 || (from_mode == VOIDmode && CONSTANT_P (from)))
578 {
579 emit_move_insn (to, from);
580 return;
581 }
582
583 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
584 {
585 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
586 abort ();
587
588 if (VECTOR_MODE_P (to_mode))
589 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
590 else
591 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
592
593 emit_move_insn (to, from);
594 return;
595 }
596
597 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
598 {
599 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
600 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
601 return;
602 }
603
604 if (to_real)
605 {
606 rtx value, insns;
607 convert_optab tab;
608
609 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
610 tab = sext_optab;
611 else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
612 tab = trunc_optab;
613 else
614 abort ();
615
616 /* Try converting directly if the insn is supported. */
617
618 code = tab->handlers[to_mode][from_mode].insn_code;
619 if (code != CODE_FOR_nothing)
620 {
621 emit_unop_insn (code, to, from,
622 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
623 return;
624 }
625
626 /* Otherwise use a libcall. */
627 libcall = tab->handlers[to_mode][from_mode].libfunc;
628
629 if (!libcall)
630 /* This conversion is not implemented yet. */
631 abort ();
632
633 start_sequence ();
634 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
635 1, from, from_mode);
636 insns = get_insns ();
637 end_sequence ();
638 emit_libcall_block (insns, to, value,
639 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
640 from)
641 : gen_rtx_FLOAT_EXTEND (to_mode, from));
642 return;
643 }
644
645 /* Handle pointer conversion. */ /* SPEE 900220. */
646 /* Targets are expected to provide conversion insns between PxImode and
647 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
648 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
649 {
650 enum machine_mode full_mode
651 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
652
653 if (trunc_optab->handlers[to_mode][full_mode].insn_code
654 == CODE_FOR_nothing)
655 abort ();
656
657 if (full_mode != from_mode)
658 from = convert_to_mode (full_mode, from, unsignedp);
659 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
660 to, from, UNKNOWN);
661 return;
662 }
663 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
664 {
665 enum machine_mode full_mode
666 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
667
668 if (sext_optab->handlers[full_mode][from_mode].insn_code
669 == CODE_FOR_nothing)
670 abort ();
671
672 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
673 to, from, UNKNOWN);
674 if (to_mode == full_mode)
675 return;
676
677 /* else proceed to integer conversions below */
678 from_mode = full_mode;
679 }
680
681 /* Now both modes are integers. */
682
683 /* Handle expanding beyond a word. */
684 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
685 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
686 {
687 rtx insns;
688 rtx lowpart;
689 rtx fill_value;
690 rtx lowfrom;
691 int i;
692 enum machine_mode lowpart_mode;
693 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
694
695 /* Try converting directly if the insn is supported. */
696 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
697 != CODE_FOR_nothing)
698 {
699 /* If FROM is a SUBREG, put it into a register. Do this
700 so that we always generate the same set of insns for
701 better cse'ing; if an intermediate assignment occurred,
702 we won't be doing the operation directly on the SUBREG. */
703 if (optimize > 0 && GET_CODE (from) == SUBREG)
704 from = force_reg (from_mode, from);
705 emit_unop_insn (code, to, from, equiv_code);
706 return;
707 }
708 /* Next, try converting via full word. */
709 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
710 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
711 != CODE_FOR_nothing))
712 {
713 if (GET_CODE (to) == REG)
714 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
715 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
716 emit_unop_insn (code, to,
717 gen_lowpart (word_mode, to), equiv_code);
718 return;
719 }
720
721 /* No special multiword conversion insn; do it by hand. */
722 start_sequence ();
723
724 /* Since we will turn this into a no conflict block, we must ensure
725 that the source does not overlap the target. */
726
727 if (reg_overlap_mentioned_p (to, from))
728 from = force_reg (from_mode, from);
729
730 /* Get a copy of FROM widened to a word, if necessary. */
731 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
732 lowpart_mode = word_mode;
733 else
734 lowpart_mode = from_mode;
735
736 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
737
738 lowpart = gen_lowpart (lowpart_mode, to);
739 emit_move_insn (lowpart, lowfrom);
740
741 /* Compute the value to put in each remaining word. */
742 if (unsignedp)
743 fill_value = const0_rtx;
744 else
745 {
746 #ifdef HAVE_slt
747 if (HAVE_slt
748 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
749 && STORE_FLAG_VALUE == -1)
750 {
751 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
752 lowpart_mode, 0);
753 fill_value = gen_reg_rtx (word_mode);
754 emit_insn (gen_slt (fill_value));
755 }
756 else
757 #endif
758 {
759 fill_value
760 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
761 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
762 NULL_RTX, 0);
763 fill_value = convert_to_mode (word_mode, fill_value, 1);
764 }
765 }
766
767 /* Fill the remaining words. */
768 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
769 {
770 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
771 rtx subword = operand_subword (to, index, 1, to_mode);
772
773 if (subword == 0)
774 abort ();
775
776 if (fill_value != subword)
777 emit_move_insn (subword, fill_value);
778 }
779
780 insns = get_insns ();
781 end_sequence ();
782
783 emit_no_conflict_block (insns, to, from, NULL_RTX,
784 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
785 return;
786 }
787
788 /* Truncating multi-word to a word or less. */
789 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
790 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
791 {
792 if (!((GET_CODE (from) == MEM
793 && ! MEM_VOLATILE_P (from)
794 && direct_load[(int) to_mode]
795 && ! mode_dependent_address_p (XEXP (from, 0)))
796 || GET_CODE (from) == REG
797 || GET_CODE (from) == SUBREG))
798 from = force_reg (from_mode, from);
799 convert_move (to, gen_lowpart (word_mode, from), 0);
800 return;
801 }
802
803 /* Now follow all the conversions between integers
804 no more than a word long. */
805
806 /* For truncation, usually we can just refer to FROM in a narrower mode. */
807 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
808 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
809 GET_MODE_BITSIZE (from_mode)))
810 {
811 if (!((GET_CODE (from) == MEM
812 && ! MEM_VOLATILE_P (from)
813 && direct_load[(int) to_mode]
814 && ! mode_dependent_address_p (XEXP (from, 0)))
815 || GET_CODE (from) == REG
816 || GET_CODE (from) == SUBREG))
817 from = force_reg (from_mode, from);
818 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
819 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
820 from = copy_to_reg (from);
821 emit_move_insn (to, gen_lowpart (to_mode, from));
822 return;
823 }
824
825 /* Handle extension. */
826 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
827 {
828 /* Convert directly if that works. */
829 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
830 != CODE_FOR_nothing)
831 {
832 if (flag_force_mem)
833 from = force_not_mem (from);
834
835 emit_unop_insn (code, to, from, equiv_code);
836 return;
837 }
838 else
839 {
840 enum machine_mode intermediate;
841 rtx tmp;
842 tree shift_amount;
843
844 /* Search for a mode to convert via. */
845 for (intermediate = from_mode; intermediate != VOIDmode;
846 intermediate = GET_MODE_WIDER_MODE (intermediate))
847 if (((can_extend_p (to_mode, intermediate, unsignedp)
848 != CODE_FOR_nothing)
849 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
850 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
851 GET_MODE_BITSIZE (intermediate))))
852 && (can_extend_p (intermediate, from_mode, unsignedp)
853 != CODE_FOR_nothing))
854 {
855 convert_move (to, convert_to_mode (intermediate, from,
856 unsignedp), unsignedp);
857 return;
858 }
859
860 /* No suitable intermediate mode.
861 Generate what we need with shifts. */
862 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
863 - GET_MODE_BITSIZE (from_mode), 0);
864 from = gen_lowpart (to_mode, force_reg (from_mode, from));
865 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
866 to, unsignedp);
867 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
868 to, unsignedp);
869 if (tmp != to)
870 emit_move_insn (to, tmp);
871 return;
872 }
873 }
874
875 /* Support special truncate insns for certain modes. */
876 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
877 {
878 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
879 to, from, UNKNOWN);
880 return;
881 }
882
883 /* Handle truncation of volatile memrefs, and so on;
884 the things that couldn't be truncated directly,
885 and for which there was no special instruction.
886
887 ??? Code above formerly short-circuited this, for most integer
888 mode pairs, with a force_reg in from_mode followed by a recursive
889 call to this routine. Appears always to have been wrong. */
890 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
891 {
892 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
893 emit_move_insn (to, temp);
894 return;
895 }
896
897 /* Mode combination is not recognized. */
898 abort ();
899 }
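/* A rough example of the multiword widening path above: sign-extending
   an SImode value to DImode on a hypothetical 32-bit word target with
   no direct extend insn.  The low word of TO receives the SImode value,
   and FILL_VALUE is computed as an arithmetic right shift of the low
   part by 31 bits, which replicates the sign bit across the high word;
   unsigned extension simply stores const0_rtx there instead.  */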
900
901 /* Return an rtx for a value that would result
902 from converting X to mode MODE.
903 Both X and MODE may be floating, or both integer.
904 UNSIGNEDP is nonzero if X is an unsigned value.
905 This can be done by referring to a part of X in place
906 or by copying to a new temporary with conversion.
907
908 This function *must not* call protect_from_queue
909 except when putting X into an insn (in which case convert_move does it). */
910
911 rtx
912 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
913 {
914 return convert_modes (mode, VOIDmode, x, unsignedp);
915 }
916
917 /* Return an rtx for a value that would result
918 from converting X from mode OLDMODE to mode MODE.
919 Both modes may be floating, or both integer.
920 UNSIGNEDP is nonzero if X is an unsigned value.
921
922 This can be done by referring to a part of X in place
923 or by copying to a new temporary with conversion.
924
925 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
926
927 This function *must not* call protect_from_queue
928 except when putting X into an insn (in which case convert_move does it). */
929
930 rtx
931 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
932 {
933 rtx temp;
934
935 /* If FROM is a SUBREG that indicates that we have already done at least
936 the required extension, strip it. */
937
938 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
939 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
940 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
941 x = gen_lowpart (mode, x);
942
943 if (GET_MODE (x) != VOIDmode)
944 oldmode = GET_MODE (x);
945
946 if (mode == oldmode)
947 return x;
948
949 /* There is one case that we must handle specially: If we are converting
950 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
951 we are to interpret the constant as unsigned, gen_lowpart will do
952 the wrong thing if the constant appears negative. What we want to do is
953 make the high-order word of the constant zero, not all ones. */
954
955 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
956 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
957 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
958 {
959 HOST_WIDE_INT val = INTVAL (x);
960
961 if (oldmode != VOIDmode
962 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
963 {
964 int width = GET_MODE_BITSIZE (oldmode);
965
966 /* We need to zero extend VAL. */
967 val &= ((HOST_WIDE_INT) 1 << width) - 1;
968 }
969
970 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
971 }
972
973 /* We can do this with a gen_lowpart if both desired and current modes
974 are integer, and this is either a constant integer, a register, or a
975 non-volatile MEM. Except for the constant case where MODE is no
976 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
977
978 if ((GET_CODE (x) == CONST_INT
979 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
980 || (GET_MODE_CLASS (mode) == MODE_INT
981 && GET_MODE_CLASS (oldmode) == MODE_INT
982 && (GET_CODE (x) == CONST_DOUBLE
983 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
984 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
985 && direct_load[(int) mode])
986 || (GET_CODE (x) == REG
987 && (! HARD_REGISTER_P (x)
988 || HARD_REGNO_MODE_OK (REGNO (x), mode))
989 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
990 GET_MODE_BITSIZE (GET_MODE (x)))))))))
991 {
992 /* ?? If we don't know OLDMODE, we have to assume here that
993 X does not need sign- or zero-extension. This may not be
994 the case, but it's the best we can do. */
995 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
996 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
997 {
998 HOST_WIDE_INT val = INTVAL (x);
999 int width = GET_MODE_BITSIZE (oldmode);
1000
1001 /* We must sign or zero-extend in this case. Start by
1002 zero-extending, then sign extend if we need to. */
1003 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1004 if (! unsignedp
1005 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1006 val |= (HOST_WIDE_INT) (-1) << width;
1007
1008 return gen_int_mode (val, mode);
1009 }
1010
1011 return gen_lowpart (mode, x);
1012 }
1013
1014 /* Converting an integer constant into a vector mode is always
1015 equivalent to a subreg operation. */
1016 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
1017 {
1018 if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
1019 abort ();
1020 return simplify_gen_subreg (mode, x, oldmode, 0);
1021 }
1022
1023 temp = gen_reg_rtx (mode);
1024 convert_move (temp, x, unsignedp);
1025 return temp;
1026 }
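/* Two small examples of the constant handling above (QImode and HImode
   sizes assumed to be 8 and 16 bits): converting (const_int -1) whose
   OLDMODE is QImode to HImode with UNSIGNEDP set first masks the value
   to 0xff, so the result is (const_int 255); with UNSIGNEDP clear the
   sign bit (0x80) is propagated back and the result stays
   (const_int -1).  */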
1027 \f
1028 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1029 store efficiently. Due to internal GCC limitations, this is
1030 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1031 for an immediate constant. */
1032
1033 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1034
1035 /* Determine whether the LEN bytes can be moved by using several move
1036 instructions. Return nonzero if a call to move_by_pieces should
1037 succeed. */
1038
1039 int
1040 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1041 unsigned int align ATTRIBUTE_UNUSED)
1042 {
1043 return MOVE_BY_PIECES_P (len, align);
1044 }
1045
1046 /* Generate several move instructions to copy LEN bytes from block FROM to
1047 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1048 and TO through protect_from_queue before calling.
1049
1050 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1051 used to push FROM to the stack.
1052
1053 ALIGN is maximum stack alignment we can assume.
1054
1055 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
1056 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1057 stpcpy. */
1058
1059 rtx
1060 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1061 unsigned int align, int endp)
1062 {
1063 struct move_by_pieces data;
1064 rtx to_addr, from_addr = XEXP (from, 0);
1065 unsigned int max_size = MOVE_MAX_PIECES + 1;
1066 enum machine_mode mode = VOIDmode, tmode;
1067 enum insn_code icode;
1068
1069 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1070
1071 data.offset = 0;
1072 data.from_addr = from_addr;
1073 if (to)
1074 {
1075 to_addr = XEXP (to, 0);
1076 data.to = to;
1077 data.autinc_to
1078 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1079 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1080 data.reverse
1081 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1082 }
1083 else
1084 {
1085 to_addr = NULL_RTX;
1086 data.to = NULL_RTX;
1087 data.autinc_to = 1;
1088 #ifdef STACK_GROWS_DOWNWARD
1089 data.reverse = 1;
1090 #else
1091 data.reverse = 0;
1092 #endif
1093 }
1094 data.to_addr = to_addr;
1095 data.from = from;
1096 data.autinc_from
1097 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1098 || GET_CODE (from_addr) == POST_INC
1099 || GET_CODE (from_addr) == POST_DEC);
1100
1101 data.explicit_inc_from = 0;
1102 data.explicit_inc_to = 0;
1103 if (data.reverse) data.offset = len;
1104 data.len = len;
1105
1106 /* If copying requires more than two move insns,
1107 copy addresses to registers (to make displacements shorter)
1108 and use post-increment if available. */
1109 if (!(data.autinc_from && data.autinc_to)
1110 && move_by_pieces_ninsns (len, align) > 2)
1111 {
1112 /* Find the mode of the largest move... */
1113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1114 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1115 if (GET_MODE_SIZE (tmode) < max_size)
1116 mode = tmode;
1117
1118 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1119 {
1120 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1121 data.autinc_from = 1;
1122 data.explicit_inc_from = -1;
1123 }
1124 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1125 {
1126 data.from_addr = copy_addr_to_reg (from_addr);
1127 data.autinc_from = 1;
1128 data.explicit_inc_from = 1;
1129 }
1130 if (!data.autinc_from && CONSTANT_P (from_addr))
1131 data.from_addr = copy_addr_to_reg (from_addr);
1132 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1133 {
1134 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1135 data.autinc_to = 1;
1136 data.explicit_inc_to = -1;
1137 }
1138 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1139 {
1140 data.to_addr = copy_addr_to_reg (to_addr);
1141 data.autinc_to = 1;
1142 data.explicit_inc_to = 1;
1143 }
1144 if (!data.autinc_to && CONSTANT_P (to_addr))
1145 data.to_addr = copy_addr_to_reg (to_addr);
1146 }
1147
1148 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1149 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1150 align = MOVE_MAX * BITS_PER_UNIT;
1151
1152 /* First move what we can in the largest integer mode, then go to
1153 successively smaller modes. */
1154
1155 while (max_size > 1)
1156 {
1157 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1158 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1159 if (GET_MODE_SIZE (tmode) < max_size)
1160 mode = tmode;
1161
1162 if (mode == VOIDmode)
1163 break;
1164
1165 icode = mov_optab->handlers[(int) mode].insn_code;
1166 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1167 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1168
1169 max_size = GET_MODE_SIZE (mode);
1170 }
1171
1172 /* The code above should have handled everything. */
1173 if (data.len > 0)
1174 abort ();
1175
1176 if (endp)
1177 {
1178 rtx to1;
1179
1180 if (data.reverse)
1181 abort ();
1182 if (data.autinc_to)
1183 {
1184 if (endp == 2)
1185 {
1186 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1187 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1188 else
1189 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1190 -1));
1191 }
1192 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1193 data.offset);
1194 }
1195 else
1196 {
1197 if (endp == 2)
1198 --data.offset;
1199 to1 = adjust_address (data.to, QImode, data.offset);
1200 }
1201 return to1;
1202 }
1203 else
1204 return data.to;
1205 }
1206
1207 /* Return number of insns required to move L bytes by pieces.
1208 ALIGN (in bits) is maximum alignment we can assume. */
1209
1210 static unsigned HOST_WIDE_INT
1211 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1212 {
1213 unsigned HOST_WIDE_INT n_insns = 0;
1214 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1215
1216 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1217 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1218 align = MOVE_MAX * BITS_PER_UNIT;
1219
1220 while (max_size > 1)
1221 {
1222 enum machine_mode mode = VOIDmode, tmode;
1223 enum insn_code icode;
1224
1225 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1226 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1227 if (GET_MODE_SIZE (tmode) < max_size)
1228 mode = tmode;
1229
1230 if (mode == VOIDmode)
1231 break;
1232
1233 icode = mov_optab->handlers[(int) mode].insn_code;
1234 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1235 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1236
1237 max_size = GET_MODE_SIZE (mode);
1238 }
1239
1240 if (l)
1241 abort ();
1242 return n_insns;
1243 }
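/* For example, on a hypothetical 32-bit target with MOVE_MAX 4 and
   full word alignment, L = 7 gives one SImode move (4 bytes), one
   HImode move (2 bytes) and one QImode move (1 byte), so the function
   returns 3.  */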
1244
1245 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1246 with move instructions for mode MODE. GENFUN is the gen_... function
1247 to make a move insn for that mode. DATA has all the other info. */
1248
1249 static void
1250 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1251 struct move_by_pieces *data)
1252 {
1253 unsigned int size = GET_MODE_SIZE (mode);
1254 rtx to1 = NULL_RTX, from1;
1255
1256 while (data->len >= size)
1257 {
1258 if (data->reverse)
1259 data->offset -= size;
1260
1261 if (data->to)
1262 {
1263 if (data->autinc_to)
1264 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1265 data->offset);
1266 else
1267 to1 = adjust_address (data->to, mode, data->offset);
1268 }
1269
1270 if (data->autinc_from)
1271 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1272 data->offset);
1273 else
1274 from1 = adjust_address (data->from, mode, data->offset);
1275
1276 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1277 emit_insn (gen_add2_insn (data->to_addr,
1278 GEN_INT (-(HOST_WIDE_INT)size)));
1279 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1280 emit_insn (gen_add2_insn (data->from_addr,
1281 GEN_INT (-(HOST_WIDE_INT)size)));
1282
1283 if (data->to)
1284 emit_insn ((*genfun) (to1, from1));
1285 else
1286 {
1287 #ifdef PUSH_ROUNDING
1288 emit_single_push_insn (mode, from1, NULL);
1289 #else
1290 abort ();
1291 #endif
1292 }
1293
1294 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1295 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1296 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1297 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1298
1299 if (! data->reverse)
1300 data->offset += size;
1301
1302 data->len -= size;
1303 }
1304 }
1305 \f
1306 /* Emit code to move a block Y to a block X. This may be done with
1307 string-move instructions, with multiple scalar move instructions,
1308 or with a library call.
1309
1310 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1311 SIZE is an rtx that says how long they are.
1312 ALIGN is the maximum alignment we can assume they have.
1313 METHOD describes what kind of copy this is, and what mechanisms may be used.
1314
1315 Return the address of the new block, if memcpy is called and returns it,
1316 0 otherwise. */
1317
1318 rtx
1319 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1320 {
1321 bool may_use_call;
1322 rtx retval = 0;
1323 unsigned int align;
1324
1325 switch (method)
1326 {
1327 case BLOCK_OP_NORMAL:
1328 may_use_call = true;
1329 break;
1330
1331 case BLOCK_OP_CALL_PARM:
1332 may_use_call = block_move_libcall_safe_for_call_parm ();
1333
1334 /* Make inhibit_defer_pop nonzero around the library call
1335 to force it to pop the arguments right away. */
1336 NO_DEFER_POP;
1337 break;
1338
1339 case BLOCK_OP_NO_LIBCALL:
1340 may_use_call = false;
1341 break;
1342
1343 default:
1344 abort ();
1345 }
1346
1347 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1348
1349 if (GET_MODE (x) != BLKmode)
1350 abort ();
1351 if (GET_MODE (y) != BLKmode)
1352 abort ();
1353
1354 x = protect_from_queue (x, 1);
1355 y = protect_from_queue (y, 0);
1356 size = protect_from_queue (size, 0);
1357
1358 if (GET_CODE (x) != MEM)
1359 abort ();
1360 if (GET_CODE (y) != MEM)
1361 abort ();
1362 if (size == 0)
1363 abort ();
1364
1365 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1366 can be incorrect is coming from __builtin_memcpy. */
1367 if (GET_CODE (size) == CONST_INT)
1368 {
1369 if (INTVAL (size) == 0)
1370 return 0;
1371
1372 x = shallow_copy_rtx (x);
1373 y = shallow_copy_rtx (y);
1374 set_mem_size (x, size);
1375 set_mem_size (y, size);
1376 }
1377
1378 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1379 move_by_pieces (x, y, INTVAL (size), align, 0);
1380 else if (emit_block_move_via_movstr (x, y, size, align))
1381 ;
1382 else if (may_use_call)
1383 retval = emit_block_move_via_libcall (x, y, size);
1384 else
1385 emit_block_move_via_loop (x, y, size, align);
1386
1387 if (method == BLOCK_OP_CALL_PARM)
1388 OK_DEFER_POP;
1389
1390 return retval;
1391 }
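/* A sketch of a typical call, e.g. when expanding a BLKmode structure
   assignment (the MEMs would really come from expand_expr):

     rtx dst = ...;                          BLKmode MEM for the target
     rtx src = ...;                          BLKmode MEM for the source
     emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   The strategies above are tried in order: move_by_pieces for small
   constant sizes, then a movstr pattern, then the memcpy/bcopy libcall
   if METHOD allows it, and finally an explicit byte loop.  */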
1392
1393 /* A subroutine of emit_block_move. Returns true if calling the
1394 block move libcall will not clobber any parameters which may have
1395 already been placed on the stack. */
1396
1397 static bool
1398 block_move_libcall_safe_for_call_parm (void)
1399 {
1400 /* If arguments are pushed on the stack, then they're safe. */
1401 if (PUSH_ARGS)
1402 return true;
1403
1404 /* If registers go on the stack anyway, any argument is sure to clobber
1405 an outgoing argument. */
1406 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1407 {
1408 tree fn = emit_block_move_libcall_fn (false);
1409 (void) fn;
1410 if (REG_PARM_STACK_SPACE (fn) != 0)
1411 return false;
1412 }
1413 #endif
1414
1415 /* If any argument goes in memory, then it might clobber an outgoing
1416 argument. */
1417 {
1418 CUMULATIVE_ARGS args_so_far;
1419 tree fn, arg;
1420
1421 fn = emit_block_move_libcall_fn (false);
1422 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1423
1424 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1425 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1426 {
1427 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1428 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1429 if (!tmp || !REG_P (tmp))
1430 return false;
1431 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1432 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1433 NULL_TREE, 1))
1434 return false;
1435 #endif
1436 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1437 }
1438 }
1439 return true;
1440 }
1441
1442 /* A subroutine of emit_block_move. Expand a movstr pattern;
1443 return true if successful. */
1444
1445 static bool
1446 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1447 {
1448 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1449 enum machine_mode mode;
1450
1451 /* Since this is a move insn, we don't care about volatility. */
1452 volatile_ok = 1;
1453
1454 /* Try the most limited insn first, because there's no point
1455 including more than one in the machine description unless
1456 the more limited one has some advantage. */
1457
1458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1459 mode = GET_MODE_WIDER_MODE (mode))
1460 {
1461 enum insn_code code = movstr_optab[(int) mode];
1462 insn_operand_predicate_fn pred;
1463
1464 if (code != CODE_FOR_nothing
1465 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1466 here because if SIZE is less than the mode mask, as it is
1467 returned by the macro, it will definitely be less than the
1468 actual mode mask. */
1469 && ((GET_CODE (size) == CONST_INT
1470 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1471 <= (GET_MODE_MASK (mode) >> 1)))
1472 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1473 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1474 || (*pred) (x, BLKmode))
1475 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1476 || (*pred) (y, BLKmode))
1477 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1478 || (*pred) (opalign, VOIDmode)))
1479 {
1480 rtx op2;
1481 rtx last = get_last_insn ();
1482 rtx pat;
1483
1484 op2 = convert_to_mode (mode, size, 1);
1485 pred = insn_data[(int) code].operand[2].predicate;
1486 if (pred != 0 && ! (*pred) (op2, mode))
1487 op2 = copy_to_mode_reg (mode, op2);
1488
1489 /* ??? When called via emit_block_move_for_call, it'd be
1490 nice if there were some way to inform the backend, so
1491 that it doesn't fail the expansion because it thinks
1492 emitting the libcall would be more efficient. */
1493
1494 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1495 if (pat)
1496 {
1497 emit_insn (pat);
1498 volatile_ok = 0;
1499 return true;
1500 }
1501 else
1502 delete_insns_since (last);
1503 }
1504 }
1505
1506 volatile_ok = 0;
1507 return false;
1508 }
1509
1510 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1511 Return the return value from memcpy, 0 otherwise. */
1512
1513 static rtx
1514 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1515 {
1516 rtx dst_addr, src_addr;
1517 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1518 enum machine_mode size_mode;
1519 rtx retval;
1520
1521 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1522
1523 It is unsafe to save the value generated by protect_from_queue and reuse
1524 it later. Consider what happens if emit_queue is called before the
1525 return value from protect_from_queue is used.
1526
1527 Expansion of the CALL_EXPR below will call emit_queue before we are
1528 finished emitting RTL for argument setup. So if we are not careful we
1529 could get the wrong value for an argument.
1530
1531 To avoid this problem we go ahead and emit code to copy the addresses of
1532 DST and SRC and SIZE into new pseudos. We can then place those new
1533 pseudos into an RTL_EXPR and use them later, even after a call to
1534 emit_queue.
1535
1536 Note this is not strictly needed for library calls since they do not call
1537 emit_queue before loading their arguments. However, we may need to have
1538 library calls call emit_queue in the future since failing to do so could
1539 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1540 arguments in registers. */
1541
1542 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1543 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1544
1545 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1546 src_addr = convert_memory_address (ptr_mode, src_addr);
1547
1548 dst_tree = make_tree (ptr_type_node, dst_addr);
1549 src_tree = make_tree (ptr_type_node, src_addr);
1550
1551 if (TARGET_MEM_FUNCTIONS)
1552 size_mode = TYPE_MODE (sizetype);
1553 else
1554 size_mode = TYPE_MODE (unsigned_type_node);
1555
1556 size = convert_to_mode (size_mode, size, 1);
1557 size = copy_to_mode_reg (size_mode, size);
1558
1559 /* It is incorrect to use the libcall calling conventions to call
1560 memcpy in this context. This could be a user call to memcpy and
1561 the user may wish to examine the return value from memcpy. For
1562 targets where libcalls and normal calls have different conventions
1563 for returning pointers, we could end up generating incorrect code.
1564
1565 For convenience, we generate the call to bcopy this way as well. */
1566
1567 if (TARGET_MEM_FUNCTIONS)
1568 size_tree = make_tree (sizetype, size);
1569 else
1570 size_tree = make_tree (unsigned_type_node, size);
1571
1572 fn = emit_block_move_libcall_fn (true);
1573 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1574 if (TARGET_MEM_FUNCTIONS)
1575 {
1576 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1577 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1578 }
1579 else
1580 {
1581 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1582 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1583 }
1584
1585 /* Now we have to build up the CALL_EXPR itself. */
1586 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1587 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1588 call_expr, arg_list, NULL_TREE);
1589
1590 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1591
1592 /* If we are initializing a readonly value, show the above call clobbered
1593 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1594 the delay slot scheduler might overlook conflicts and take nasty
1595 decisions. */
1596 if (RTX_UNCHANGING_P (dst))
1597 add_function_usage_to
1598 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1599 gen_rtx_CLOBBER (VOIDmode, dst),
1600 NULL_RTX));
1601
1602 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
1603 }
1604
1605 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1606 for the function we use for block copies. The first time FOR_CALL
1607 is true, we call assemble_external. */
1608
1609 static GTY(()) tree block_move_fn;
1610
1611 void
1612 init_block_move_fn (const char *asmspec)
1613 {
1614 if (!block_move_fn)
1615 {
1616 tree args, fn;
1617
1618 if (TARGET_MEM_FUNCTIONS)
1619 {
1620 fn = get_identifier ("memcpy");
1621 args = build_function_type_list (ptr_type_node, ptr_type_node,
1622 const_ptr_type_node, sizetype,
1623 NULL_TREE);
1624 }
1625 else
1626 {
1627 fn = get_identifier ("bcopy");
1628 args = build_function_type_list (void_type_node, const_ptr_type_node,
1629 ptr_type_node, unsigned_type_node,
1630 NULL_TREE);
1631 }
1632
1633 fn = build_decl (FUNCTION_DECL, fn, args);
1634 DECL_EXTERNAL (fn) = 1;
1635 TREE_PUBLIC (fn) = 1;
1636 DECL_ARTIFICIAL (fn) = 1;
1637 TREE_NOTHROW (fn) = 1;
1638
1639 block_move_fn = fn;
1640 }
1641
1642 if (asmspec)
1643 {
1644 SET_DECL_RTL (block_move_fn, NULL_RTX);
1645 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
1646 }
1647 }
1648
1649 static tree
1650 emit_block_move_libcall_fn (int for_call)
1651 {
1652 static bool emitted_extern;
1653
1654 if (!block_move_fn)
1655 init_block_move_fn (NULL);
1656
1657 if (for_call && !emitted_extern)
1658 {
1659 emitted_extern = true;
1660 make_decl_rtl (block_move_fn, NULL);
1661 assemble_external (block_move_fn);
1662 }
1663
1664 return block_move_fn;
1665 }
1666
1667 /* A subroutine of emit_block_move. Copy the data via an explicit
1668 loop. This is used only when libcalls are forbidden. */
1669 /* ??? It'd be nice to copy in hunks larger than QImode. */
1670
1671 static void
1672 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1673 unsigned int align ATTRIBUTE_UNUSED)
1674 {
1675 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1676 enum machine_mode iter_mode;
1677
1678 iter_mode = GET_MODE (size);
1679 if (iter_mode == VOIDmode)
1680 iter_mode = word_mode;
1681
1682 top_label = gen_label_rtx ();
1683 cmp_label = gen_label_rtx ();
1684 iter = gen_reg_rtx (iter_mode);
1685
1686 emit_move_insn (iter, const0_rtx);
1687
1688 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1689 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1690 do_pending_stack_adjust ();
1691
1692 emit_note (NOTE_INSN_LOOP_BEG);
1693
1694 emit_jump (cmp_label);
1695 emit_label (top_label);
1696
1697 tmp = convert_modes (Pmode, iter_mode, iter, true);
1698 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1699 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1700 x = change_address (x, QImode, x_addr);
1701 y = change_address (y, QImode, y_addr);
1702
1703 emit_move_insn (x, y);
1704
1705 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1706 true, OPTAB_LIB_WIDEN);
1707 if (tmp != iter)
1708 emit_move_insn (iter, tmp);
1709
1710 emit_note (NOTE_INSN_LOOP_CONT);
1711 emit_label (cmp_label);
1712
1713 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1714 true, top_label);
1715
1716 emit_note (NOTE_INSN_LOOP_END);
1717 }
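/* The RTL emitted above corresponds roughly to this C loop, copying one
   byte per iteration as noted before the function:

     for (iter = 0; iter < size; iter++)
       ((unsigned char *) x)[iter] = ((unsigned char *) y)[iter];

   where ITER and SIZE are compared as unsigned values.  */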
1718 \f
1719 /* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
1721
1722 void
1723 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1724 {
1725 int i;
1726 #ifdef HAVE_load_multiple
1727 rtx pat;
1728 rtx last;
1729 #endif
1730
1731 if (nregs == 0)
1732 return;
1733
1734 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1735 x = validize_mem (force_const_mem (mode, x));
1736
1737 /* See if the machine can do this with a load multiple insn. */
1738 #ifdef HAVE_load_multiple
1739 if (HAVE_load_multiple)
1740 {
1741 last = get_last_insn ();
1742 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1743 GEN_INT (nregs));
1744 if (pat)
1745 {
1746 emit_insn (pat);
1747 return;
1748 }
1749 else
1750 delete_insns_since (last);
1751 }
1752 #endif
1753
1754 for (i = 0; i < nregs; i++)
1755 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1756 operand_subword_force (x, i, mode));
1757 }
1758
1759 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1760 The number of registers to be filled is NREGS. */
1761
1762 void
1763 move_block_from_reg (int regno, rtx x, int nregs)
1764 {
1765 int i;
1766
1767 if (nregs == 0)
1768 return;
1769
1770 /* See if the machine can do this with a store multiple insn. */
1771 #ifdef HAVE_store_multiple
1772 if (HAVE_store_multiple)
1773 {
1774 rtx last = get_last_insn ();
1775 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1776 GEN_INT (nregs));
1777 if (pat)
1778 {
1779 emit_insn (pat);
1780 return;
1781 }
1782 else
1783 delete_insns_since (last);
1784 }
1785 #endif
1786
1787 for (i = 0; i < nregs; i++)
1788 {
1789 rtx tem = operand_subword (x, i, 1, BLKmode);
1790
1791 if (tem == 0)
1792 abort ();
1793
1794 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1795 }
1796 }
1797
1798 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1799 ORIG, where ORIG is a non-consecutive group of registers represented by
1800    a PARALLEL.  The clone is identical to the original except that the
1801 original set of registers is replaced by a new set of pseudo registers.
1802 The new set has the same modes as the original set. */
1803
1804 rtx
1805 gen_group_rtx (rtx orig)
1806 {
1807 int i, length;
1808 rtx *tmps;
1809
1810 if (GET_CODE (orig) != PARALLEL)
1811 abort ();
1812
1813 length = XVECLEN (orig, 0);
1814 tmps = alloca (sizeof (rtx) * length);
1815
1816 /* Skip a NULL entry in first slot. */
1817 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1818
1819 if (i)
1820 tmps[0] = 0;
1821
1822 for (; i < length; i++)
1823 {
1824 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1825 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1826
1827 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1828 }
1829
1830 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1831 }
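/* For illustration, a register group is a PARALLEL pairing each register
   with its byte offset into the value, written informally (assuming
   32-bit SImode pieces; the hard register numbers are arbitrary):

	(parallel [(expr_list (reg:SI 3) (const_int 0))
		   (expr_list (reg:SI 4) (const_int 4))])

   gen_group_rtx returns the same shape with each hard register replaced
   by a fresh pseudo of the same mode.  */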
1832
1833 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1834 where DST is non-consecutive registers represented by a PARALLEL.
1835 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1836 if not known. */
1837
1838 void
1839 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1840 {
1841 rtx *tmps, src;
1842 int start, i;
1843
1844 if (GET_CODE (dst) != PARALLEL)
1845 abort ();
1846
1847 /* Check for a NULL entry, used to indicate that the parameter goes
1848 both on the stack and in registers. */
1849 if (XEXP (XVECEXP (dst, 0, 0), 0))
1850 start = 0;
1851 else
1852 start = 1;
1853
1854 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1855
1856 /* Process the pieces. */
1857 for (i = start; i < XVECLEN (dst, 0); i++)
1858 {
1859 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1860 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1861 unsigned int bytelen = GET_MODE_SIZE (mode);
1862 int shift = 0;
1863
1864 /* Handle trailing fragments that run over the size of the struct. */
1865 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1866 {
1867 /* Arrange to shift the fragment to where it belongs.
1868 extract_bit_field loads to the lsb of the reg. */
1869 if (
1870 #ifdef BLOCK_REG_PADDING
1871 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1872 == (BYTES_BIG_ENDIAN ? upward : downward)
1873 #else
1874 BYTES_BIG_ENDIAN
1875 #endif
1876 )
1877 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1878 bytelen = ssize - bytepos;
1879 if (bytelen <= 0)
1880 abort ();
1881 }
1882
1883 /* If we won't be loading directly from memory, protect the real source
1884 from strange tricks we might play; but make sure that the source can
1885 be loaded directly into the destination. */
1886 src = orig_src;
1887 if (GET_CODE (orig_src) != MEM
1888 && (!CONSTANT_P (orig_src)
1889 || (GET_MODE (orig_src) != mode
1890 && GET_MODE (orig_src) != VOIDmode)))
1891 {
1892 if (GET_MODE (orig_src) == VOIDmode)
1893 src = gen_reg_rtx (mode);
1894 else
1895 src = gen_reg_rtx (GET_MODE (orig_src));
1896
1897 emit_move_insn (src, orig_src);
1898 }
1899
1900 /* Optimize the access just a bit. */
1901 if (GET_CODE (src) == MEM
1902 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1903 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1904 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1905 && bytelen == GET_MODE_SIZE (mode))
1906 {
1907 tmps[i] = gen_reg_rtx (mode);
1908 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1909 }
1910 else if (GET_CODE (src) == CONCAT)
1911 {
1912 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1913 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1914
1915 if ((bytepos == 0 && bytelen == slen0)
1916 || (bytepos != 0 && bytepos + bytelen <= slen))
1917 {
1918 /* The following assumes that the concatenated objects all
1919 have the same size. In this case, a simple calculation
1920 can be used to determine the object and the bit field
1921 to be extracted. */
1922 tmps[i] = XEXP (src, bytepos / slen0);
1923 if (! CONSTANT_P (tmps[i])
1924 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
1925 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1926 (bytepos % slen0) * BITS_PER_UNIT,
1927 1, NULL_RTX, mode, mode, ssize);
1928 }
1929 else if (bytepos == 0)
1930 {
1931 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1932 emit_move_insn (mem, src);
1933 tmps[i] = adjust_address (mem, mode, 0);
1934 }
1935 else
1936 abort ();
1937 }
1938 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1939 SIMD register, which is currently broken. While we get GCC
1940 to emit proper RTL for these cases, let's dump to memory. */
1941 else if (VECTOR_MODE_P (GET_MODE (dst))
1942 && GET_CODE (src) == REG)
1943 {
1944 int slen = GET_MODE_SIZE (GET_MODE (src));
1945 rtx mem;
1946
1947 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1948 emit_move_insn (mem, src);
1949 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1950 }
1951 else if (CONSTANT_P (src)
1952 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
1953 tmps[i] = src;
1954 else
1955 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1956 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1957 mode, mode, ssize);
1958
1959 if (shift)
1960 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1961 tmps[i], 0, OPTAB_WIDEN);
1962 }
1963
1964 emit_queue ();
1965
1966 /* Copy the extracted pieces into the proper (probable) hard regs. */
1967 for (i = start; i < XVECLEN (dst, 0); i++)
1968 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1969 }
1970
1971 /* Emit code to move a block SRC to block DST, where SRC and DST are
1972 non-consecutive groups of registers, each represented by a PARALLEL. */
1973
1974 void
1975 emit_group_move (rtx dst, rtx src)
1976 {
1977 int i;
1978
1979 if (GET_CODE (src) != PARALLEL
1980 || GET_CODE (dst) != PARALLEL
1981 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1982 abort ();
1983
1984 /* Skip first entry if NULL. */
1985 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1986 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1987 XEXP (XVECEXP (src, 0, i), 0));
1988 }
1989
1990 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1991 where SRC is non-consecutive registers represented by a PARALLEL.
1992 SSIZE represents the total size of block ORIG_DST, or -1 if not
1993 known. */
1994
1995 void
1996 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1997 {
1998 rtx *tmps, dst;
1999 int start, i;
2000
2001 if (GET_CODE (src) != PARALLEL)
2002 abort ();
2003
2004 /* Check for a NULL entry, used to indicate that the parameter goes
2005 both on the stack and in registers. */
2006 if (XEXP (XVECEXP (src, 0, 0), 0))
2007 start = 0;
2008 else
2009 start = 1;
2010
2011 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2012
2013 /* Copy the (probable) hard regs into pseudos. */
2014 for (i = start; i < XVECLEN (src, 0); i++)
2015 {
2016 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2017 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2018 emit_move_insn (tmps[i], reg);
2019 }
2020 emit_queue ();
2021
2022 /* If we won't be storing directly into memory, protect the real destination
2023 from strange tricks we might play. */
2024 dst = orig_dst;
2025 if (GET_CODE (dst) == PARALLEL)
2026 {
2027 rtx temp;
2028
2029 /* We can get a PARALLEL dst if there is a conditional expression in
2030 a return statement. In that case, the dst and src are the same,
2031 so no action is necessary. */
2032 if (rtx_equal_p (dst, src))
2033 return;
2034
2035 /* It is unclear if we can ever reach here, but we may as well handle
2036 it. Allocate a temporary, and split this into a store/load to/from
2037 the temporary. */
2038
2039 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2040 emit_group_store (temp, src, type, ssize);
2041 emit_group_load (dst, temp, type, ssize);
2042 return;
2043 }
2044 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2045 {
2046 dst = gen_reg_rtx (GET_MODE (orig_dst));
2047 /* Make life a bit easier for combine. */
2048 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2049 }
2050
2051 /* Process the pieces. */
2052 for (i = start; i < XVECLEN (src, 0); i++)
2053 {
2054 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2055 enum machine_mode mode = GET_MODE (tmps[i]);
2056 unsigned int bytelen = GET_MODE_SIZE (mode);
2057 rtx dest = dst;
2058
2059 /* Handle trailing fragments that run over the size of the struct. */
2060 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2061 {
2062 /* store_bit_field always takes its value from the lsb.
2063 Move the fragment to the lsb if it's not already there. */
2064 if (
2065 #ifdef BLOCK_REG_PADDING
2066 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2067 == (BYTES_BIG_ENDIAN ? upward : downward)
2068 #else
2069 BYTES_BIG_ENDIAN
2070 #endif
2071 )
2072 {
2073 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2074 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2075 tmps[i], 0, OPTAB_WIDEN);
2076 }
2077 bytelen = ssize - bytepos;
2078 }
2079
2080 if (GET_CODE (dst) == CONCAT)
2081 {
2082 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2083 dest = XEXP (dst, 0);
2084 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 {
2086 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2087 dest = XEXP (dst, 1);
2088 }
2089 else if (bytepos == 0 && XVECLEN (src, 0))
2090 {
2091 dest = assign_stack_temp (GET_MODE (dest),
2092 GET_MODE_SIZE (GET_MODE (dest)), 0);
2093 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2094 tmps[i]);
2095 dst = dest;
2096 break;
2097 }
2098 else
2099 abort ();
2100 }
2101
2102 /* Optimize the access just a bit. */
2103 if (GET_CODE (dest) == MEM
2104 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2105 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2106 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2107 && bytelen == GET_MODE_SIZE (mode))
2108 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2109 else
2110 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2111 mode, tmps[i], ssize);
2112 }
2113
2114 emit_queue ();
2115
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (orig_dst != dst)
2118 emit_move_insn (orig_dst, dst);
2119 }
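/* A hedged sketch of how the group routines combine; RETVAL is assumed
   to be a PARALLEL describing a value spread over non-consecutive hard
   registers, and `pseudos' and `target_mem' are illustrative names.
   The hard registers are first shadowed by pseudos and the value is
   later spilled to its memory home:

	rtx pseudos = gen_group_rtx (retval);
	emit_group_move (pseudos, retval);
	...
	emit_group_store (target_mem, pseudos, type, int_size_in_bytes (type));

   emit_group_load is the mirror image, filling the registers of a
   PARALLEL from memory or another rtx.  */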
2120
2121 /* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2124
2125 The purpose of this routine is to handle functions that return
2126 BLKmode structures in registers. Some machines (the PA for example)
2127 want to return all small structures in registers regardless of the
2128 structure's alignment. */
2129
2130 rtx
2131 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2132 {
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2136 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2137
2138 if (tgtblk == 0)
2139 {
2140 tgtblk = assign_temp (build_qualified_type (type,
2141 (TYPE_QUALS (type)
2142 | TYPE_QUAL_CONST)),
2143 0, 1, 1);
2144 preserve_temp_slots (tgtblk);
2145 }
2146
2147 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2148 into a new pseudo which is a full word. */
2149
2150 if (GET_MODE (srcreg) != BLKmode
2151 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2152 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2153
2154 /* If the structure doesn't take up a whole number of words, see whether
2155 SRCREG is padded on the left or on the right. If it's on the left,
2156 set PADDING_CORRECTION to the number of bits to skip.
2157
2158      In most ABIs, the structure will be returned at the least significant
2159      end of the register, which translates to right padding on little-endian
2160 targets and left padding on big-endian targets. The opposite
2161 holds if the structure is returned at the most significant
2162 end of the register. */
2163 if (bytes % UNITS_PER_WORD != 0
2164 && (targetm.calls.return_in_msb (type)
2165 ? !BYTES_BIG_ENDIAN
2166 : BYTES_BIG_ENDIAN))
2167 padding_correction
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2169
2170   /* Copy the structure BITSIZE bits at a time.
2171
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
2175 for (bitpos = 0, xbitpos = padding_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2178 {
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary and when xbitpos == padding_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == padding_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2185 GET_MODE (srcreg));
2186
2187 /* We need a new destination operand each time bitpos is on
2188 a word boundary. */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2191
2192 /* Use xbitpos for the source extraction (right justified) and
2193 	 bitpos for the destination store (left justified).  */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
2198 BITS_PER_WORD),
2199 BITS_PER_WORD);
2200 }
2201
2202 return tgtblk;
2203 }
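/* A minimal, hypothetical example: if a call returns a small BLKmode
   structure of type TYPE in the registers denoted by VALREG (both names
   illustrative), the caller can obtain an addressable copy with

	rtx blk = copy_blkmode_from_reg (NULL_RTX, valreg, type);

   which allocates a stack temporary, fills it piecewise as above, and
   returns it.  */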
2204
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2207
2208 void
2209 use_reg (rtx *call_fusage, rtx reg)
2210 {
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2213 abort ();
2214
2215 *call_fusage
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2218 }
2219
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2222
2223 void
2224 use_regs (rtx *call_fusage, int regno, int nregs)
2225 {
2226 int i;
2227
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2229 abort ();
2230
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2233 }
2234
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2238
2239 void
2240 use_group_regs (rtx *call_fusage, rtx regs)
2241 {
2242 int i;
2243
2244 for (i = 0; i < XVECLEN (regs, 0); i++)
2245 {
2246 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2247
2248 /* A NULL entry means the parameter goes both on the stack and in
2249 registers. This can also be a MEM for targets that pass values
2250 partially on the stack and partially in registers. */
2251 if (reg != 0 && GET_CODE (reg) == REG)
2252 use_reg (call_fusage, reg);
2253 }
2254 }
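/* An illustrative sketch of how a call expander records its argument
   registers; `fusage' and `arg' are hypothetical locals and register
   number 3 is arbitrary:

	rtx fusage = NULL_RTX;
	use_reg (&fusage, gen_rtx_REG (SImode, 3));
	if (GET_CODE (arg) == PARALLEL)
	  use_group_regs (&fusage, arg);

   The resulting list would then be attached to the CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE.  */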
2255 \f
2256
2257 /* Determine whether the LEN bytes generated by CONSTFUN can be
2258 stored to memory using several move instructions. CONSTFUNDATA is
2259 a pointer which will be passed as argument in every CONSTFUN call.
2260 ALIGN is maximum alignment we can assume. Return nonzero if a
2261 call to store_by_pieces should succeed. */
2262
2263 int
2264 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2265 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2266 void *constfundata, unsigned int align)
2267 {
2268 unsigned HOST_WIDE_INT max_size, l;
2269 HOST_WIDE_INT offset = 0;
2270 enum machine_mode mode, tmode;
2271 enum insn_code icode;
2272 int reverse;
2273 rtx cst;
2274
2275 if (len == 0)
2276 return 1;
2277
2278 if (! STORE_BY_PIECES_P (len, align))
2279 return 0;
2280
2281 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2282 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2283 align = MOVE_MAX * BITS_PER_UNIT;
2284
2285 /* We would first store what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2287
2288 for (reverse = 0;
2289 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2290 reverse++)
2291 {
2292 l = len;
2293 mode = VOIDmode;
2294 max_size = STORE_MAX_PIECES + 1;
2295 while (max_size > 1)
2296 {
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2300 mode = tmode;
2301
2302 if (mode == VOIDmode)
2303 break;
2304
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode))
2308 {
2309 unsigned int size = GET_MODE_SIZE (mode);
2310
2311 while (l >= size)
2312 {
2313 if (reverse)
2314 offset -= size;
2315
2316 cst = (*constfun) (constfundata, offset, mode);
2317 if (!LEGITIMATE_CONSTANT_P (cst))
2318 return 0;
2319
2320 if (!reverse)
2321 offset += size;
2322
2323 l -= size;
2324 }
2325 }
2326
2327 max_size = GET_MODE_SIZE (mode);
2328 }
2329
2330 /* The code above should have handled everything. */
2331 if (l != 0)
2332 abort ();
2333 }
2334
2335 return 1;
2336 }
2337
2338 /* Generate several move instructions to store LEN bytes generated by
2339 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2340 pointer which will be passed as argument in every CONSTFUN call.
2341 ALIGN is maximum alignment we can assume.
2342    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2343    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2344    stpcpy.  */
2345
2346 rtx
2347 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2348 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2349 void *constfundata, unsigned int align, int endp)
2350 {
2351 struct store_by_pieces data;
2352
2353 if (len == 0)
2354 {
2355 if (endp == 2)
2356 abort ();
2357 return to;
2358 }
2359
2360 if (! STORE_BY_PIECES_P (len, align))
2361 abort ();
2362 to = protect_from_queue (to, 1);
2363 data.constfun = constfun;
2364 data.constfundata = constfundata;
2365 data.len = len;
2366 data.to = to;
2367 store_by_pieces_1 (&data, align);
2368 if (endp)
2369 {
2370 rtx to1;
2371
2372 if (data.reverse)
2373 abort ();
2374 if (data.autinc_to)
2375 {
2376 if (endp == 2)
2377 {
2378 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2379 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2380 else
2381 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2382 -1));
2383 }
2384 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2385 data.offset);
2386 }
2387 else
2388 {
2389 if (endp == 2)
2390 --data.offset;
2391 to1 = adjust_address (data.to, QImode, data.offset);
2392 }
2393 return to1;
2394 }
2395 else
2396 return data.to;
2397 }
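/* A hedged usage sketch of the CONSTFUN interface.  The callback hands
   back, as an rtx of the requested mode, the constant data that lives
   at OFFSET; the zero-filling callback and the variables below are
   illustrative only:

	static rtx
	zero_constfun (void *data ATTRIBUTE_UNUSED,
		       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       enum machine_mode mode)
	{
	  return CONST0_RTX (mode);
	}

	...
	if (can_store_by_pieces (len, zero_constfun, NULL, align))
	  to = store_by_pieces (to, len, zero_constfun, NULL, align, 0);

   Passing ENDP as 1 or 2 instead makes store_by_pieces return a MEM
   just past (or one byte before the end of) the stored block, as
   described above.  */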
2398
2399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is maximum alignment we can assume. */
2402
2403 static void
2404 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2405 {
2406 struct store_by_pieces data;
2407
2408 if (len == 0)
2409 return;
2410
2411 data.constfun = clear_by_pieces_1;
2412 data.constfundata = NULL;
2413 data.len = len;
2414 data.to = to;
2415 store_by_pieces_1 (&data, align);
2416 }
2417
2418 /* Callback routine for clear_by_pieces.
2419 Return const0_rtx unconditionally. */
2420
2421 static rtx
2422 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2423 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2424 enum machine_mode mode ATTRIBUTE_UNUSED)
2425 {
2426 return const0_rtx;
2427 }
2428
2429 /* Subroutine of clear_by_pieces and store_by_pieces.
2430 Generate several move instructions to store LEN bytes of block TO. (A MEM
2431 rtx with BLKmode). The caller must pass TO through protect_from_queue
2432 before calling. ALIGN is maximum alignment we can assume. */
2433
2434 static void
2435 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2436 unsigned int align ATTRIBUTE_UNUSED)
2437 {
2438 rtx to_addr = XEXP (data->to, 0);
2439 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2440 enum machine_mode mode = VOIDmode, tmode;
2441 enum insn_code icode;
2442
2443 data->offset = 0;
2444 data->to_addr = to_addr;
2445 data->autinc_to
2446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2448
2449 data->explicit_inc_to = 0;
2450 data->reverse
2451 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2452 if (data->reverse)
2453 data->offset = data->len;
2454
2455 /* If storing requires more than two move insns,
2456 copy addresses to registers (to make displacements shorter)
2457 and use post-increment if available. */
2458 if (!data->autinc_to
2459 && move_by_pieces_ninsns (data->len, align) > 2)
2460 {
2461 /* Determine the main mode we'll be using. */
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2465 mode = tmode;
2466
2467 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2468 {
2469 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = -1;
2472 }
2473
2474 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2475 && ! data->autinc_to)
2476 {
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = 1;
2480 }
2481
2482 if ( !data->autinc_to && CONSTANT_P (to_addr))
2483 data->to_addr = copy_addr_to_reg (to_addr);
2484 }
2485
2486 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2487 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2488 align = MOVE_MAX * BITS_PER_UNIT;
2489
2490 /* First store what we can in the largest integer mode, then go to
2491 successively smaller modes. */
2492
2493 while (max_size > 1)
2494 {
2495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2497 if (GET_MODE_SIZE (tmode) < max_size)
2498 mode = tmode;
2499
2500 if (mode == VOIDmode)
2501 break;
2502
2503 icode = mov_optab->handlers[(int) mode].insn_code;
2504 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2505 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2506
2507 max_size = GET_MODE_SIZE (mode);
2508 }
2509
2510 /* The code above should have handled everything. */
2511 if (data->len != 0)
2512 abort ();
2513 }
2514
2515 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2516 with move instructions for mode MODE. GENFUN is the gen_... function
2517 to make a move insn for that mode. DATA has all the other info. */
2518
2519 static void
2520 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2521 struct store_by_pieces *data)
2522 {
2523 unsigned int size = GET_MODE_SIZE (mode);
2524 rtx to1, cst;
2525
2526 while (data->len >= size)
2527 {
2528 if (data->reverse)
2529 data->offset -= size;
2530
2531 if (data->autinc_to)
2532 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2533 data->offset);
2534 else
2535 to1 = adjust_address (data->to, mode, data->offset);
2536
2537 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2538 emit_insn (gen_add2_insn (data->to_addr,
2539 GEN_INT (-(HOST_WIDE_INT) size)));
2540
2541 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2542 emit_insn ((*genfun) (to1, cst));
2543
2544 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2546
2547 if (! data->reverse)
2548 data->offset += size;
2549
2550 data->len -= size;
2551 }
2552 }
2553 \f
2554 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2555 its length in bytes. */
2556
2557 rtx
2558 clear_storage (rtx object, rtx size)
2559 {
2560 rtx retval = 0;
2561 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2562 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2563
2564 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2565 just move a zero. Otherwise, do this a piece at a time. */
2566 if (GET_MODE (object) != BLKmode
2567 && GET_CODE (size) == CONST_INT
2568 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2570 else
2571 {
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2574
2575 if (size == const0_rtx)
2576 ;
2577 else if (GET_CODE (size) == CONST_INT
2578 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2580 else if (clear_storage_via_clrstr (object, size, align))
2581 ;
2582 else
2583 retval = clear_storage_via_libcall (object, size);
2584 }
2585
2586 return retval;
2587 }
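/* Typical use, sketched with an illustrative BLKmode MEM `target' whose
   size in bytes is the compile-time constant SIZE:

	clear_storage (target, GEN_INT (size));

   Depending on the size and alignment this becomes piecewise stores, a
   clrstr pattern, or a library call to memset (or bzero), as chosen
   above.  */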
2588
2589 /* A subroutine of clear_storage. Expand a clrstr pattern;
2590 return true if successful. */
2591
2592 static bool
2593 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2594 {
2595 /* Try the most limited insn first, because there's no point
2596 including more than one in the machine description unless
2597 the more limited one has some advantage. */
2598
2599 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2600 enum machine_mode mode;
2601
2602 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2603 mode = GET_MODE_WIDER_MODE (mode))
2604 {
2605 enum insn_code code = clrstr_optab[(int) mode];
2606 insn_operand_predicate_fn pred;
2607
2608 if (code != CODE_FOR_nothing
2609 /* We don't need MODE to be narrower than
2610 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2611 the mode mask, as it is returned by the macro, it will
2612 definitely be less than the actual mode mask. */
2613 && ((GET_CODE (size) == CONST_INT
2614 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2615 <= (GET_MODE_MASK (mode) >> 1)))
2616 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2617 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2618 || (*pred) (object, BLKmode))
2619 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2620 || (*pred) (opalign, VOIDmode)))
2621 {
2622 rtx op1;
2623 rtx last = get_last_insn ();
2624 rtx pat;
2625
2626 op1 = convert_to_mode (mode, size, 1);
2627 pred = insn_data[(int) code].operand[1].predicate;
2628 if (pred != 0 && ! (*pred) (op1, mode))
2629 op1 = copy_to_mode_reg (mode, op1);
2630
2631 pat = GEN_FCN ((int) code) (object, op1, opalign);
2632 if (pat)
2633 {
2634 emit_insn (pat);
2635 return true;
2636 }
2637 else
2638 delete_insns_since (last);
2639 }
2640 }
2641
2642 return false;
2643 }
2644
2645 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2646 Return the return value of memset, 0 otherwise. */
2647
2648 static rtx
2649 clear_storage_via_libcall (rtx object, rtx size)
2650 {
2651 tree call_expr, arg_list, fn, object_tree, size_tree;
2652 enum machine_mode size_mode;
2653 rtx retval;
2654
2655 /* OBJECT or SIZE may have been passed through protect_from_queue.
2656
2657 It is unsafe to save the value generated by protect_from_queue
2658 and reuse it later. Consider what happens if emit_queue is
2659 called before the return value from protect_from_queue is used.
2660
2661 Expansion of the CALL_EXPR below will call emit_queue before
2662 we are finished emitting RTL for argument setup. So if we are
2663 not careful we could get the wrong value for an argument.
2664
2665 To avoid this problem we go ahead and emit code to copy OBJECT
2666 and SIZE into new pseudos. We can then place those new pseudos
2667 into an RTL_EXPR and use them later, even after a call to
2668 emit_queue.
2669
2670 Note this is not strictly needed for library calls since they
2671 do not call emit_queue before loading their arguments. However,
2672 we may need to have library calls call emit_queue in the future
2673 since failing to do so could cause problems for targets which
2674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2675
2676 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2677
2678 if (TARGET_MEM_FUNCTIONS)
2679 size_mode = TYPE_MODE (sizetype);
2680 else
2681 size_mode = TYPE_MODE (unsigned_type_node);
2682 size = convert_to_mode (size_mode, size, 1);
2683 size = copy_to_mode_reg (size_mode, size);
2684
2685 /* It is incorrect to use the libcall calling conventions to call
2686 memset in this context. This could be a user call to memset and
2687 the user may wish to examine the return value from memset. For
2688 targets where libcalls and normal calls have different conventions
2689 for returning pointers, we could end up generating incorrect code.
2690
2691 For convenience, we generate the call to bzero this way as well. */
2692
2693 object_tree = make_tree (ptr_type_node, object);
2694 if (TARGET_MEM_FUNCTIONS)
2695 size_tree = make_tree (sizetype, size);
2696 else
2697 size_tree = make_tree (unsigned_type_node, size);
2698
2699 fn = clear_storage_libcall_fn (true);
2700 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2701 if (TARGET_MEM_FUNCTIONS)
2702 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2703 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2704
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2709
2710 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2711
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2717
2718 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2719 }
2720
2721 /* A subroutine of clear_storage_via_libcall. Create the tree node
2722 for the function we use for block clears. The first time FOR_CALL
2723 is true, we call assemble_external. */
2724
2725 static GTY(()) tree block_clear_fn;
2726
2727 void
2728 init_block_clear_fn (const char *asmspec)
2729 {
2730 if (!block_clear_fn)
2731 {
2732 tree fn, args;
2733
2734 if (TARGET_MEM_FUNCTIONS)
2735 {
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2739 NULL_TREE);
2740 }
2741 else
2742 {
2743 fn = get_identifier ("bzero");
2744 args = build_function_type_list (void_type_node, ptr_type_node,
2745 unsigned_type_node, NULL_TREE);
2746 }
2747
2748 fn = build_decl (FUNCTION_DECL, fn, args);
2749 DECL_EXTERNAL (fn) = 1;
2750 TREE_PUBLIC (fn) = 1;
2751 DECL_ARTIFICIAL (fn) = 1;
2752 TREE_NOTHROW (fn) = 1;
2753
2754 block_clear_fn = fn;
2755 }
2756
2757 if (asmspec)
2758 {
2759 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2760 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2761 }
2762 }
2763
2764 static tree
2765 clear_storage_libcall_fn (int for_call)
2766 {
2767 static bool emitted_extern;
2768
2769 if (!block_clear_fn)
2770 init_block_clear_fn (NULL);
2771
2772 if (for_call && !emitted_extern)
2773 {
2774 emitted_extern = true;
2775 make_decl_rtl (block_clear_fn, NULL);
2776 assemble_external (block_clear_fn);
2777 }
2778
2779 return block_clear_fn;
2780 }
2781 \f
2782 /* Generate code to copy Y into X.
2783 Both Y and X must have the same mode, except that
2784 Y can be a constant with VOIDmode.
2785 This mode cannot be BLKmode; use emit_block_move for that.
2786
2787 Return the last instruction emitted. */
2788
2789 rtx
2790 emit_move_insn (rtx x, rtx y)
2791 {
2792 enum machine_mode mode = GET_MODE (x);
2793 rtx y_cst = NULL_RTX;
2794 rtx last_insn, set;
2795
2796 x = protect_from_queue (x, 1);
2797 y = protect_from_queue (y, 0);
2798
2799 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2800 abort ();
2801
2802 /* Never force constant_p_rtx to memory. */
2803 if (GET_CODE (y) == CONSTANT_P_RTX)
2804 ;
2805 else if (CONSTANT_P (y))
2806 {
2807 if (optimize
2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2809 && (last_insn = compress_float_constant (x, y)))
2810 return last_insn;
2811
2812 y_cst = y;
2813
2814 if (!LEGITIMATE_CONSTANT_P (y))
2815 {
2816 y = force_const_mem (mode, y);
2817
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2821 if (!y)
2822 y = y_cst;
2823 }
2824 }
2825
2826 /* If X or Y are memory references, verify that their addresses are valid
2827 for the machine. */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2831 || (flag_force_addr
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2833 x = validize_mem (x);
2834
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 || (flag_force_addr
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2839 y = validize_mem (y);
2840
2841 if (mode == BLKmode)
2842 abort ();
2843
2844 last_insn = emit_move_insn_1 (x, y);
2845
2846 if (y_cst && GET_CODE (x) == REG
2847 && (set = single_set (last_insn)) != NULL_RTX
2848 && SET_DEST (set) == x
2849 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2851
2852 return last_insn;
2853 }
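/* A minimal usage sketch; `target' is an illustrative SImode pseudo:

	rtx target = gen_reg_rtx (SImode);
	emit_move_insn (target, GEN_INT (0x100));

   If the constant is not LEGITIMATE_CONSTANT_P on the target, it is
   forced into the constant pool above and the resulting move carries a
   REG_EQUAL note recording the original value.  */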
2854
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2858
2859 rtx
2860 emit_move_insn_1 (rtx x, rtx y)
2861 {
2862 enum machine_mode mode = GET_MODE (x);
2863 enum machine_mode submode;
2864 enum mode_class class = GET_MODE_CLASS (mode);
2865
2866 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2867 abort ();
2868
2869 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 return
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2872
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2875 && BLKmode != (submode = GET_MODE_INNER (mode))
2876 && (mov_optab->handlers[(int) submode].insn_code
2877 != CODE_FOR_nothing))
2878 {
2879 /* Don't split destination if it is a stack push. */
2880 int stack = push_operand (x, GET_MODE (x));
2881
2882 #ifdef PUSH_ROUNDING
2883 /* In case we output to the stack, but the size is smaller than the
2884 machine can push exactly, we need to use move instructions. */
2885 if (stack
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2887 != GET_MODE_SIZE (submode)))
2888 {
2889 rtx temp;
2890 HOST_WIDE_INT offset1, offset2;
2891
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp = expand_binop (Pmode,
2895 #ifdef STACK_GROWS_DOWNWARD
2896 sub_optab,
2897 #else
2898 add_optab,
2899 #endif
2900 stack_pointer_rtx,
2901 GEN_INT
2902 (PUSH_ROUNDING
2903 (GET_MODE_SIZE (GET_MODE (x)))),
2904 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2905
2906 if (temp != stack_pointer_rtx)
2907 emit_move_insn (stack_pointer_rtx, temp);
2908
2909 #ifdef STACK_GROWS_DOWNWARD
2910 offset1 = 0;
2911 offset2 = GET_MODE_SIZE (submode);
2912 #else
2913 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2914 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2915 + GET_MODE_SIZE (submode));
2916 #endif
2917
2918 emit_move_insn (change_address (x, submode,
2919 gen_rtx_PLUS (Pmode,
2920 stack_pointer_rtx,
2921 GEN_INT (offset1))),
2922 gen_realpart (submode, y));
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2925 stack_pointer_rtx,
2926 GEN_INT (offset2))),
2927 gen_imagpart (submode, y));
2928 }
2929 else
2930 #endif
2931 	/* If this is a stack push, push the highpart first, so it
2932 will be in the argument order.
2933
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2936 if (stack)
2937 {
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y));
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_realpart (submode, y));
2945 #else
2946 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_imagpart (submode, y));
2950 #endif
2951 }
2952 else
2953 {
2954 rtx realpart_x, realpart_y;
2955 rtx imagpart_x, imagpart_y;
2956
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2967 && (reload_in_progress | reload_completed) == 0)
2968 {
2969 int packed_dest_p
2970 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 int packed_src_p
2972 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2973
2974 if (packed_dest_p || packed_src_p)
2975 {
2976 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2977 ? MODE_FLOAT : MODE_INT);
2978
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2981
2982 if (reg_mode != BLKmode)
2983 {
2984 rtx mem = assign_stack_temp (reg_mode,
2985 GET_MODE_SIZE (mode), 0);
2986 rtx cmem = adjust_address (mem, mode, 0);
2987
2988 cfun->cannot_inline
2989 = N_("function using short complex types cannot be inline");
2990
2991 if (packed_dest_p)
2992 {
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2994
2995 emit_move_insn_1 (cmem, y);
2996 return emit_move_insn_1 (sreg, mem);
2997 }
2998 else
2999 {
3000 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3001
3002 emit_move_insn_1 (mem, sreg);
3003 return emit_move_insn_1 (x, cmem);
3004 }
3005 }
3006 }
3007 }
3008
3009 realpart_x = gen_realpart (submode, x);
3010 realpart_y = gen_realpart (submode, y);
3011 imagpart_x = gen_imagpart (submode, x);
3012 imagpart_y = gen_imagpart (submode, y);
3013
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3018 if (x != y
3019 && ! (reload_in_progress || reload_completed)
3020 && (GET_CODE (realpart_x) == SUBREG
3021 || GET_CODE (imagpart_x) == SUBREG))
3022 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3023
3024 emit_move_insn (realpart_x, realpart_y);
3025 emit_move_insn (imagpart_x, imagpart_y);
3026 }
3027
3028 return get_last_insn ();
3029 }
3030
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
3034 else if (GET_MODE_CLASS (mode) == MODE_CC
3035 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3036 {
3037 enum insn_code insn_code;
3038 enum machine_mode tmode = VOIDmode;
3039 rtx x1 = x, y1 = y;
3040
3041 if (mode != CCmode
3042 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3043 tmode = CCmode;
3044 else
3045 for (tmode = QImode; tmode != VOIDmode;
3046 tmode = GET_MODE_WIDER_MODE (tmode))
3047 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3048 break;
3049
3050 if (tmode == VOIDmode)
3051 abort ();
3052
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3059
3060 if (reload_in_progress)
3061 {
3062 x = gen_lowpart_common (tmode, x1);
3063 if (x == 0 && GET_CODE (x1) == MEM)
3064 {
3065 x = adjust_address_nv (x1, tmode, 0);
3066 copy_replacements (x1, x);
3067 }
3068
3069 y = gen_lowpart_common (tmode, y1);
3070 if (y == 0 && GET_CODE (y1) == MEM)
3071 {
3072 y = adjust_address_nv (y1, tmode, 0);
3073 copy_replacements (y1, y);
3074 }
3075 }
3076 else
3077 {
3078 x = gen_lowpart (tmode, x);
3079 y = gen_lowpart (tmode, y);
3080 }
3081
3082 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3083 return emit_insn (GEN_FCN (insn_code) (x, y));
3084 }
3085
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3091 && (submode = int_mode_for_mode (mode)) != BLKmode
3092 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3093 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3094 (simplify_gen_subreg (submode, x, mode, 0),
3095 simplify_gen_subreg (submode, y, mode, 0)));
3096
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
3099 even if they must turn into multiple assembler instructions. */
3100 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3101 {
3102 rtx last_insn = 0;
3103 rtx seq, inner;
3104 int need_clobber;
3105 int i;
3106
3107 #ifdef PUSH_ROUNDING
3108
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x, GET_MODE (x)))
3112 {
3113 rtx temp;
3114 enum rtx_code code;
3115
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp = expand_binop (Pmode,
3119 #ifdef STACK_GROWS_DOWNWARD
3120 sub_optab,
3121 #else
3122 add_optab,
3123 #endif
3124 stack_pointer_rtx,
3125 GEN_INT
3126 (PUSH_ROUNDING
3127 (GET_MODE_SIZE (GET_MODE (x)))),
3128 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3129
3130 if (temp != stack_pointer_rtx)
3131 emit_move_insn (stack_pointer_rtx, temp);
3132
3133 code = GET_CODE (XEXP (x, 0));
3134
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code == POST_INC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (-((HOST_WIDE_INT)
3139 GET_MODE_SIZE (GET_MODE (x)))));
3140 else if (code == POST_DEC)
3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3143 else
3144 temp = stack_pointer_rtx;
3145
3146 x = change_address (x, VOIDmode, temp);
3147 }
3148 #endif
3149
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && GET_CODE (x) == MEM
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3154 x = replace_equiv_address_nv (x, inner);
3155 if (reload_in_progress && GET_CODE (y) == MEM
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3157 y = replace_equiv_address_nv (y, inner);
3158
3159 start_sequence ();
3160
3161 need_clobber = 0;
3162 for (i = 0;
3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3164 i++)
3165 {
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart = operand_subword (y, i, 1, mode);
3168
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart == 0 && CONSTANT_P (y))
3173 {
3174 y = force_const_mem (mode, y);
3175 ypart = operand_subword (y, i, 1, mode);
3176 }
3177 else if (ypart == 0)
3178 ypart = operand_subword_force (y, i, mode);
3179
3180 if (xpart == 0 || ypart == 0)
3181 abort ();
3182
3183 need_clobber |= (GET_CODE (xpart) == SUBREG);
3184
3185 last_insn = emit_move_insn (xpart, ypart);
3186 }
3187
3188 seq = get_insns ();
3189 end_sequence ();
3190
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3195 if (x != y
3196 && ! (reload_in_progress || reload_completed)
3197 && need_clobber != 0)
3198 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3199
3200 emit_insn (seq);
3201
3202 return last_insn;
3203 }
3204 else
3205 abort ();
3206 }
3207
3208 /* If Y is representable exactly in a narrower mode, and the target can
3209 perform the extension directly from constant or memory, then emit the
3210 move as an extension. */
3211
3212 static rtx
3213 compress_float_constant (rtx x, rtx y)
3214 {
3215 enum machine_mode dstmode = GET_MODE (x);
3216 enum machine_mode orig_srcmode = GET_MODE (y);
3217 enum machine_mode srcmode;
3218 REAL_VALUE_TYPE r;
3219
3220 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3221
3222 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3223 srcmode != orig_srcmode;
3224 srcmode = GET_MODE_WIDER_MODE (srcmode))
3225 {
3226 enum insn_code ic;
3227 rtx trunc_y, last_insn;
3228
3229 /* Skip if the target can't extend this way. */
3230 ic = can_extend_p (dstmode, srcmode, 0);
3231 if (ic == CODE_FOR_nothing)
3232 continue;
3233
3234 /* Skip if the narrowed value isn't exact. */
3235 if (! exact_real_truncate (srcmode, &r))
3236 continue;
3237
3238 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3239
3240 if (LEGITIMATE_CONSTANT_P (trunc_y))
3241 {
3242 /* Skip if the target needs extra instructions to perform
3243 the extension. */
3244 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3245 continue;
3246 }
3247 else if (float_extend_from_mem[dstmode][srcmode])
3248 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3249 else
3250 continue;
3251
3252 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3253 last_insn = get_last_insn ();
3254
3255 if (GET_CODE (x) == REG)
3256 set_unique_reg_note (last_insn, REG_EQUAL, y);
3257
3258 return last_insn;
3259 }
3260
3261 return NULL_RTX;
3262 }
3263 \f
3264 /* Pushing data onto the stack. */
3265
3266 /* Push a block of length SIZE (perhaps variable)
3267 and return an rtx to address the beginning of the block.
3268 Note that it is not possible for the value returned to be a QUEUED.
3269 The value may be virtual_outgoing_args_rtx.
3270
3271 EXTRA is the number of bytes of padding to push in addition to SIZE.
3272 BELOW nonzero means this padding comes at low addresses;
3273 otherwise, the padding comes at high addresses. */
3274
3275 rtx
3276 push_block (rtx size, int extra, int below)
3277 {
3278 rtx temp;
3279
3280 size = convert_modes (Pmode, ptr_mode, size, 1);
3281 if (CONSTANT_P (size))
3282 anti_adjust_stack (plus_constant (size, extra));
3283 else if (GET_CODE (size) == REG && extra == 0)
3284 anti_adjust_stack (size);
3285 else
3286 {
3287 temp = copy_to_mode_reg (Pmode, size);
3288 if (extra != 0)
3289 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3290 temp, 0, OPTAB_LIB_WIDEN);
3291 anti_adjust_stack (temp);
3292 }
3293
3294 #ifndef STACK_GROWS_DOWNWARD
3295 if (0)
3296 #else
3297 if (1)
3298 #endif
3299 {
3300 temp = virtual_outgoing_args_rtx;
3301 if (extra != 0 && below)
3302 temp = plus_constant (temp, extra);
3303 }
3304 else
3305 {
3306 if (GET_CODE (size) == CONST_INT)
3307 temp = plus_constant (virtual_outgoing_args_rtx,
3308 -INTVAL (size) - (below ? 0 : extra));
3309 else if (extra != 0 && !below)
3310 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3311 negate_rtx (Pmode, plus_constant (size, extra)));
3312 else
3313 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3314 negate_rtx (Pmode, size));
3315 }
3316
3317 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3318 }
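/* An illustrative use: reserving a 16-byte block (the size is arbitrary)
   and wrapping the returned address in a MEM through which the caller
   can then store the data:

	rtx addr = push_block (GEN_INT (16), 0, 0);
	rtx blk = gen_rtx_MEM (BLKmode, addr);

   push_block itself only adjusts the stack pointer; filling the block
   is left to the caller.  */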
3319
3320 #ifdef PUSH_ROUNDING
3321
3322 /* Emit single push insn. */
3323
3324 static void
3325 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3326 {
3327 rtx dest_addr;
3328 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3329 rtx dest;
3330 enum insn_code icode;
3331 insn_operand_predicate_fn pred;
3332
3333 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3334   /* If there is a push pattern, use it.  Otherwise try the old way of
3335      handing a MEM representing the push operation to the move expander.  */
3336 icode = push_optab->handlers[(int) mode].insn_code;
3337 if (icode != CODE_FOR_nothing)
3338 {
3339 if (((pred = insn_data[(int) icode].operand[0].predicate)
3340 && !((*pred) (x, mode))))
3341 x = force_reg (mode, x);
3342 emit_insn (GEN_FCN (icode) (x));
3343 return;
3344 }
3345 if (GET_MODE_SIZE (mode) == rounded_size)
3346 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have
3350 access to type. */
3351 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3352 {
3353 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3354 HOST_WIDE_INT offset;
3355
3356 emit_move_insn (stack_pointer_rtx,
3357 expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360 #else
3361 add_optab,
3362 #endif
3363 stack_pointer_rtx,
3364 GEN_INT (rounded_size),
3365 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3366
3367 offset = (HOST_WIDE_INT) padding_size;
3368 #ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE == POST_DEC)
3370 /* We have already decremented the stack pointer, so get the
3371 previous value. */
3372 offset += (HOST_WIDE_INT) rounded_size;
3373 #else
3374 if (STACK_PUSH_CODE == POST_INC)
3375 /* We have already incremented the stack pointer, so get the
3376 previous value. */
3377 offset -= (HOST_WIDE_INT) rounded_size;
3378 #endif
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3380 }
3381 else
3382 {
3383 #ifdef STACK_GROWS_DOWNWARD
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3387 #else
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (rounded_size));
3391 #endif
3392 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3393 }
3394
3395 dest = gen_rtx_MEM (mode, dest_addr);
3396
3397 if (type != 0)
3398 {
3399 set_mem_attributes (dest, type, 1);
3400
3401 if (flag_optimize_sibling_calls)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest, 0);
3407 }
3408 emit_move_insn (dest, x);
3409 }
3410 #endif
3411
3412 /* Generate code to push X onto the stack, assuming it has mode MODE and
3413 type TYPE.
3414 MODE is redundant except when X is a CONST_INT (since they don't
3415 carry mode info).
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3418
3419 ALIGN (in bits) is maximum alignment we can assume.
3420
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
3426    If REG is zero but PARTIAL is not, take all other actions for an
3427 argument partially in registers, but do not actually load any
3428 registers.
3429
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
3431 This is ignored if an argument block has already been allocated.
3432
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
3435 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3436    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3437
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3439
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
3443
3444 void
3445 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3446 unsigned int align, int partial, rtx reg, int extra,
3447 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3448 rtx alignment_pad)
3449 {
3450 rtx xinner;
3451 enum direction stack_direction
3452 #ifdef STACK_GROWS_DOWNWARD
3453 = downward;
3454 #else
3455 = upward;
3456 #endif
3457
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3462
3463 /* Invert direction if stack is post-decrement.
3464 FIXME: why? */
3465 if (STACK_PUSH_CODE == POST_DEC)
3466 if (where_pad != none)
3467 where_pad = (where_pad == downward ? upward : downward);
3468
3469 xinner = x = protect_from_queue (x, 0);
3470
3471 if (mode == BLKmode)
3472 {
3473 /* Copy a block into the stack, entirely or partially. */
3474
3475 rtx temp;
3476 int used = partial * UNITS_PER_WORD;
3477 int offset;
3478 int skip;
3479
3480 if (reg && GET_CODE (reg) == PARALLEL)
3481 {
3482 /* Use the size of the elt to compute offset. */
3483 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3484 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3485 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3486 }
3487 else
3488 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3489
3490 if (size == 0)
3491 abort ();
3492
3493 used -= offset;
3494
3495 /* USED is now the # of bytes we need not copy to the stack
3496 because registers will take care of them. */
3497
3498 if (partial != 0)
3499 xinner = adjust_address (xinner, BLKmode, used);
3500
3501 /* If the partial register-part of the arg counts in its stack size,
3502 skip the part of stack space corresponding to the registers.
3503 Otherwise, start copying to the beginning of the stack space,
3504 by setting SKIP to 0. */
3505 skip = (reg_parm_stack_space == 0) ? 0 : used;
3506
3507 #ifdef PUSH_ROUNDING
3508 /* Do it with several push insns if that doesn't take lots of insns
3509 and if there is no difficulty with push insns that skip bytes
3510 on the stack for alignment purposes. */
3511 if (args_addr == 0
3512 && PUSH_ARGS
3513 && GET_CODE (size) == CONST_INT
3514 && skip == 0
3515 && MEM_ALIGN (xinner) >= align
3516 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3517 /* Here we avoid the case of a structure whose weak alignment
3518 forces many pushes of a small amount of data,
3519 and such small pushes do rounding that causes trouble. */
3520 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3521 || align >= BIGGEST_ALIGNMENT
3522 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3523 == (align / BITS_PER_UNIT)))
3524 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3525 {
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra && args_addr == 0
3530 && where_pad != none && where_pad != stack_direction)
3531 anti_adjust_stack (GEN_INT (extra));
3532
3533 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3534 }
3535 else
3536 #endif /* PUSH_ROUNDING */
3537 {
3538 rtx target;
3539
3540 /* Otherwise make space on the stack and copy the data
3541 to the address of that space. */
3542
3543 /* Deduct words put into registers from the size we must copy. */
3544 if (partial != 0)
3545 {
3546 if (GET_CODE (size) == CONST_INT)
3547 size = GEN_INT (INTVAL (size) - used);
3548 else
3549 size = expand_binop (GET_MODE (size), sub_optab, size,
3550 GEN_INT (used), NULL_RTX, 0,
3551 OPTAB_LIB_WIDEN);
3552 }
3553
3554 /* Get the address of the stack space.
3555 In this case, we do not deal with EXTRA separately.
3556 A single stack adjust will do. */
3557 if (! args_addr)
3558 {
3559 temp = push_block (size, extra, where_pad == downward);
3560 extra = 0;
3561 }
3562 else if (GET_CODE (args_so_far) == CONST_INT)
3563 temp = memory_address (BLKmode,
3564 plus_constant (args_addr,
3565 skip + INTVAL (args_so_far)));
3566 else
3567 temp = memory_address (BLKmode,
3568 plus_constant (gen_rtx_PLUS (Pmode,
3569 args_addr,
3570 args_so_far),
3571 skip));
3572
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3574 {
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3578
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3582 }
3583
3584 target = gen_rtx_MEM (BLKmode, temp);
3585
3586 if (type != 0)
3587 {
3588 set_mem_attributes (target, type, 1);
3589 /* Function incoming arguments may overlap with sibling call
3590 outgoing arguments and we cannot allow reordering of reads
3591 from function arguments with stores to outgoing arguments
3592 of sibling calls. */
3593 set_mem_alias_set (target, 0);
3594 }
3595
3596 /* ALIGN may well be better aligned than TYPE, e.g. due to
3597 PARM_BOUNDARY. Assume the caller isn't lying. */
3598 set_mem_align (target, align);
3599
3600 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3601 }
3602 }
3603 else if (partial > 0)
3604 {
3605 /* Scalar partly in registers. */
3606
3607 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3608 int i;
3609 int not_stack;
3610 /* # words of start of argument
3611 that we must make space for but need not store. */
3612 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3613 int args_offset = INTVAL (args_so_far);
3614 int skip;
3615
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra && args_addr == 0
3620 && where_pad != none && where_pad != stack_direction)
3621 anti_adjust_stack (GEN_INT (extra));
3622
3623 /* If we make space by pushing it, we might as well push
3624 the real data. Otherwise, we can leave OFFSET nonzero
3625 and leave the space uninitialized. */
3626 if (args_addr == 0)
3627 offset = 0;
3628
3629 /* Now NOT_STACK gets the number of words that we don't need to
3630 allocate on the stack. */
3631 not_stack = partial - offset;
3632
3633 /* If the partial register-part of the arg counts in its stack size,
3634 skip the part of stack space corresponding to the registers.
3635 Otherwise, start copying to the beginning of the stack space,
3636 by setting SKIP to 0. */
3637 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3638
3639 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3640 x = validize_mem (force_const_mem (mode, x));
3641
3642 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3643 SUBREGs of such registers are not allowed. */
3644 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3645 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3646 x = copy_to_reg (x);
3647
3648 /* Loop over all the words allocated on the stack for this arg. */
3649 /* We can do it by words, because any scalar bigger than a word
3650 has a size a multiple of a word. */
3651 #ifndef PUSH_ARGS_REVERSED
3652 for (i = not_stack; i < size; i++)
3653 #else
3654 for (i = size - 1; i >= not_stack; i--)
3655 #endif
3656 if (i >= not_stack + offset)
3657 emit_push_insn (operand_subword_force (x, i, mode),
3658 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3659 0, args_addr,
3660 GEN_INT (args_offset + ((i - not_stack + skip)
3661 * UNITS_PER_WORD)),
3662 reg_parm_stack_space, alignment_pad);
3663 }
3664 else
3665 {
3666 rtx addr;
3667 rtx dest;
3668
3669 /* Push padding now if padding above and stack grows down,
3670 or if padding below and stack grows up.
3671 But if space already allocated, this has already been done. */
3672 if (extra && args_addr == 0
3673 && where_pad != none && where_pad != stack_direction)
3674 anti_adjust_stack (GEN_INT (extra));
3675
3676 #ifdef PUSH_ROUNDING
3677 if (args_addr == 0 && PUSH_ARGS)
3678 emit_single_push_insn (mode, x, type);
3679 else
3680 #endif
3681 {
3682 if (GET_CODE (args_so_far) == CONST_INT)
3683 addr
3684 = memory_address (mode,
3685 plus_constant (args_addr,
3686 INTVAL (args_so_far)));
3687 else
3688 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3689 args_so_far));
3690 dest = gen_rtx_MEM (mode, addr);
3691 if (type != 0)
3692 {
3693 set_mem_attributes (dest, type, 1);
3694 /* Function incoming arguments may overlap with sibling call
3695 outgoing arguments and we cannot allow reordering of reads
3696 from function arguments with stores to outgoing arguments
3697 of sibling calls. */
3698 set_mem_alias_set (dest, 0);
3699 }
3700
3701 emit_move_insn (dest, x);
3702 }
3703 }
3704
3705 /* If part should go in registers, copy that part
3706 into the appropriate registers. Do this now, at the end,
3707 since mem-to-mem copies above may do function calls. */
3708 if (partial > 0 && reg != 0)
3709 {
3710 /* Handle calls that pass values in multiple non-contiguous locations.
3711 The Irix 6 ABI has examples of this. */
3712 if (GET_CODE (reg) == PARALLEL)
3713 emit_group_load (reg, x, type, -1);
3714 else
3715 move_block_to_reg (REGNO (reg), x, partial, mode);
3716 }
3717
3718 if (extra && args_addr == 0 && where_pad == stack_direction)
3719 anti_adjust_stack (GEN_INT (extra));
3720
3721 if (alignment_pad && args_addr == 0)
3722 anti_adjust_stack (alignment_pad);
3723 }
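/* Illustrative sketch, kept under "#if 0" so it is never built: the
   bookkeeping the BLKmode branch of emit_push_insn does for the part of
   an argument that is already passed in registers.  The word size and
   parameter boundary below are hypothetical example values, not taken
   from any real target; only the used/offset/skip arithmetic mirrors
   the code above.  */
#if 0
static void
example_partial_arg_bookkeeping (void)
{
  const int units_per_word = 4;        /* hypothetical UNITS_PER_WORD */
  const int parm_boundary_bytes = 8;   /* hypothetical PARM_BOUNDARY / BITS_PER_UNIT */
  const int reg_parm_stack_space = 0;  /* no stack space reserved for reg parms */
  int partial = 3;                     /* three words of the arg go in registers */
  int used, offset, skip;

  used = partial * units_per_word;     /* 12 bytes are covered by registers */
  offset = used % parm_boundary_bytes; /* 4 bytes of misalignment stay on the stack */
  used -= offset;                      /* 8 bytes need not be copied at all */

  /* With no reserved stack space for register parms, copying starts at the
     beginning of the stack area; otherwise it skips the register part.  */
  skip = (reg_parm_stack_space == 0) ? 0 : used;
  (void) skip;
}
#endif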
3724 \f
3725 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3726 operations. */
3727
3728 static rtx
3729 get_subtarget (rtx x)
3730 {
3731 return ((x == 0
3732 /* Only registers can be subtargets. */
3733 || GET_CODE (x) != REG
3734 /* If the register is readonly, it can't be set more than once. */
3735 || RTX_UNCHANGING_P (x)
3736 /* Don't use hard regs to avoid extending their life. */
3737 || REGNO (x) < FIRST_PSEUDO_REGISTER
3738 /* Avoid subtargets inside loops,
3739 since they hide some invariant expressions. */
3740 || preserve_subexpressions_p ())
3741 ? 0 : x);
3742 }
3743
3744 /* Expand an assignment that stores the value of FROM into TO.
3745 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3746 (This may contain a QUEUED rtx;
3747 if the value is constant, this rtx is a constant.)
3748 Otherwise, the returned value is NULL_RTX. */
3749
3750 rtx
3751 expand_assignment (tree to, tree from, int want_value)
3752 {
3753 rtx to_rtx = 0;
3754 rtx result;
3755
3756 /* Don't crash if the lhs of the assignment was erroneous. */
3757
3758 if (TREE_CODE (to) == ERROR_MARK)
3759 {
3760 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3761 return want_value ? result : NULL_RTX;
3762 }
3763
3764 /* Assignment of a structure component needs special treatment
3765 if the structure component's rtx is not simply a MEM.
3766 Assignment of an array element at a constant index, and assignment of
3767 an array element in an unaligned packed structure field, has the same
3768 problem. */
3769
3770 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3771 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3772 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3773 {
3774 enum machine_mode mode1;
3775 HOST_WIDE_INT bitsize, bitpos;
3776 rtx orig_to_rtx;
3777 tree offset;
3778 int unsignedp;
3779 int volatilep = 0;
3780 tree tem;
3781
3782 push_temp_slots ();
3783 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3784 &unsignedp, &volatilep);
3785
3786 /* If we are going to use store_bit_field and extract_bit_field,
3787 make sure to_rtx will be safe for multiple use. */
3788
3789 if (mode1 == VOIDmode && want_value)
3790 tem = stabilize_reference (tem);
3791
3792 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3793
3794 if (offset != 0)
3795 {
3796 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3797
3798 if (GET_CODE (to_rtx) != MEM)
3799 abort ();
3800
3801 #ifdef POINTERS_EXTEND_UNSIGNED
3802 if (GET_MODE (offset_rtx) != Pmode)
3803 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3804 #else
3805 if (GET_MODE (offset_rtx) != ptr_mode)
3806 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3807 #endif
3808
3809 /* A constant address in TO_RTX can have VOIDmode, we must not try
3810 to call force_reg for that case. Avoid that case. */
3811 if (GET_CODE (to_rtx) == MEM
3812 && GET_MODE (to_rtx) == BLKmode
3813 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3814 && bitsize > 0
3815 && (bitpos % bitsize) == 0
3816 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3817 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3818 {
3819 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3820 bitpos = 0;
3821 }
3822
3823 to_rtx = offset_address (to_rtx, offset_rtx,
3824 highest_pow2_factor_for_type (TREE_TYPE (to),
3825 offset));
3826 }
3827
3828 if (GET_CODE (to_rtx) == MEM)
3829 {
3830 /* If the field is at offset zero, we could have been given the
3831 DECL_RTX of the parent struct. Don't munge it. */
3832 to_rtx = shallow_copy_rtx (to_rtx);
3833
3834 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3835 }
3836
3837 /* Deal with volatile and readonly fields. The former is only done
3838 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3839 if (volatilep && GET_CODE (to_rtx) == MEM)
3840 {
3841 if (to_rtx == orig_to_rtx)
3842 to_rtx = copy_rtx (to_rtx);
3843 MEM_VOLATILE_P (to_rtx) = 1;
3844 }
3845
3846 if (TREE_CODE (to) == COMPONENT_REF
3847 && TREE_READONLY (TREE_OPERAND (to, 1))
3848 /* We can't assert that a MEM won't be set more than once
3849 if the component is not addressable because another
3850 non-addressable component may be referenced by the same MEM. */
3851 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3852 {
3853 if (to_rtx == orig_to_rtx)
3854 to_rtx = copy_rtx (to_rtx);
3855 RTX_UNCHANGING_P (to_rtx) = 1;
3856 }
3857
3858 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3859 {
3860 if (to_rtx == orig_to_rtx)
3861 to_rtx = copy_rtx (to_rtx);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3863 }
3864
3865 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3866 (want_value
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode)
3869 TYPE_MODE (TREE_TYPE (to)))
3870 : VOIDmode),
3871 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3872
3873 preserve_temp_slots (result);
3874 free_temp_slots ();
3875 pop_temp_slots ();
3876
3877 /* If the value is meaningful, convert RESULT to the proper mode.
3878 Otherwise, return nothing. */
3879 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3880 TYPE_MODE (TREE_TYPE (from)),
3881 result,
3882 TREE_UNSIGNED (TREE_TYPE (to)))
3883 : NULL_RTX);
3884 }
3885
3886 /* If the rhs is a function call and its value is not an aggregate,
3887 call the function before we start to compute the lhs.
3888 This is needed for correct code for cases such as
3889 val = setjmp (buf) on machines where reference to val
3890 requires loading up part of an address in a separate insn.
3891
3892 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3893 since it might be a promoted variable where the zero- or sign- extension
3894 needs to be done. Handling this in the normal way is safe because no
3895 computation is done before the call. */
3896 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3898 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3899 && GET_CODE (DECL_RTL (to)) == REG))
3900 {
3901 rtx value;
3902
3903 push_temp_slots ();
3904 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3905 if (to_rtx == 0)
3906 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3907
3908 /* Handle calls that return values in multiple non-contiguous locations.
3909 The Irix 6 ABI has examples of this. */
3910 if (GET_CODE (to_rtx) == PARALLEL)
3911 emit_group_load (to_rtx, value, TREE_TYPE (from),
3912 int_size_in_bytes (TREE_TYPE (from)));
3913 else if (GET_MODE (to_rtx) == BLKmode)
3914 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3915 else
3916 {
3917 if (POINTER_TYPE_P (TREE_TYPE (to)))
3918 value = convert_memory_address (GET_MODE (to_rtx), value);
3919 emit_move_insn (to_rtx, value);
3920 }
3921 preserve_temp_slots (to_rtx);
3922 free_temp_slots ();
3923 pop_temp_slots ();
3924 return want_value ? to_rtx : NULL_RTX;
3925 }
3926
3927 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3928 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3929
3930 if (to_rtx == 0)
3931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3932
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to) == RESULT_DECL
3935 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3936 {
3937 rtx temp;
3938
3939 push_temp_slots ();
3940 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3941
3942 if (GET_CODE (to_rtx) == PARALLEL)
3943 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3944 int_size_in_bytes (TREE_TYPE (from)));
3945 else
3946 emit_move_insn (to_rtx, temp);
3947
3948 preserve_temp_slots (to_rtx);
3949 free_temp_slots ();
3950 pop_temp_slots ();
3951 return want_value ? to_rtx : NULL_RTX;
3952 }
3953
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
3957 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct)
3960 {
3961 rtx from_rtx, size;
3962
3963 push_temp_slots ();
3964 size = expr_size (from);
3965 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3966
3967 if (TARGET_MEM_FUNCTIONS)
3968 emit_library_call (memmove_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3970 XEXP (from_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (sizetype),
3972 size, TREE_UNSIGNED (sizetype)),
3973 TYPE_MODE (sizetype));
3974 else
3975 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3976 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3977 XEXP (to_rtx, 0), Pmode,
3978 convert_to_mode (TYPE_MODE (integer_type_node),
3979 size,
3980 TREE_UNSIGNED (integer_type_node)),
3981 TYPE_MODE (integer_type_node));
3982
3983 preserve_temp_slots (to_rtx);
3984 free_temp_slots ();
3985 pop_temp_slots ();
3986 return want_value ? to_rtx : NULL_RTX;
3987 }
3988
3989 /* Compute FROM and store the value in the rtx we got. */
3990
3991 push_temp_slots ();
3992 result = store_expr (from, to_rtx, want_value);
3993 preserve_temp_slots (result);
3994 free_temp_slots ();
3995 pop_temp_slots ();
3996 return want_value ? result : NULL_RTX;
3997 }
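/* Illustrative sketch, kept under "#if 0" so it is never built: why the
   structure-return path above calls memmove (or bcopy) instead of memcpy.
   When a value is copied through a pointer into the structure-value
   return block, source and destination may overlap, and only memmove is
   defined for that case.  Plain ISO C; nothing below is specific to the
   compiler.  */
#if 0
#include <string.h>

static void
example_overlapping_copy (char *buf, size_t len)
{
  /* Shift the buffer contents down by one byte.  The source bytes
     [1, len) and the destination bytes [0, len - 1) overlap, so memmove
     is required; memcpy would have undefined behavior here.  */
  if (len > 1)
    memmove (buf, buf + 1, len - 1);
}
#endif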
3998
3999 /* Generate code for computing expression EXP,
4000 and storing the value into TARGET.
4001 TARGET may contain a QUEUED rtx.
4002
4003 If WANT_VALUE & 1 is nonzero, return a copy of the value
4004 not in TARGET, so that we can be sure to use the proper
4005 value in a containing expression even if TARGET has something
4006 else stored in it. If possible, we copy the value through a pseudo
4007 and return that pseudo. Or, if the value is constant, we try to
4008 return the constant. In some cases, we return a pseudo
4009 copied *from* TARGET.
4010
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem,
4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to
4016 be more thorough?
4017
4018 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4019 to catch quickly any cases where the caller uses the value
4020 and fails to set WANT_VALUE.
4021
4022 If WANT_VALUE & 2 is set, this is a store into a call param on the
4023 stack, and block moves may need to be treated specially. */
4024
4025 rtx
4026 store_expr (tree exp, rtx target, int want_value)
4027 {
4028 rtx temp;
4029 int dont_return_target = 0;
4030 int dont_store_target = 0;
4031
4032 if (VOID_TYPE_P (TREE_TYPE (exp)))
4033 {
4034 /* C++ can generate ?: expressions with a throw expression in one
4035 branch and an rvalue in the other. Here, we resolve attempts to
4036 store the throw expression's nonexistent result. */
4037 if (want_value)
4038 abort ();
4039 expand_expr (exp, const0_rtx, VOIDmode, 0);
4040 return NULL_RTX;
4041 }
4042 if (TREE_CODE (exp) == COMPOUND_EXPR)
4043 {
4044 /* Perform first part of compound expression, then assign from second
4045 part. */
4046 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4047 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4048 emit_queue ();
4049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4050 }
4051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4052 {
4053 /* For conditional expression, get safe form of the target. Then
4054 test the condition, doing the appropriate assignment on either
4055 side. This avoids the creation of unnecessary temporaries.
4056 For non-BLKmode, it is more efficient not to do this. */
4057
4058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059
4060 emit_queue ();
4061 target = protect_from_queue (target, 1);
4062
4063 do_pending_stack_adjust ();
4064 NO_DEFER_POP;
4065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4066 start_cleanup_deferral ();
4067 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4068 end_cleanup_deferral ();
4069 emit_queue ();
4070 emit_jump_insn (gen_jump (lab2));
4071 emit_barrier ();
4072 emit_label (lab1);
4073 start_cleanup_deferral ();
4074 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4075 end_cleanup_deferral ();
4076 emit_queue ();
4077 emit_label (lab2);
4078 OK_DEFER_POP;
4079
4080 return want_value & 1 ? target : NULL_RTX;
4081 }
4082 else if (queued_subexp_p (target))
4083 /* If target contains a postincrement, let's not risk
4084 using it as the place to generate the rhs. */
4085 {
4086 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4087 {
4088 /* Expand EXP into a new pseudo. */
4089 temp = gen_reg_rtx (GET_MODE (target));
4090 temp = expand_expr (exp, temp, GET_MODE (target),
4091 (want_value & 2
4092 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4093 }
4094 else
4095 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4096 (want_value & 2
4097 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4098
4099 /* If target is volatile, ANSI requires accessing the value
4100 *from* the target, if it is accessed. So make that happen.
4101 In no case return the target itself. */
4102 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4103 dont_return_target = 1;
4104 }
4105 else if ((want_value & 1) != 0
4106 && GET_CODE (target) == MEM
4107 && ! MEM_VOLATILE_P (target)
4108 && GET_MODE (target) != BLKmode)
4109 /* If target is in memory and caller wants value in a register instead,
4110 arrange that. Pass TARGET as target for expand_expr so that,
4111 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4112 We know expand_expr will not use the target in that case.
4113 Don't do this if TARGET is volatile because we are supposed
4114 to write it and then read it. */
4115 {
4116 temp = expand_expr (exp, target, GET_MODE (target),
4117 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4118 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4119 {
4120 /* If TEMP is already in the desired TARGET, only copy it from
4121 memory and don't store it there again. */
4122 if (temp == target
4123 || (rtx_equal_p (temp, target)
4124 && ! side_effects_p (temp) && ! side_effects_p (target)))
4125 dont_store_target = 1;
4126 temp = copy_to_reg (temp);
4127 }
4128 dont_return_target = 1;
4129 }
4130 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4131 /* If this is a scalar in a register that is stored in a wider mode
4132 than the declared mode, compute the result into its declared mode
4133 and then convert to the wider mode. Our value is the computed
4134 expression. */
4135 {
4136 rtx inner_target = 0;
4137
4138 /* If we don't want a value, we can do the conversion inside EXP,
4139 which will often result in some optimizations. Do the conversion
4140 in two steps: first change the signedness, if needed, then
4141 the extend. But don't do this if the type of EXP is a subtype
4142 of something else since then the conversion might involve
4143 more than just converting modes. */
4144 if ((want_value & 1) == 0
4145 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4146 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4147 {
4148 if (TREE_UNSIGNED (TREE_TYPE (exp))
4149 != SUBREG_PROMOTED_UNSIGNED_P (target))
4150 exp = convert
4151 ((*lang_hooks.types.signed_or_unsigned_type)
4152 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4153
4154 exp = convert ((*lang_hooks.types.type_for_mode)
4155 (GET_MODE (SUBREG_REG (target)),
4156 SUBREG_PROMOTED_UNSIGNED_P (target)),
4157 exp);
4158
4159 inner_target = SUBREG_REG (target);
4160 }
4161
4162 temp = expand_expr (exp, inner_target, VOIDmode,
4163 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4164
4165 /* If TEMP is a MEM and we want a result value, make the access
4166 now so it gets done only once. Strictly speaking, this is
4167 only necessary if the MEM is volatile, or if the address
4168 overlaps TARGET. But not performing the load twice also
4169 reduces the amount of rtl we generate and then have to CSE. */
4170 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4171 temp = copy_to_reg (temp);
4172
4173 /* If TEMP is a VOIDmode constant, use convert_modes to make
4174 sure that we properly convert it. */
4175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4176 {
4177 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4178 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4179 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4180 GET_MODE (target), temp,
4181 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 }
4183
4184 convert_move (SUBREG_REG (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4186
4187 /* If we promoted a constant, change the mode back down to match
4188 target. Otherwise, the caller might get confused by a result whose
4189 mode is larger than expected. */
4190
4191 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4192 {
4193 if (GET_MODE (temp) != VOIDmode)
4194 {
4195 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4196 SUBREG_PROMOTED_VAR_P (temp) = 1;
4197 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4198 SUBREG_PROMOTED_UNSIGNED_P (target));
4199 }
4200 else
4201 temp = convert_modes (GET_MODE (target),
4202 GET_MODE (SUBREG_REG (target)),
4203 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4204 }
4205
4206 return want_value & 1 ? temp : NULL_RTX;
4207 }
4208 else
4209 {
4210 temp = expand_expr (exp, target, GET_MODE (target),
4211 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4212 /* Return TARGET if it's a specified hardware register.
4213 If TARGET is a volatile mem ref, either return TARGET
4214 or return a reg copied *from* TARGET; ANSI requires this.
4215
4216 Otherwise, if TEMP is not TARGET, return TEMP
4217 if it is constant (for efficiency),
4218 or if we really want the correct value. */
4219 if (!(target && GET_CODE (target) == REG
4220 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4221 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4222 && ! rtx_equal_p (temp, target)
4223 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4224 dont_return_target = 1;
4225 }
4226
4227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4228 the same as that of TARGET, adjust the constant. This is needed, for
4229 example, in case it is a CONST_DOUBLE and we want only a word-sized
4230 value. */
4231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4235 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4236
4237 /* If value was not generated in the target, store it there.
4238 Convert the value to TARGET's type first if necessary.
4239 If TEMP and TARGET compare equal according to rtx_equal_p, but
4240 one or both of them are volatile memory refs, we have to distinguish
4241 two cases:
4242 - expand_expr has used TARGET. In this case, we must not generate
4243 another copy. This can be detected by TARGET being equal according
4244 to == .
4245 - expand_expr has not used TARGET - that means that the source just
4246 happens to have the same RTX form. Since temp will have been created
4247 by expand_expr, it will compare unequal according to == .
4248 We must generate a copy in this case, to reach the correct number
4249 of volatile memory references. */
4250
4251 if ((! rtx_equal_p (temp, target)
4252 || (temp != target && (side_effects_p (temp)
4253 || side_effects_p (target))))
4254 && TREE_CODE (exp) != ERROR_MARK
4255 && ! dont_store_target
4256 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4257 but TARGET is not valid memory reference, TEMP will differ
4258 from TARGET although it is really the same location. */
4259 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4260 || target != DECL_RTL_IF_SET (exp))
4261 /* If there's nothing to copy, don't bother. Don't call expr_size
4262 unless necessary, because the expr_size hook of some front ends
4263 (e.g. C++) aborts on objects that are not supposed to be bit-copied or
4264 bit-initialized. */
4265 && expr_size (exp) != const0_rtx)
4266 {
4267 target = protect_from_queue (target, 1);
4268 if (GET_MODE (temp) != GET_MODE (target)
4269 && GET_MODE (temp) != VOIDmode)
4270 {
4271 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4272 if (dont_return_target)
4273 {
4274 /* In this case, we will return TEMP,
4275 so make sure it has the proper mode.
4276 But don't forget to store the value into TARGET. */
4277 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4278 emit_move_insn (target, temp);
4279 }
4280 else
4281 convert_move (target, temp, unsignedp);
4282 }
4283
4284 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4285 {
4286 /* Handle copying a string constant into an array. The string
4287 constant may be shorter than the array. So copy just the string's
4288 actual length, and clear the rest. First get the size of the data
4289 type of the string, which is actually the size of the target. */
4290 rtx size = expr_size (exp);
4291
4292 if (GET_CODE (size) == CONST_INT
4293 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4294 emit_block_move (target, temp, size,
4295 (want_value & 2
4296 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4297 else
4298 {
4299 /* Compute the size of the data to copy from the string. */
4300 tree copy_size
4301 = size_binop (MIN_EXPR,
4302 make_tree (sizetype, size),
4303 size_int (TREE_STRING_LENGTH (exp)));
4304 rtx copy_size_rtx
4305 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4306 (want_value & 2
4307 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4308 rtx label = 0;
4309
4310 /* Copy that much. */
4311 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4312 TREE_UNSIGNED (sizetype));
4313 emit_block_move (target, temp, copy_size_rtx,
4314 (want_value & 2
4315 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4316
4317 /* Figure out how much is left in TARGET that we have to clear.
4318 Do all calculations in ptr_mode. */
4319 if (GET_CODE (copy_size_rtx) == CONST_INT)
4320 {
4321 size = plus_constant (size, -INTVAL (copy_size_rtx));
4322 target = adjust_address (target, BLKmode,
4323 INTVAL (copy_size_rtx));
4324 }
4325 else
4326 {
4327 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4328 copy_size_rtx, NULL_RTX, 0,
4329 OPTAB_LIB_WIDEN);
4330
4331 #ifdef POINTERS_EXTEND_UNSIGNED
4332 if (GET_MODE (copy_size_rtx) != Pmode)
4333 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4334 TREE_UNSIGNED (sizetype));
4335 #endif
4336
4337 target = offset_address (target, copy_size_rtx,
4338 highest_pow2_factor (copy_size));
4339 label = gen_label_rtx ();
4340 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4341 GET_MODE (size), 0, label);
4342 }
4343
4344 if (size != const0_rtx)
4345 clear_storage (target, size);
4346
4347 if (label)
4348 emit_label (label);
4349 }
4350 }
4351 /* Handle calls that return values in multiple non-contiguous locations.
4352 The Irix 6 ABI has examples of this. */
4353 else if (GET_CODE (target) == PARALLEL)
4354 emit_group_load (target, temp, TREE_TYPE (exp),
4355 int_size_in_bytes (TREE_TYPE (exp)));
4356 else if (GET_MODE (temp) == BLKmode)
4357 emit_block_move (target, temp, expr_size (exp),
4358 (want_value & 2
4359 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4360 else
4361 emit_move_insn (target, temp);
4362 }
4363
4364 /* If we don't want a value, return NULL_RTX. */
4365 if ((want_value & 1) == 0)
4366 return NULL_RTX;
4367
4368 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4369 ??? The latter test doesn't seem to make sense. */
4370 else if (dont_return_target && GET_CODE (temp) != MEM)
4371 return temp;
4372
4373 /* Return TARGET itself if it is a hard register. */
4374 else if ((want_value & 1) != 0
4375 && GET_MODE (target) != BLKmode
4376 && ! (GET_CODE (target) == REG
4377 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4378 return copy_to_reg (target);
4379
4380 else
4381 return target;
4382 }
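/* Illustrative sketch, kept under "#if 0" so it is never built: the
   "copy the string, then clear the rest" strategy store_expr uses above
   when a STRING_CST initializes a larger array.  This is plain ISO C on
   ordinary memory; the real code does the same thing on RTL with
   emit_block_move and clear_storage.  */
#if 0
#include <string.h>

static void
example_string_into_array (char *target, size_t target_size,
                           const char *str, size_t str_len)
{
  /* Copy only as much of the string as actually exists (or fits).  */
  size_t copy_size = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy_size);

  /* Whatever is left of the target beyond the copied data is zeroed,
     mirroring the clear_storage call above.  */
  if (copy_size < target_size)
    memset (target + copy_size, 0, target_size - copy_size);
}
#endif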
4383 \f
4384 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4385
4386 static int
4387 is_zeros_p (tree exp)
4388 {
4389 tree elt;
4390
4391 switch (TREE_CODE (exp))
4392 {
4393 case CONVERT_EXPR:
4394 case NOP_EXPR:
4395 case NON_LVALUE_EXPR:
4396 case VIEW_CONVERT_EXPR:
4397 return is_zeros_p (TREE_OPERAND (exp, 0));
4398
4399 case INTEGER_CST:
4400 return integer_zerop (exp);
4401
4402 case COMPLEX_CST:
4403 return
4404 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4405
4406 case REAL_CST:
4407 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4408
4409 case VECTOR_CST:
4410 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4411 elt = TREE_CHAIN (elt))
4412 if (!is_zeros_p (TREE_VALUE (elt)))
4413 return 0;
4414
4415 return 1;
4416
4417 case CONSTRUCTOR:
4418 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4419 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4420 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4421 if (! is_zeros_p (TREE_VALUE (elt)))
4422 return 0;
4423
4424 return 1;
4425
4426 default:
4427 return 0;
4428 }
4429 }
4430
4431 /* Return 1 if EXP contains mostly (3/4) zeros. */
4432
4433 int
4434 mostly_zeros_p (tree exp)
4435 {
4436 if (TREE_CODE (exp) == CONSTRUCTOR)
4437 {
4438 int elts = 0, zeros = 0;
4439 tree elt = CONSTRUCTOR_ELTS (exp);
4440 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4441 {
4442 /* If there are no ranges of true bits, it is all zero. */
4443 return elt == NULL_TREE;
4444 }
4445 for (; elt; elt = TREE_CHAIN (elt))
4446 {
4447 /* We do not handle the case where the index is a RANGE_EXPR,
4448 so the statistic will be somewhat inaccurate.
4449 We do make a more accurate count in store_constructor itself,
4450 so since this function is only used for nested array elements,
4451 this should be close enough. */
4452 if (mostly_zeros_p (TREE_VALUE (elt)))
4453 zeros++;
4454 elts++;
4455 }
4456
4457 return 4 * zeros >= 3 * elts;
4458 }
4459
4460 return is_zeros_p (exp);
4461 }
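/* Illustrative sketch, kept under "#if 0" so it is never built: the
   "at least 3/4 zeros" test used by mostly_zeros_p and by the clearing
   heuristics in store_constructor.  Comparing 4 * zeros against
   3 * elts keeps the check in integer arithmetic; the counts below are
   made-up examples.  */
#if 0
static int
example_mostly_zeros (int zeros, int elts)
{
  /* E.g. zeros = 6, elts = 8: 24 >= 24 holds, so the aggregate would be
     cleared wholesale first and only the nonzero elements stored.  */
  return 4 * zeros >= 3 * elts;
}
#endif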
4462 \f
4463 /* Helper function for store_constructor.
4464 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4465 TYPE is the type of the CONSTRUCTOR, not the element type.
4466 CLEARED is as for store_constructor.
4467 ALIAS_SET is the alias set to use for any stores.
4468
4469 This provides a recursive shortcut back to store_constructor when it isn't
4470 necessary to go through store_field. This is so that we can pass through
4471 the cleared field to let store_constructor know that we may not have to
4472 clear a substructure if the outer structure has already been cleared. */
4473
4474 static void
4475 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4476 HOST_WIDE_INT bitpos, enum machine_mode mode,
4477 tree exp, tree type, int cleared, int alias_set)
4478 {
4479 if (TREE_CODE (exp) == CONSTRUCTOR
4480 && bitpos % BITS_PER_UNIT == 0
4481 /* If we have a nonzero bitpos for a register target, then we just
4482 let store_field do the bitfield handling. This is unlikely to
4483 generate unnecessary clear instructions anyways. */
4484 && (bitpos == 0 || GET_CODE (target) == MEM))
4485 {
4486 if (GET_CODE (target) == MEM)
4487 target
4488 = adjust_address (target,
4489 GET_MODE (target) == BLKmode
4490 || 0 != (bitpos
4491 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4492 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4493
4494
4495 /* Update the alias set, if required. */
4496 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4497 && MEM_ALIAS_SET (target) != 0)
4498 {
4499 target = copy_rtx (target);
4500 set_mem_alias_set (target, alias_set);
4501 }
4502
4503 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4504 }
4505 else
4506 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4507 alias_set);
4508 }
4509
4510 /* Store the value of constructor EXP into the rtx TARGET.
4511 TARGET is either a REG or a MEM; we know it cannot conflict, since
4512 safe_from_p has been called.
4513 CLEARED is true if TARGET is known to have been zero'd.
4514 SIZE is the number of bytes of TARGET we are allowed to modify: this
4515 may not be the same as the size of EXP if we are assigning to a field
4516 which has been packed to exclude padding bits. */
4517
4518 static void
4519 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4520 {
4521 tree type = TREE_TYPE (exp);
4522 #ifdef WORD_REGISTER_OPERATIONS
4523 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4524 #endif
4525
4526 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4527 || TREE_CODE (type) == QUAL_UNION_TYPE)
4528 {
4529 tree elt;
4530
4531 /* If size is zero or the target is already cleared, do nothing. */
4532 if (size == 0 || cleared)
4533 cleared = 1;
4534 /* We either clear the aggregate or indicate the value is dead. */
4535 else if ((TREE_CODE (type) == UNION_TYPE
4536 || TREE_CODE (type) == QUAL_UNION_TYPE)
4537 && ! CONSTRUCTOR_ELTS (exp))
4538 /* If the constructor is empty, clear the union. */
4539 {
4540 clear_storage (target, expr_size (exp));
4541 cleared = 1;
4542 }
4543
4544 /* If we are building a static constructor into a register,
4545 set the initial value as zero so we can fold the value into
4546 a constant. But if more than one register is involved,
4547 this probably loses. */
4548 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4549 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4550 {
4551 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4552 cleared = 1;
4553 }
4554
4555 /* If the constructor has fewer fields than the structure
4556 or if we are initializing the structure to mostly zeros,
4557 clear the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since clear_storage
4559 can't handle this case. */
4560 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4561 || mostly_zeros_p (exp))
4562 && (GET_CODE (target) != REG
4563 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4564 == size)))
4565 {
4566 rtx xtarget = target;
4567
4568 if (readonly_fields_p (type))
4569 {
4570 xtarget = copy_rtx (xtarget);
4571 RTX_UNCHANGING_P (xtarget) = 1;
4572 }
4573
4574 clear_storage (xtarget, GEN_INT (size));
4575 cleared = 1;
4576 }
4577
4578 if (! cleared)
4579 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4580
4581 /* Store each element of the constructor into
4582 the corresponding field of TARGET. */
4583
4584 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4585 {
4586 tree field = TREE_PURPOSE (elt);
4587 tree value = TREE_VALUE (elt);
4588 enum machine_mode mode;
4589 HOST_WIDE_INT bitsize;
4590 HOST_WIDE_INT bitpos = 0;
4591 tree offset;
4592 rtx to_rtx = target;
4593
4594 /* Just ignore missing fields.
4595 We cleared the whole structure, above,
4596 if any fields are missing. */
4597 if (field == 0)
4598 continue;
4599
4600 if (cleared && is_zeros_p (value))
4601 continue;
4602
4603 if (host_integerp (DECL_SIZE (field), 1))
4604 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4605 else
4606 bitsize = -1;
4607
4608 mode = DECL_MODE (field);
4609 if (DECL_BIT_FIELD (field))
4610 mode = VOIDmode;
4611
4612 offset = DECL_FIELD_OFFSET (field);
4613 if (host_integerp (offset, 0)
4614 && host_integerp (bit_position (field), 0))
4615 {
4616 bitpos = int_bit_position (field);
4617 offset = 0;
4618 }
4619 else
4620 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4621
4622 if (offset)
4623 {
4624 rtx offset_rtx;
4625
4626 if (CONTAINS_PLACEHOLDER_P (offset))
4627 offset = build (WITH_RECORD_EXPR, sizetype,
4628 offset, make_tree (TREE_TYPE (exp), target));
4629
4630 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4631 if (GET_CODE (to_rtx) != MEM)
4632 abort ();
4633
4634 #ifdef POINTERS_EXTEND_UNSIGNED
4635 if (GET_MODE (offset_rtx) != Pmode)
4636 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4637 #else
4638 if (GET_MODE (offset_rtx) != ptr_mode)
4639 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4640 #endif
4641
4642 to_rtx = offset_address (to_rtx, offset_rtx,
4643 highest_pow2_factor (offset));
4644 }
4645
4646 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4647 on the MEM might lead to scheduling the clearing after the
4648 store. */
4649 if (TREE_READONLY (field) && !cleared)
4650 {
4651 if (GET_CODE (to_rtx) == MEM)
4652 to_rtx = copy_rtx (to_rtx);
4653
4654 RTX_UNCHANGING_P (to_rtx) = 1;
4655 }
4656
4657 #ifdef WORD_REGISTER_OPERATIONS
4658 /* If this initializes a field that is smaller than a word, at the
4659 start of a word, try to widen it to a full word.
4660 This special case allows us to output C++ member function
4661 initializations in a form that the optimizers can understand. */
4662 if (GET_CODE (target) == REG
4663 && bitsize < BITS_PER_WORD
4664 && bitpos % BITS_PER_WORD == 0
4665 && GET_MODE_CLASS (mode) == MODE_INT
4666 && TREE_CODE (value) == INTEGER_CST
4667 && exp_size >= 0
4668 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4669 {
4670 tree type = TREE_TYPE (value);
4671
4672 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4673 {
4674 type = (*lang_hooks.types.type_for_size)
4675 (BITS_PER_WORD, TREE_UNSIGNED (type));
4676 value = convert (type, value);
4677 }
4678
4679 if (BYTES_BIG_ENDIAN)
4680 value
4681 = fold (build (LSHIFT_EXPR, type, value,
4682 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4683 bitsize = BITS_PER_WORD;
4684 mode = word_mode;
4685 }
4686 #endif
4687
4688 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4689 && DECL_NONADDRESSABLE_P (field))
4690 {
4691 to_rtx = copy_rtx (to_rtx);
4692 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4693 }
4694
4695 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4696 value, type, cleared,
4697 get_alias_set (TREE_TYPE (field)));
4698 }
4699 }
4700 else if (TREE_CODE (type) == ARRAY_TYPE
4701 || TREE_CODE (type) == VECTOR_TYPE)
4702 {
4703 tree elt;
4704 int i;
4705 int need_to_clear;
4706 tree domain = TYPE_DOMAIN (type);
4707 tree elttype = TREE_TYPE (type);
4708 int const_bounds_p;
4709 HOST_WIDE_INT minelt = 0;
4710 HOST_WIDE_INT maxelt = 0;
4711
4712 /* Vectors are like arrays, but the domain is stored via an array
4713 type indirectly. */
4714 if (TREE_CODE (type) == VECTOR_TYPE)
4715 {
4716 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4717 the same field as TYPE_DOMAIN, we are not guaranteed that
4718 it always will. */
4719 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4720 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4721 }
4722
4723 const_bounds_p = (TYPE_MIN_VALUE (domain)
4724 && TYPE_MAX_VALUE (domain)
4725 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4726 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4727
4728 /* If we have constant bounds for the range of the type, get them. */
4729 if (const_bounds_p)
4730 {
4731 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4732 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4733 }
4734
4735 /* If the constructor has fewer elements than the array,
4736 clear the whole array first. Similarly if this is
4737 static constructor of a non-BLKmode object. */
4738 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4739 need_to_clear = 1;
4740 else
4741 {
4742 HOST_WIDE_INT count = 0, zero_count = 0;
4743 need_to_clear = ! const_bounds_p;
4744
4745 /* This loop is a more accurate version of the loop in
4746 mostly_zeros_p (it handles RANGE_EXPR in an index).
4747 It is also needed to check for missing elements. */
4748 for (elt = CONSTRUCTOR_ELTS (exp);
4749 elt != NULL_TREE && ! need_to_clear;
4750 elt = TREE_CHAIN (elt))
4751 {
4752 tree index = TREE_PURPOSE (elt);
4753 HOST_WIDE_INT this_node_count;
4754
4755 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4756 {
4757 tree lo_index = TREE_OPERAND (index, 0);
4758 tree hi_index = TREE_OPERAND (index, 1);
4759
4760 if (! host_integerp (lo_index, 1)
4761 || ! host_integerp (hi_index, 1))
4762 {
4763 need_to_clear = 1;
4764 break;
4765 }
4766
4767 this_node_count = (tree_low_cst (hi_index, 1)
4768 - tree_low_cst (lo_index, 1) + 1);
4769 }
4770 else
4771 this_node_count = 1;
4772
4773 count += this_node_count;
4774 if (mostly_zeros_p (TREE_VALUE (elt)))
4775 zero_count += this_node_count;
4776 }
4777
4778 /* Clear the entire array first if there are any missing elements,
4779 or if the incidence of zero elements is >= 75%. */
4780 if (! need_to_clear
4781 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4782 need_to_clear = 1;
4783 }
4784
4785 if (need_to_clear && size > 0)
4786 {
4787 if (! cleared)
4788 {
4789 if (REG_P (target))
4790 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4791 else
4792 clear_storage (target, GEN_INT (size));
4793 }
4794 cleared = 1;
4795 }
4796 else if (REG_P (target))
4797 /* Inform later passes that the old value is dead. */
4798 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4799
4800 /* Store each element of the constructor into
4801 the corresponding element of TARGET, determined
4802 by counting the elements. */
4803 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4804 elt;
4805 elt = TREE_CHAIN (elt), i++)
4806 {
4807 enum machine_mode mode;
4808 HOST_WIDE_INT bitsize;
4809 HOST_WIDE_INT bitpos;
4810 int unsignedp;
4811 tree value = TREE_VALUE (elt);
4812 tree index = TREE_PURPOSE (elt);
4813 rtx xtarget = target;
4814
4815 if (cleared && is_zeros_p (value))
4816 continue;
4817
4818 unsignedp = TREE_UNSIGNED (elttype);
4819 mode = TYPE_MODE (elttype);
4820 if (mode == BLKmode)
4821 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4822 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4823 : -1);
4824 else
4825 bitsize = GET_MODE_BITSIZE (mode);
4826
4827 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4828 {
4829 tree lo_index = TREE_OPERAND (index, 0);
4830 tree hi_index = TREE_OPERAND (index, 1);
4831 rtx index_r, pos_rtx, loop_end;
4832 struct nesting *loop;
4833 HOST_WIDE_INT lo, hi, count;
4834 tree position;
4835
4836 /* If the range is constant and "small", unroll the loop. */
4837 if (const_bounds_p
4838 && host_integerp (lo_index, 0)
4839 && host_integerp (hi_index, 0)
4840 && (lo = tree_low_cst (lo_index, 0),
4841 hi = tree_low_cst (hi_index, 0),
4842 count = hi - lo + 1,
4843 (GET_CODE (target) != MEM
4844 || count <= 2
4845 || (host_integerp (TYPE_SIZE (elttype), 1)
4846 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4847 <= 40 * 8)))))
4848 {
4849 lo -= minelt; hi -= minelt;
4850 for (; lo <= hi; lo++)
4851 {
4852 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4853
4854 if (GET_CODE (target) == MEM
4855 && !MEM_KEEP_ALIAS_SET_P (target)
4856 && TREE_CODE (type) == ARRAY_TYPE
4857 && TYPE_NONALIASED_COMPONENT (type))
4858 {
4859 target = copy_rtx (target);
4860 MEM_KEEP_ALIAS_SET_P (target) = 1;
4861 }
4862
4863 store_constructor_field
4864 (target, bitsize, bitpos, mode, value, type, cleared,
4865 get_alias_set (elttype));
4866 }
4867 }
4868 else
4869 {
4870 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4871 loop_end = gen_label_rtx ();
4872
4873 unsignedp = TREE_UNSIGNED (domain);
4874
4875 index = build_decl (VAR_DECL, NULL_TREE, domain);
4876
4877 index_r
4878 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4879 &unsignedp, 0));
4880 SET_DECL_RTL (index, index_r);
4881 if (TREE_CODE (value) == SAVE_EXPR
4882 && SAVE_EXPR_RTL (value) == 0)
4883 {
4884 /* Make sure value gets expanded once before the
4885 loop. */
4886 expand_expr (value, const0_rtx, VOIDmode, 0);
4887 emit_queue ();
4888 }
4889 store_expr (lo_index, index_r, 0);
4890 loop = expand_start_loop (0);
4891
4892 /* Assign value to element index. */
4893 position
4894 = convert (ssizetype,
4895 fold (build (MINUS_EXPR, TREE_TYPE (index),
4896 index, TYPE_MIN_VALUE (domain))));
4897 position = size_binop (MULT_EXPR, position,
4898 convert (ssizetype,
4899 TYPE_SIZE_UNIT (elttype)));
4900
4901 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4902 xtarget = offset_address (target, pos_rtx,
4903 highest_pow2_factor (position));
4904 xtarget = adjust_address (xtarget, mode, 0);
4905 if (TREE_CODE (value) == CONSTRUCTOR)
4906 store_constructor (value, xtarget, cleared,
4907 bitsize / BITS_PER_UNIT);
4908 else
4909 store_expr (value, xtarget, 0);
4910
4911 expand_exit_loop_if_false (loop,
4912 build (LT_EXPR, integer_type_node,
4913 index, hi_index));
4914
4915 expand_increment (build (PREINCREMENT_EXPR,
4916 TREE_TYPE (index),
4917 index, integer_one_node), 0, 0);
4918 expand_end_loop ();
4919 emit_label (loop_end);
4920 }
4921 }
4922 else if ((index != 0 && ! host_integerp (index, 0))
4923 || ! host_integerp (TYPE_SIZE (elttype), 1))
4924 {
4925 tree position;
4926
4927 if (index == 0)
4928 index = ssize_int (1);
4929
4930 if (minelt)
4931 index = convert (ssizetype,
4932 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4933 TYPE_MIN_VALUE (domain))));
4934
4935 position = size_binop (MULT_EXPR, index,
4936 convert (ssizetype,
4937 TYPE_SIZE_UNIT (elttype)));
4938 xtarget = offset_address (target,
4939 expand_expr (position, 0, VOIDmode, 0),
4940 highest_pow2_factor (position));
4941 xtarget = adjust_address (xtarget, mode, 0);
4942 store_expr (value, xtarget, 0);
4943 }
4944 else
4945 {
4946 if (index != 0)
4947 bitpos = ((tree_low_cst (index, 0) - minelt)
4948 * tree_low_cst (TYPE_SIZE (elttype), 1));
4949 else
4950 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4951
4952 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4953 && TREE_CODE (type) == ARRAY_TYPE
4954 && TYPE_NONALIASED_COMPONENT (type))
4955 {
4956 target = copy_rtx (target);
4957 MEM_KEEP_ALIAS_SET_P (target) = 1;
4958 }
4959
4960 store_constructor_field (target, bitsize, bitpos, mode, value,
4961 type, cleared, get_alias_set (elttype));
4962
4963 }
4964 }
4965 }
4966
4967 /* Set constructor assignments. */
4968 else if (TREE_CODE (type) == SET_TYPE)
4969 {
4970 tree elt = CONSTRUCTOR_ELTS (exp);
4971 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4972 tree domain = TYPE_DOMAIN (type);
4973 tree domain_min, domain_max, bitlength;
4974
4975 /* The default implementation strategy is to extract the constant
4976 parts of the constructor, use that to initialize the target,
4977 and then "or" in whatever non-constant ranges we need in addition.
4978
4979 If a large set is all zero or all ones, it is
4980 probably better to set it using memset (if available) or bzero.
4981 Also, if a large set has just a single range, it may also be
4982 better to first clear the whole set (using bzero/memset), and
4983 then set just the bits we want. */
4984
4985 /* Check for all zeros. */
4986 if (elt == NULL_TREE && size > 0)
4987 {
4988 if (!cleared)
4989 clear_storage (target, GEN_INT (size));
4990 return;
4991 }
4992
4993 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4994 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4995 bitlength = size_binop (PLUS_EXPR,
4996 size_diffop (domain_max, domain_min),
4997 ssize_int (1));
4998
4999 nbits = tree_low_cst (bitlength, 1);
5000
5001 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5002 are "complicated" (more than one range), initialize (the
5003 constant parts) by copying from a constant. */
5004 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5005 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5006 {
5007 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5008 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5009 char *bit_buffer = alloca (nbits);
5010 HOST_WIDE_INT word = 0;
5011 unsigned int bit_pos = 0;
5012 unsigned int ibit = 0;
5013 unsigned int offset = 0; /* In bytes from beginning of set. */
5014
5015 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5016 for (;;)
5017 {
5018 if (bit_buffer[ibit])
5019 {
5020 if (BYTES_BIG_ENDIAN)
5021 word |= (1 << (set_word_size - 1 - bit_pos));
5022 else
5023 word |= 1 << bit_pos;
5024 }
5025
5026 bit_pos++; ibit++;
5027 if (bit_pos >= set_word_size || ibit == nbits)
5028 {
5029 if (word != 0 || ! cleared)
5030 {
5031 rtx datum = GEN_INT (word);
5032 rtx to_rtx;
5033
5034 /* The assumption here is that it is safe to use
5035 XEXP if the set is multi-word, but not if
5036 it's single-word. */
5037 if (GET_CODE (target) == MEM)
5038 to_rtx = adjust_address (target, mode, offset);
5039 else if (offset == 0)
5040 to_rtx = target;
5041 else
5042 abort ();
5043 emit_move_insn (to_rtx, datum);
5044 }
5045
5046 if (ibit == nbits)
5047 break;
5048 word = 0;
5049 bit_pos = 0;
5050 offset += set_word_size / BITS_PER_UNIT;
5051 }
5052 }
5053 }
5054 else if (!cleared)
5055 /* Don't bother clearing storage if the set is all ones. */
5056 if (TREE_CHAIN (elt) != NULL_TREE
5057 || (TREE_PURPOSE (elt) == NULL_TREE
5058 ? nbits != 1
5059 : ( ! host_integerp (TREE_VALUE (elt), 0)
5060 || ! host_integerp (TREE_PURPOSE (elt), 0)
5061 || (tree_low_cst (TREE_VALUE (elt), 0)
5062 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5063 != (HOST_WIDE_INT) nbits))))
5064 clear_storage (target, expr_size (exp));
5065
5066 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5067 {
5068 /* Start of range of element or NULL. */
5069 tree startbit = TREE_PURPOSE (elt);
5070 /* End of range of element, or element value. */
5071 tree endbit = TREE_VALUE (elt);
5072 HOST_WIDE_INT startb, endb;
5073 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5074
5075 bitlength_rtx = expand_expr (bitlength,
5076 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5077
5078 /* Handle non-range tuple element like [ expr ]. */
5079 if (startbit == NULL_TREE)
5080 {
5081 startbit = save_expr (endbit);
5082 endbit = startbit;
5083 }
5084
5085 startbit = convert (sizetype, startbit);
5086 endbit = convert (sizetype, endbit);
5087 if (! integer_zerop (domain_min))
5088 {
5089 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5090 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5091 }
5092 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5093 EXPAND_CONST_ADDRESS);
5094 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5095 EXPAND_CONST_ADDRESS);
5096
5097 if (REG_P (target))
5098 {
5099 targetx
5100 = assign_temp
5101 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5102 (GET_MODE (target), 0),
5103 TYPE_QUAL_CONST)),
5104 0, 1, 1);
5105 emit_move_insn (targetx, target);
5106 }
5107
5108 else if (GET_CODE (target) == MEM)
5109 targetx = target;
5110 else
5111 abort ();
5112
5113 /* Optimization: If startbit and endbit are constants divisible
5114 by BITS_PER_UNIT, call memset instead. */
5115 if (TARGET_MEM_FUNCTIONS
5116 && TREE_CODE (startbit) == INTEGER_CST
5117 && TREE_CODE (endbit) == INTEGER_CST
5118 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5119 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5120 {
5121 emit_library_call (memset_libfunc, LCT_NORMAL,
5122 VOIDmode, 3,
5123 plus_constant (XEXP (targetx, 0),
5124 startb / BITS_PER_UNIT),
5125 Pmode,
5126 constm1_rtx, TYPE_MODE (integer_type_node),
5127 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5128 TYPE_MODE (sizetype));
5129 }
5130 else
5131 emit_library_call (setbits_libfunc, LCT_NORMAL,
5132 VOIDmode, 4, XEXP (targetx, 0),
5133 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5134 startbit_rtx, TYPE_MODE (sizetype),
5135 endbit_rtx, TYPE_MODE (sizetype));
5136
5137 if (REG_P (target))
5138 emit_move_insn (target, targetx);
5139 }
5140 }
5141
5142 else
5143 abort ();
5144 }
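/* Illustrative sketch, kept under "#if 0" so it is never built: how the
   SET_TYPE branch above packs a buffer of 0/1 flags into words before
   storing them.  The 32-bit word size and the endianness flag are
   hypothetical example parameters; the real code takes them from the
   target via TYPE_ALIGN and BYTES_BIG_ENDIAN, and skips storing all-zero
   words when the target is already cleared.  */
#if 0
static void
example_pack_set_bits (const char *bit_buffer, unsigned int nbits,
                       unsigned long *words, int bytes_big_endian)
{
  const unsigned int set_word_size = 32;  /* hypothetical word size in bits */
  unsigned int ibit, bit_pos = 0, word_index = 0;
  unsigned long word = 0;

  for (ibit = 0; ibit < nbits; ibit++)
    {
      if (bit_buffer[ibit])
        word |= (bytes_big_endian
                 ? 1UL << (set_word_size - 1 - bit_pos)
                 : 1UL << bit_pos);

      /* Flush a word once it is full, or once the last bit is placed.  */
      if (++bit_pos >= set_word_size || ibit + 1 == nbits)
        {
          words[word_index++] = word;
          word = 0;
          bit_pos = 0;
        }
    }
}
#endif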
5145
5146 /* Store the value of EXP (an expression tree)
5147 into a subfield of TARGET which has mode MODE and occupies
5148 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5149 If MODE is VOIDmode, it means that we are storing into a bit-field.
5150
5151 If VALUE_MODE is VOIDmode, return nothing in particular.
5152 UNSIGNEDP is not used in this case.
5153
5154 Otherwise, return an rtx for the value stored. This rtx
5155 has mode VALUE_MODE if that is convenient to do.
5156 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5157
5158 TYPE is the type of the underlying object,
5159
5160 ALIAS_SET is the alias set for the destination. This value will
5161 (in general) be different from that for TARGET, since TARGET is a
5162 reference to the containing structure. */
5163
5164 static rtx
5165 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5166 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5167 int unsignedp, tree type, int alias_set)
5168 {
5169 HOST_WIDE_INT width_mask = 0;
5170
5171 if (TREE_CODE (exp) == ERROR_MARK)
5172 return const0_rtx;
5173
5174 /* If we have nothing to store, do nothing unless the expression has
5175 side-effects. */
5176 if (bitsize == 0)
5177 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5178 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5179 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5180
5181 /* If we are storing into an unaligned field of an aligned union that is
5182 in a register, we may have the mode of TARGET being an integer mode but
5183 MODE == BLKmode. In that case, get an aligned object whose size and
5184 alignment are the same as TARGET and store TARGET into it (we can avoid
5185 the store if the field being stored is the entire width of TARGET). Then
5186 call ourselves recursively to store the field into a BLKmode version of
5187 that object. Finally, load from the object into TARGET. This is not
5188 very efficient in general, but should only be slightly more expensive
5189 than the otherwise-required unaligned accesses. Perhaps this can be
5190 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5191 twice, once with emit_move_insn and once via store_field. */
5192
5193 if (mode == BLKmode
5194 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5195 {
5196 rtx object = assign_temp (type, 0, 1, 1);
5197 rtx blk_object = adjust_address (object, BLKmode, 0);
5198
5199 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5200 emit_move_insn (object, target);
5201
5202 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5203 alias_set);
5204
5205 emit_move_insn (target, object);
5206
5207 /* We want to return the BLKmode version of the data. */
5208 return blk_object;
5209 }
5210
5211 if (GET_CODE (target) == CONCAT)
5212 {
5213 /* We're storing into a struct containing a single __complex. */
5214
5215 if (bitpos != 0)
5216 abort ();
5217 return store_expr (exp, target, 0);
5218 }
5219
5220 /* If the structure is in a register or if the component
5221 is a bit field, we cannot use addressing to access it.
5222 Use bit-field techniques or SUBREG to store in it. */
5223
5224 if (mode == VOIDmode
5225 || (mode != BLKmode && ! direct_store[(int) mode]
5226 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5227 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5228 || GET_CODE (target) == REG
5229 || GET_CODE (target) == SUBREG
5230 /* If the field isn't aligned enough to store as an ordinary memref,
5231 store it as a bit field. */
5232 || (mode != BLKmode
5233 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5234 || bitpos % GET_MODE_ALIGNMENT (mode))
5235 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5236 || (bitpos % BITS_PER_UNIT != 0)))
5237 /* If the RHS and field are a constant size and the size of the
5238 RHS isn't the same size as the bitfield, we must use bitfield
5239 operations. */
5240 || (bitsize >= 0
5241 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5242 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5243 {
5244 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5245
5246 /* If BITSIZE is narrower than the size of the type of EXP
5247 we will be narrowing TEMP. Normally, what's wanted are the
5248 low-order bits. However, if EXP's type is a record and this is a
5249 big-endian machine, we want the upper BITSIZE bits. */
5250 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5251 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5252 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5253 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5254 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5255 - bitsize),
5256 NULL_RTX, 1);
5257
5258 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5259 MODE. */
5260 if (mode != VOIDmode && mode != BLKmode
5261 && mode != TYPE_MODE (TREE_TYPE (exp)))
5262 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5263
5264 /* If the modes of TARGET and TEMP are both BLKmode, both
5265 must be in memory and BITPOS must be aligned on a byte
5266 boundary. If so, we simply do a block copy. */
5267 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5268 {
5269 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5270 || bitpos % BITS_PER_UNIT != 0)
5271 abort ();
5272
5273 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5274 emit_block_move (target, temp,
5275 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5276 / BITS_PER_UNIT),
5277 BLOCK_OP_NORMAL);
5278
5279 return value_mode == VOIDmode ? const0_rtx : target;
5280 }
5281
5282 /* Store the value in the bitfield. */
5283 store_bit_field (target, bitsize, bitpos, mode, temp,
5284 int_size_in_bytes (type));
5285
5286 if (value_mode != VOIDmode)
5287 {
5288 /* The caller wants an rtx for the value.
5289 If possible, avoid refetching from the bitfield itself. */
5290 if (width_mask != 0
5291 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5292 {
5293 tree count;
5294 enum machine_mode tmode;
5295
5296 tmode = GET_MODE (temp);
5297 if (tmode == VOIDmode)
5298 tmode = value_mode;
5299
5300 if (unsignedp)
5301 return expand_and (tmode, temp,
5302 gen_int_mode (width_mask, tmode),
5303 NULL_RTX);
5304
5305 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5306 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5307 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5308 }
5309
5310 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5311 NULL_RTX, value_mode, VOIDmode,
5312 int_size_in_bytes (type));
5313 }
5314 return const0_rtx;
5315 }
5316 else
5317 {
5318 rtx addr = XEXP (target, 0);
5319 rtx to_rtx = target;
5320
5321 /* If a value is wanted, it must be the lhs;
5322 so make the address stable for multiple use. */
5323
5324 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5325 && ! CONSTANT_ADDRESS_P (addr)
5326 /* A frame-pointer reference is already stable. */
5327 && ! (GET_CODE (addr) == PLUS
5328 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5329 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5330 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5331 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5332
5333 /* Now build a reference to just the desired component. */
5334
5335 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5336
5337 if (to_rtx == target)
5338 to_rtx = copy_rtx (to_rtx);
5339
5340 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5341 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5342 set_mem_alias_set (to_rtx, alias_set);
5343
5344 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5345 }
5346 }
5347 \f
5348 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5349 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5350 codes and find the ultimate containing object, which we return.
5351
5352 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5353 bit position, and *PUNSIGNEDP to the signedness of the field.
5354 If the position of the field is variable, we store a tree
5355 giving the variable offset (in units) in *POFFSET.
5356 This offset is in addition to the bit position.
5357 If the position is not variable, we store 0 in *POFFSET.
5358
5359 If any of the extraction expressions is volatile,
5360 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5361
5362 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5363 is a mode that can be used to access the field. In that case, *PBITSIZE
5364 is redundant.
5365
5366 If the field describes a variable-sized object, *PMODE is set to
5367 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5368 this case, but the address of the object can be found. */
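/* A hedged worked example (the record layout is hypothetical): for a
   COMPONENT_REF R.F, where F is a non-bit-field "int" member placed 4
   bytes into R on a target with 32-bit ints, this returns the tree for R
   and sets *PBITSIZE to 32, *PBITPOS to 32, *POFFSET to 0, *PMODE to F's
   mode (SImode here), and *PUNSIGNEDP from F's signedness.  */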
5369
5370 tree
5371 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5372 HOST_WIDE_INT *pbitpos, tree *poffset,
5373 enum machine_mode *pmode, int *punsignedp,
5374 int *pvolatilep)
5375 {
5376 tree size_tree = 0;
5377 enum machine_mode mode = VOIDmode;
5378 tree offset = size_zero_node;
5379 tree bit_offset = bitsize_zero_node;
5380 tree placeholder_ptr = 0;
5381 tree tem;
5382
5383 /* First get the mode, signedness, and size. We do this from just the
5384 outermost expression. */
5385 if (TREE_CODE (exp) == COMPONENT_REF)
5386 {
5387 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5388 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5389 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5390
5391 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5392 }
5393 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5394 {
5395 size_tree = TREE_OPERAND (exp, 1);
5396 *punsignedp = TREE_UNSIGNED (exp);
5397 }
5398 else
5399 {
5400 mode = TYPE_MODE (TREE_TYPE (exp));
5401 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5402
5403 if (mode == BLKmode)
5404 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5405 else
5406 *pbitsize = GET_MODE_BITSIZE (mode);
5407 }
5408
5409 if (size_tree != 0)
5410 {
5411 if (! host_integerp (size_tree, 1))
5412 mode = BLKmode, *pbitsize = -1;
5413 else
5414 *pbitsize = tree_low_cst (size_tree, 1);
5415 }
5416
5417 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5418 and find the ultimate containing object. */
5419 while (1)
5420 {
5421 if (TREE_CODE (exp) == BIT_FIELD_REF)
5422 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5423 else if (TREE_CODE (exp) == COMPONENT_REF)
5424 {
5425 tree field = TREE_OPERAND (exp, 1);
5426 tree this_offset = DECL_FIELD_OFFSET (field);
5427
5428 /* If this field hasn't been filled in yet, don't go
5429 past it. This should only happen when folding expressions
5430 made during type construction. */
5431 if (this_offset == 0)
5432 break;
5433 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5434 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5435
5436 offset = size_binop (PLUS_EXPR, offset, this_offset);
5437 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5438 DECL_FIELD_BIT_OFFSET (field));
5439
5440 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5441 }
5442
5443 else if (TREE_CODE (exp) == ARRAY_REF
5444 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5445 {
5446 tree index = TREE_OPERAND (exp, 1);
5447 tree array = TREE_OPERAND (exp, 0);
5448 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5449 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5450 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5451
5452 /* We assume all arrays have sizes that are a multiple of a byte.
5453 First subtract the lower bound, if any, in the type of the
5454 index, then convert to sizetype and multiply by the size of the
5455 array element. */
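/* Hedged worked example (hypothetical array): for A[I] where A has
   elements of 4 bytes and a lower bound of 1, this adds
   (sizetype) (I - 1) * 4 to OFFSET.  */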
5456 if (low_bound != 0 && ! integer_zerop (low_bound))
5457 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5458 index, low_bound));
5459
5460 /* If the index has a self-referential type, pass it to a
5461 WITH_RECORD_EXPR; if the component size does, pass our
5462 component to one. */
5463 if (CONTAINS_PLACEHOLDER_P (index))
5464 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5465 if (CONTAINS_PLACEHOLDER_P (unit_size))
5466 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5467
5468 offset = size_binop (PLUS_EXPR, offset,
5469 size_binop (MULT_EXPR,
5470 convert (sizetype, index),
5471 unit_size));
5472 }
5473
5474 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5475 {
5476 tree new = find_placeholder (exp, &placeholder_ptr);
5477
5478 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5479 We might have been called from tree optimization where we
5480 haven't set up an object yet. */
5481 if (new == 0)
5482 break;
5483 else
5484 exp = new;
5485
5486 continue;
5487 }
5488
5489 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5490 conversions that don't change the mode, and all view conversions
5491 except those that need to "step up" the alignment. */
5492 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5493 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5494 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5495 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5496 && STRICT_ALIGNMENT
5497 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5498 < BIGGEST_ALIGNMENT)
5499 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5500 || TYPE_ALIGN_OK (TREE_TYPE
5501 (TREE_OPERAND (exp, 0))))))
5502 && ! ((TREE_CODE (exp) == NOP_EXPR
5503 || TREE_CODE (exp) == CONVERT_EXPR)
5504 && (TYPE_MODE (TREE_TYPE (exp))
5505 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5506 break;
5507
5508 /* If any reference in the chain is volatile, the effect is volatile. */
5509 if (TREE_THIS_VOLATILE (exp))
5510 *pvolatilep = 1;
5511
5512 exp = TREE_OPERAND (exp, 0);
5513 }
5514
5515 /* If OFFSET is constant, see if we can return the whole thing as a
5516 constant bit position. Otherwise, split it up. */
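/* Hedged arithmetic example: with OFFSET == 4 bytes and BIT_OFFSET == 3,
   and assuming BITS_PER_UNIT == 8, the combined position is
   4 * 8 + 3 == 35, so *PBITPOS is set to 35 and *POFFSET to 0.  */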
5517 if (host_integerp (offset, 0)
5518 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5519 bitsize_unit_node))
5520 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5521 && host_integerp (tem, 0))
5522 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5523 else
5524 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5525
5526 *pmode = mode;
5527 return exp;
5528 }
5529
5530 /* Return 1 if T is an expression that get_inner_reference handles. */
5531
5532 int
5533 handled_component_p (tree t)
5534 {
5535 switch (TREE_CODE (t))
5536 {
5537 case BIT_FIELD_REF:
5538 case COMPONENT_REF:
5539 case ARRAY_REF:
5540 case ARRAY_RANGE_REF:
5541 case NON_LVALUE_EXPR:
5542 case VIEW_CONVERT_EXPR:
5543 return 1;
5544
5545 /* ??? Sure they are handled, but get_inner_reference may return
5546 a different PBITSIZE, depending upon whether the expression is
5547 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5548 case NOP_EXPR:
5549 case CONVERT_EXPR:
5550 return (TYPE_MODE (TREE_TYPE (t))
5551 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5552
5553 default:
5554 return 0;
5555 }
5556 }
5557 \f
5558 /* Given an rtx VALUE that may contain additions and multiplications, return
5559 an equivalent value that just refers to a register, memory, or constant.
5560 This is done by generating instructions to perform the arithmetic and
5561 returning a pseudo-register containing the value.
5562
5563 The returned value may be a REG, SUBREG, MEM or constant. */
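/* For illustration only, a hedged sketch: if VALUE is
   (plus (reg X) (const_int 4)), then

     rtx y = force_operand (value, NULL_RTX);

   emits an add insn and returns a pseudo register holding X + 4, which
   can then be used wherever a general operand is required.  */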
5564
5565 rtx
5566 force_operand (rtx value, rtx target)
5567 {
5568 rtx op1, op2;
5569 /* Use subtarget as the target for operand 0 of a binary operation. */
5570 rtx subtarget = get_subtarget (target);
5571 enum rtx_code code = GET_CODE (value);
5572
5573 /* Check for a PIC address load. */
5574 if ((code == PLUS || code == MINUS)
5575 && XEXP (value, 0) == pic_offset_table_rtx
5576 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5577 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5578 || GET_CODE (XEXP (value, 1)) == CONST))
5579 {
5580 if (!subtarget)
5581 subtarget = gen_reg_rtx (GET_MODE (value));
5582 emit_move_insn (subtarget, value);
5583 return subtarget;
5584 }
5585
5586 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5587 {
5588 if (!target)
5589 target = gen_reg_rtx (GET_MODE (value));
5590 convert_move (target, force_operand (XEXP (value, 0), NULL),
5591 code == ZERO_EXTEND);
5592 return target;
5593 }
5594
5595 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5596 {
5597 op2 = XEXP (value, 1);
5598 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5599 subtarget = 0;
5600 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5601 {
5602 code = PLUS;
5603 op2 = negate_rtx (GET_MODE (value), op2);
5604 }
5605
5606 /* Check for an addition with OP2 a constant integer and our first
5607 operand a PLUS of a virtual register and something else. In that
5608 case, we want to emit the sum of the virtual register and the
5609 constant first and then add the other value. This allows virtual
5610 register instantiation to simply modify the constant rather than
5611 creating another one around this addition. */
5612 if (code == PLUS && GET_CODE (op2) == CONST_INT
5613 && GET_CODE (XEXP (value, 0)) == PLUS
5614 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5615 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5616 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5617 {
5618 rtx temp = expand_simple_binop (GET_MODE (value), code,
5619 XEXP (XEXP (value, 0), 0), op2,
5620 subtarget, 0, OPTAB_LIB_WIDEN);
5621 return expand_simple_binop (GET_MODE (value), code, temp,
5622 force_operand (XEXP (XEXP (value,
5623 0), 1), 0),
5624 target, 0, OPTAB_LIB_WIDEN);
5625 }
5626
5627 op1 = force_operand (XEXP (value, 0), subtarget);
5628 op2 = force_operand (op2, NULL_RTX);
5629 switch (code)
5630 {
5631 case MULT:
5632 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5633 case DIV:
5634 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5635 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5636 target, 1, OPTAB_LIB_WIDEN);
5637 else
5638 return expand_divmod (0,
5639 FLOAT_MODE_P (GET_MODE (value))
5640 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5641 GET_MODE (value), op1, op2, target, 0);
5642 break;
5643 case MOD:
5644 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5645 target, 0);
5646 break;
5647 case UDIV:
5648 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5649 target, 1);
5650 break;
5651 case UMOD:
5652 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5653 target, 1);
5654 break;
5655 case ASHIFTRT:
5656 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5657 target, 0, OPTAB_LIB_WIDEN);
5658 break;
5659 default:
5660 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5661 target, 1, OPTAB_LIB_WIDEN);
5662 }
5663 }
5664 if (GET_RTX_CLASS (code) == '1')
5665 {
5666 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5667 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5668 }
5669
5670 #ifdef INSN_SCHEDULING
5671 /* On machines that have insn scheduling, we want all memory references to be
5672 explicit, so we need to deal with such paradoxical SUBREGs. */
5673 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5674 && (GET_MODE_SIZE (GET_MODE (value))
5675 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5676 value
5677 = simplify_gen_subreg (GET_MODE (value),
5678 force_reg (GET_MODE (SUBREG_REG (value)),
5679 force_operand (SUBREG_REG (value),
5680 NULL_RTX)),
5681 GET_MODE (SUBREG_REG (value)),
5682 SUBREG_BYTE (value));
5683 #endif
5684
5685 return value;
5686 }
5687 \f
5688 /* Subroutine of expand_expr: return nonzero iff there is no way that
5689 EXP can reference X, which is being modified. TOP_P is nonzero if this
5690 call is going to be used to determine whether we need a temporary
5691 for EXP, as opposed to a recursive call to this function.
5692
5693 It is always safe for this routine to return zero since it merely
5694 searches for optimization opportunities. */
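/* Typical use, as expand_operands does later in this file: before
   reusing TARGET while expanding another operand, callers check

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   so TARGET is kept only when EXP1 cannot reference it.  */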
5695
5696 int
5697 safe_from_p (rtx x, tree exp, int top_p)
5698 {
5699 rtx exp_rtl = 0;
5700 int i, nops;
5701 static tree save_expr_list;
5702
5703 if (x == 0
5704 /* If EXP has varying size, we MUST use a target since we currently
5705 have no way of allocating temporaries of variable size
5706 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5707 So we assume here that something at a higher level has prevented a
5708 clash. This is somewhat bogus, but the best we can do. Only
5709 do this when X is BLKmode and when we are at the top level. */
5710 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5711 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5712 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5713 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5714 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5715 != INTEGER_CST)
5716 && GET_MODE (x) == BLKmode)
5717 /* If X is in the outgoing argument area, it is always safe. */
5718 || (GET_CODE (x) == MEM
5719 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5720 || (GET_CODE (XEXP (x, 0)) == PLUS
5721 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5722 return 1;
5723
5724 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5725 find the underlying pseudo. */
5726 if (GET_CODE (x) == SUBREG)
5727 {
5728 x = SUBREG_REG (x);
5729 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5730 return 0;
5731 }
5732
5733 /* A SAVE_EXPR might appear many times in the expression passed to the
5734 top-level safe_from_p call, and if it has a complex subexpression,
5735 examining it multiple times could result in a combinatorial explosion.
5736 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5737 with optimization took about 28 minutes to compile -- even though it was
5738 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5739 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5740 we have processed. Note that the only test of top_p was above. */
5741
5742 if (top_p)
5743 {
5744 int rtn;
5745 tree t;
5746
5747 save_expr_list = 0;
5748
5749 rtn = safe_from_p (x, exp, 0);
5750
5751 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5752 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5753
5754 return rtn;
5755 }
5756
5757 /* Now look at our tree code and possibly recurse. */
5758 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5759 {
5760 case 'd':
5761 exp_rtl = DECL_RTL_IF_SET (exp);
5762 break;
5763
5764 case 'c':
5765 return 1;
5766
5767 case 'x':
5768 if (TREE_CODE (exp) == TREE_LIST)
5769 {
5770 while (1)
5771 {
5772 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5773 return 0;
5774 exp = TREE_CHAIN (exp);
5775 if (!exp)
5776 return 1;
5777 if (TREE_CODE (exp) != TREE_LIST)
5778 return safe_from_p (x, exp, 0);
5779 }
5780 }
5781 else if (TREE_CODE (exp) == ERROR_MARK)
5782 return 1; /* An already-visited SAVE_EXPR? */
5783 else
5784 return 0;
5785
5786 case '2':
5787 case '<':
5788 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5789 return 0;
5790 /* FALLTHRU */
5791
5792 case '1':
5793 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5794
5795 case 'e':
5796 case 'r':
5797 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5798 the expression. If it is set, we conflict iff we are that rtx or
5799 both are in memory. Otherwise, we check all operands of the
5800 expression recursively. */
5801
5802 switch (TREE_CODE (exp))
5803 {
5804 case ADDR_EXPR:
5805 /* If the operand is static or we are static, we can't conflict.
5806 Likewise if we don't conflict with the operand at all. */
5807 if (staticp (TREE_OPERAND (exp, 0))
5808 || TREE_STATIC (exp)
5809 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5810 return 1;
5811
5812 /* Otherwise, the only way this can conflict is if we are taking
5813 the address of a DECL and that address is part of X, which is
5814 very rare. */
5815 exp = TREE_OPERAND (exp, 0);
5816 if (DECL_P (exp))
5817 {
5818 if (!DECL_RTL_SET_P (exp)
5819 || GET_CODE (DECL_RTL (exp)) != MEM)
5820 return 0;
5821 else
5822 exp_rtl = XEXP (DECL_RTL (exp), 0);
5823 }
5824 break;
5825
5826 case INDIRECT_REF:
5827 if (GET_CODE (x) == MEM
5828 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5829 get_alias_set (exp)))
5830 return 0;
5831 break;
5832
5833 case CALL_EXPR:
5834 /* Assume that the call will clobber all hard registers and
5835 all of memory. */
5836 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5837 || GET_CODE (x) == MEM)
5838 return 0;
5839 break;
5840
5841 case RTL_EXPR:
5842 /* If a sequence exists, we would have to scan every instruction
5843 in the sequence to see if it was safe. This is probably not
5844 worthwhile. */
5845 if (RTL_EXPR_SEQUENCE (exp))
5846 return 0;
5847
5848 exp_rtl = RTL_EXPR_RTL (exp);
5849 break;
5850
5851 case WITH_CLEANUP_EXPR:
5852 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5853 break;
5854
5855 case CLEANUP_POINT_EXPR:
5856 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5857
5858 case SAVE_EXPR:
5859 exp_rtl = SAVE_EXPR_RTL (exp);
5860 if (exp_rtl)
5861 break;
5862
5863 /* If we've already scanned this, don't do it again. Otherwise,
5864 show we've scanned it and record for clearing the flag if we're
5865 going on. */
5866 if (TREE_PRIVATE (exp))
5867 return 1;
5868
5869 TREE_PRIVATE (exp) = 1;
5870 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5871 {
5872 TREE_PRIVATE (exp) = 0;
5873 return 0;
5874 }
5875
5876 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5877 return 1;
5878
5879 case BIND_EXPR:
5880 /* The only operand we look at is operand 1. The rest aren't
5881 part of the expression. */
5882 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5883
5884 default:
5885 break;
5886 }
5887
5888 /* If we have an rtx, we do not need to scan our operands. */
5889 if (exp_rtl)
5890 break;
5891
5892 nops = first_rtl_op (TREE_CODE (exp));
5893 for (i = 0; i < nops; i++)
5894 if (TREE_OPERAND (exp, i) != 0
5895 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5896 return 0;
5897
5898 /* If this is a language-specific tree code, it may require
5899 special handling. */
5900 if ((unsigned int) TREE_CODE (exp)
5901 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5902 && !(*lang_hooks.safe_from_p) (x, exp))
5903 return 0;
5904 }
5905
5906 /* If we have an rtl, find any enclosed object. Then see if we conflict
5907 with it. */
5908 if (exp_rtl)
5909 {
5910 if (GET_CODE (exp_rtl) == SUBREG)
5911 {
5912 exp_rtl = SUBREG_REG (exp_rtl);
5913 if (GET_CODE (exp_rtl) == REG
5914 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5915 return 0;
5916 }
5917
5918 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5919 are memory and they conflict. */
5920 return ! (rtx_equal_p (x, exp_rtl)
5921 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5922 && true_dependence (exp_rtl, VOIDmode, x,
5923 rtx_addr_varies_p)));
5924 }
5925
5926 /* If we reach here, it is safe. */
5927 return 1;
5928 }
5929
5930 /* Subroutine of expand_expr: return rtx if EXP is a
5931 variable or parameter; else return 0. */
5932
5933 static rtx
5934 var_rtx (tree exp)
5935 {
5936 STRIP_NOPS (exp);
5937 switch (TREE_CODE (exp))
5938 {
5939 case PARM_DECL:
5940 case VAR_DECL:
5941 return DECL_RTL (exp);
5942 default:
5943 return 0;
5944 }
5945 }
5946
5947 #ifdef MAX_INTEGER_COMPUTATION_MODE
5948
5949 void
5950 check_max_integer_computation_mode (tree exp)
5951 {
5952 enum tree_code code;
5953 enum machine_mode mode;
5954
5955 /* Strip any NOPs that don't change the mode. */
5956 STRIP_NOPS (exp);
5957 code = TREE_CODE (exp);
5958
5959 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5960 if (code == NOP_EXPR
5961 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5962 return;
5963
5964 /* First check the type of the overall operation. We need only look at
5965 unary, binary and relational operations. */
5966 if (TREE_CODE_CLASS (code) == '1'
5967 || TREE_CODE_CLASS (code) == '2'
5968 || TREE_CODE_CLASS (code) == '<')
5969 {
5970 mode = TYPE_MODE (TREE_TYPE (exp));
5971 if (GET_MODE_CLASS (mode) == MODE_INT
5972 && mode > MAX_INTEGER_COMPUTATION_MODE)
5973 internal_error ("unsupported wide integer operation");
5974 }
5975
5976 /* Check operand of a unary op. */
5977 if (TREE_CODE_CLASS (code) == '1')
5978 {
5979 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5980 if (GET_MODE_CLASS (mode) == MODE_INT
5981 && mode > MAX_INTEGER_COMPUTATION_MODE)
5982 internal_error ("unsupported wide integer operation");
5983 }
5984
5985 /* Check operands of a binary/comparison op. */
5986 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5987 {
5988 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5989 if (GET_MODE_CLASS (mode) == MODE_INT
5990 && mode > MAX_INTEGER_COMPUTATION_MODE)
5991 internal_error ("unsupported wide integer operation");
5992
5993 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5994 if (GET_MODE_CLASS (mode) == MODE_INT
5995 && mode > MAX_INTEGER_COMPUTATION_MODE)
5996 internal_error ("unsupported wide integer operation");
5997 }
5998 }
5999 #endif
6000 \f
6001 /* Return the highest power of two that EXP is known to be a multiple of.
6002 This is used in updating alignment of MEMs in array references. */
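/* Hedged worked example: for EXP == I * 12 + 8, with I a variable, the
   MULT_EXPR contributes 1 * 4 (4 being the lowest set bit of 12), the
   INTEGER_CST 8 contributes 8, and the PLUS_EXPR takes the minimum, so
   the result is 4.  */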
6003
6004 static unsigned HOST_WIDE_INT
6005 highest_pow2_factor (tree exp)
6006 {
6007 unsigned HOST_WIDE_INT c0, c1;
6008
6009 switch (TREE_CODE (exp))
6010 {
6011 case INTEGER_CST:
6012 /* We can find the lowest bit that's a one. If the low
6013 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6014 We need to handle this case since we can find it in a COND_EXPR,
6015 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6016 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6017 later ICE. */
6018 if (TREE_CONSTANT_OVERFLOW (exp))
6019 return BIGGEST_ALIGNMENT;
6020 else
6021 {
6022 /* Note: tree_low_cst is intentionally not used here;
6023 we don't care about the upper bits. */
6024 c0 = TREE_INT_CST_LOW (exp);
6025 c0 &= -c0;
6026 return c0 ? c0 : BIGGEST_ALIGNMENT;
6027 }
6028 break;
6029
6030 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6031 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6032 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6033 return MIN (c0, c1);
6034
6035 case MULT_EXPR:
6036 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6037 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6038 return c0 * c1;
6039
6040 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6041 case CEIL_DIV_EXPR:
6042 if (integer_pow2p (TREE_OPERAND (exp, 1))
6043 && host_integerp (TREE_OPERAND (exp, 1), 1))
6044 {
6045 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6046 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6047 return MAX (1, c0 / c1);
6048 }
6049 break;
6050
6051 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6052 case SAVE_EXPR: case WITH_RECORD_EXPR:
6053 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6054
6055 case COMPOUND_EXPR:
6056 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6057
6058 case COND_EXPR:
6059 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6060 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6061 return MIN (c0, c1);
6062
6063 default:
6064 break;
6065 }
6066
6067 return 1;
6068 }
6069
6070 /* Similar, except that it is known that the expression must be a multiple
6071 of the alignment of TYPE. */
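/* Hedged example: if TYPE has a 32-bit (4-byte) alignment and the factor
   known for EXP is only 1, this returns 4, since the expression is still
   known to be a multiple of the type's alignment.  */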
6072
6073 static unsigned HOST_WIDE_INT
6074 highest_pow2_factor_for_type (tree type, tree exp)
6075 {
6076 unsigned HOST_WIDE_INT type_align, factor;
6077
6078 factor = highest_pow2_factor (exp);
6079 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6080 return MAX (factor, type_align);
6081 }
6082 \f
6083 /* Return an object on the placeholder list that matches EXP, a
6084 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6085 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6086 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6087 is a location which initially points to a starting location in the
6088 placeholder list (zero means start of the list) and where a pointer into
6089 the placeholder list at which the object is found is placed. */
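/* A hedged sketch of typical use (OBJ and the variable names here are
   illustrative): once a WITH_RECORD_EXPR has pushed an object OBJ of
   type T onto the placeholder list, then

     tree plist = 0;
     tree obj = find_placeholder (placeholder_of_type_T, &plist);

   returns OBJ (or an INDIRECT_REF of a pointer to T found in the list)
   and leaves PLIST pointing at the matching list entry.  */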
6090
6091 tree
6092 find_placeholder (tree exp, tree *plist)
6093 {
6094 tree type = TREE_TYPE (exp);
6095 tree placeholder_expr;
6096
6097 for (placeholder_expr
6098 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6099 placeholder_expr != 0;
6100 placeholder_expr = TREE_CHAIN (placeholder_expr))
6101 {
6102 tree need_type = TYPE_MAIN_VARIANT (type);
6103 tree elt;
6104
6105 /* Find the outermost reference that is of the type we want. If none,
6106 see if any object has a type that is a pointer to the type we
6107 want. */
6108 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6109 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6110 || TREE_CODE (elt) == COND_EXPR)
6111 ? TREE_OPERAND (elt, 1)
6112 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6113 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6114 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6115 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6116 ? TREE_OPERAND (elt, 0) : 0))
6117 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6118 {
6119 if (plist)
6120 *plist = placeholder_expr;
6121 return elt;
6122 }
6123
6124 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6125 elt
6126 = ((TREE_CODE (elt) == COMPOUND_EXPR
6127 || TREE_CODE (elt) == COND_EXPR)
6128 ? TREE_OPERAND (elt, 1)
6129 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6130 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6131 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6132 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6133 ? TREE_OPERAND (elt, 0) : 0))
6134 if (POINTER_TYPE_P (TREE_TYPE (elt))
6135 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6136 == need_type))
6137 {
6138 if (plist)
6139 *plist = placeholder_expr;
6140 return build1 (INDIRECT_REF, need_type, elt);
6141 }
6142 }
6143
6144 return 0;
6145 }
6146
6147 /* Subroutine of expand_expr. Expand the two operands of a binary
6148 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6149 The value may be stored in TARGET if TARGET is nonzero. The
6150 MODIFIER argument is as documented by expand_expr. */
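/* Typical call, shown as a hedged sketch: to expand both operands of a
   binary tree node EXP, a caller might write

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   after which OP0 and OP1 hold rtx values for the two operands.  */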
6151
6152 static void
6153 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6154 enum expand_modifier modifier)
6155 {
6156 if (! safe_from_p (target, exp1, 1))
6157 target = 0;
6158 if (operand_equal_p (exp0, exp1, 0))
6159 {
6160 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6161 *op1 = copy_rtx (*op0);
6162 }
6163 else
6164 {
6165 /* If we need to preserve evaluation order, copy exp0 into its own
6166 temporary variable so that it can't be clobbered by exp1. */
6167 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6168 exp0 = save_expr (exp0);
6169 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6170 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6171 }
6172 }
6173
6174 \f
6175 /* expand_expr: generate code for computing expression EXP.
6176 An rtx for the computed value is returned. The value is never null.
6177 In the case of a void EXP, const0_rtx is returned.
6178
6179 The value may be stored in TARGET if TARGET is nonzero.
6180 TARGET is just a suggestion; callers must assume that
6181 the rtx returned may not be the same as TARGET.
6182
6183 If TARGET is CONST0_RTX, it means that the value will be ignored.
6184
6185 If TMODE is not VOIDmode, it suggests generating the
6186 result in mode TMODE. But this is done only when convenient.
6187 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6188 TMODE is just a suggestion; callers must assume that
6189 the rtx returned may not have mode TMODE.
6190
6191 Note that TARGET may have neither TMODE nor MODE. In that case, it
6192 probably will not be used.
6193
6194 If MODIFIER is EXPAND_SUM then when EXP is an addition
6195 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6196 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6197 products as above, or REG or MEM, or constant.
6198 Ordinarily in such cases we would output mul or add instructions
6199 and then return a pseudo reg containing the sum.
6200
6201 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6202 it also marks a label as absolutely required (it can't be dead).
6203 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6204 This is used for outputting expressions used in initializers.
6205
6206 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6207 with a constant address even if that address is not normally legitimate.
6208 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6209
6210 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6211 a call parameter. Such targets require special care as we haven't yet
6212 marked TARGET so that it's safe from being trashed by libcalls. We
6213 don't want to use TARGET for anything but the final result;
6214 intermediate values must go elsewhere. Additionally, calls to
6215 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
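/* A hedged example of the most common use in this file:

     op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);

   which expands a subexpression with no suggested target, no suggested
   mode, and the default modifier (EXPAND_NORMAL).  */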
6216
6217 rtx
6218 expand_expr (tree exp, rtx target, enum machine_mode tmode,
6219 enum expand_modifier modifier)
6220 {
6221 rtx op0, op1, temp;
6222 tree type = TREE_TYPE (exp);
6223 int unsignedp = TREE_UNSIGNED (type);
6224 enum machine_mode mode;
6225 enum tree_code code = TREE_CODE (exp);
6226 optab this_optab;
6227 rtx subtarget, original_target;
6228 int ignore;
6229 tree context;
6230
6231 /* Handle ERROR_MARK before anybody tries to access its type. */
6232 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6233 {
6234 op0 = CONST0_RTX (tmode);
6235 if (op0 != 0)
6236 return op0;
6237 return const0_rtx;
6238 }
6239
6240 mode = TYPE_MODE (type);
6241 /* Use subtarget as the target for operand 0 of a binary operation. */
6242 subtarget = get_subtarget (target);
6243 original_target = target;
6244 ignore = (target == const0_rtx
6245 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6246 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6247 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6248 && TREE_CODE (type) == VOID_TYPE));
6249
6250 /* If we are going to ignore this result, we need only do something
6251 if there is a side-effect somewhere in the expression. If there
6252 is, short-circuit the most common cases here. Note that we must
6253 not call expand_expr with anything but const0_rtx in case this
6254 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6255
6256 if (ignore)
6257 {
6258 if (! TREE_SIDE_EFFECTS (exp))
6259 return const0_rtx;
6260
6261 /* Ensure we reference a volatile object even if value is ignored, but
6262 don't do this if all we are doing is taking its address. */
6263 if (TREE_THIS_VOLATILE (exp)
6264 && TREE_CODE (exp) != FUNCTION_DECL
6265 && mode != VOIDmode && mode != BLKmode
6266 && modifier != EXPAND_CONST_ADDRESS)
6267 {
6268 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6269 if (GET_CODE (temp) == MEM)
6270 temp = copy_to_reg (temp);
6271 return const0_rtx;
6272 }
6273
6274 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6275 || code == INDIRECT_REF || code == BUFFER_REF)
6276 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6277 modifier);
6278
6279 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6280 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6281 {
6282 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6283 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6284 return const0_rtx;
6285 }
6286 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6287 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6288 /* If the second operand has no side effects, just evaluate
6289 the first. */
6290 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6291 modifier);
6292 else if (code == BIT_FIELD_REF)
6293 {
6294 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6295 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6296 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6297 return const0_rtx;
6298 }
6299
6300 target = 0;
6301 }
6302
6303 #ifdef MAX_INTEGER_COMPUTATION_MODE
6304 /* Only check stuff here if the mode we want is different from the mode
6305 of the expression; if it's the same, check_max_integer_computation_mode
6306 will handle it. Do we really need to check this stuff at all? */
6307
6308 if (target
6309 && GET_MODE (target) != mode
6310 && TREE_CODE (exp) != INTEGER_CST
6311 && TREE_CODE (exp) != PARM_DECL
6312 && TREE_CODE (exp) != ARRAY_REF
6313 && TREE_CODE (exp) != ARRAY_RANGE_REF
6314 && TREE_CODE (exp) != COMPONENT_REF
6315 && TREE_CODE (exp) != BIT_FIELD_REF
6316 && TREE_CODE (exp) != INDIRECT_REF
6317 && TREE_CODE (exp) != CALL_EXPR
6318 && TREE_CODE (exp) != VAR_DECL
6319 && TREE_CODE (exp) != RTL_EXPR)
6320 {
6321 enum machine_mode mode = GET_MODE (target);
6322
6323 if (GET_MODE_CLASS (mode) == MODE_INT
6324 && mode > MAX_INTEGER_COMPUTATION_MODE)
6325 internal_error ("unsupported wide integer operation");
6326 }
6327
6328 if (tmode != mode
6329 && TREE_CODE (exp) != INTEGER_CST
6330 && TREE_CODE (exp) != PARM_DECL
6331 && TREE_CODE (exp) != ARRAY_REF
6332 && TREE_CODE (exp) != ARRAY_RANGE_REF
6333 && TREE_CODE (exp) != COMPONENT_REF
6334 && TREE_CODE (exp) != BIT_FIELD_REF
6335 && TREE_CODE (exp) != INDIRECT_REF
6336 && TREE_CODE (exp) != VAR_DECL
6337 && TREE_CODE (exp) != CALL_EXPR
6338 && TREE_CODE (exp) != RTL_EXPR
6339 && GET_MODE_CLASS (tmode) == MODE_INT
6340 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6341 internal_error ("unsupported wide integer operation");
6342
6343 check_max_integer_computation_mode (exp);
6344 #endif
6345
6346 /* If we will do cse, generate all results into pseudo registers
6347 since 1) that allows cse to find more things
6348 and 2) otherwise cse could produce an insn the machine
6349 cannot support. An exception is a CONSTRUCTOR into a multi-word
6350 MEM: that's much more likely to be most efficient into the MEM.
6351 Another is a CALL_EXPR which must return in memory. */
6352
6353 if (! cse_not_expected && mode != BLKmode && target
6354 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6355 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6356 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6357 target = 0;
6358
6359 switch (code)
6360 {
6361 case LABEL_DECL:
6362 {
6363 tree function = decl_function_context (exp);
6364 /* Labels in containing functions, or labels used from initializers,
6365 must be forced. */
6366 if (modifier == EXPAND_INITIALIZER
6367 || (function != current_function_decl
6368 && function != inline_function_decl
6369 && function != 0))
6370 temp = force_label_rtx (exp);
6371 else
6372 temp = label_rtx (exp);
6373
6374 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6375 if (function != current_function_decl
6376 && function != inline_function_decl && function != 0)
6377 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6378 return temp;
6379 }
6380
6381 case PARM_DECL:
6382 if (!DECL_RTL_SET_P (exp))
6383 {
6384 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6385 return CONST0_RTX (mode);
6386 }
6387
6388 /* ... fall through ... */
6389
6390 case VAR_DECL:
6391 /* If a static var's type was incomplete when the decl was written,
6392 but the type is complete now, lay out the decl now. */
6393 if (DECL_SIZE (exp) == 0
6394 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6395 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6396 layout_decl (exp, 0);
6397
6398 /* ... fall through ... */
6399
6400 case FUNCTION_DECL:
6401 case RESULT_DECL:
6402 if (DECL_RTL (exp) == 0)
6403 abort ();
6404
6405 /* Ensure the variable is marked as used even if it doesn't go through
6406 a parser. If it hasn't been used yet, write out an external
6407 definition. */
6408 if (! TREE_USED (exp))
6409 {
6410 assemble_external (exp);
6411 TREE_USED (exp) = 1;
6412 }
6413
6414 /* Show we haven't gotten RTL for this yet. */
6415 temp = 0;
6416
6417 /* Handle variables inherited from containing functions. */
6418 context = decl_function_context (exp);
6419
6420 /* We treat inline_function_decl as an alias for the current function
6421 because that is the inline function whose vars, types, etc.
6422 are being merged into the current function.
6423 See expand_inline_function. */
6424
6425 if (context != 0 && context != current_function_decl
6426 && context != inline_function_decl
6427 /* If var is static, we don't need a static chain to access it. */
6428 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6429 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6430 {
6431 rtx addr;
6432
6433 /* Mark as non-local and addressable. */
6434 DECL_NONLOCAL (exp) = 1;
6435 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6436 abort ();
6437 (*lang_hooks.mark_addressable) (exp);
6438 if (GET_CODE (DECL_RTL (exp)) != MEM)
6439 abort ();
6440 addr = XEXP (DECL_RTL (exp), 0);
6441 if (GET_CODE (addr) == MEM)
6442 addr
6443 = replace_equiv_address (addr,
6444 fix_lexical_addr (XEXP (addr, 0), exp));
6445 else
6446 addr = fix_lexical_addr (addr, exp);
6447
6448 temp = replace_equiv_address (DECL_RTL (exp), addr);
6449 }
6450
6451 /* This is the case of an array whose size is to be determined
6452 from its initializer, while the initializer is still being parsed.
6453 See expand_decl. */
6454
6455 else if (GET_CODE (DECL_RTL (exp)) == MEM
6456 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6457 temp = validize_mem (DECL_RTL (exp));
6458
6459 /* If DECL_RTL is memory, we are in the normal case: if the address
6460 is not valid, or it is not a register and -fforce-addr is
6461 specified, get the address into a register. */
6462
6463 else if (GET_CODE (DECL_RTL (exp)) == MEM
6464 && modifier != EXPAND_CONST_ADDRESS
6465 && modifier != EXPAND_SUM
6466 && modifier != EXPAND_INITIALIZER
6467 && (! memory_address_p (DECL_MODE (exp),
6468 XEXP (DECL_RTL (exp), 0))
6469 || (flag_force_addr
6470 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6471 temp = replace_equiv_address (DECL_RTL (exp),
6472 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6473
6474 /* If we got something, return it. But first, set the alignment
6475 if the address is a register. */
6476 if (temp != 0)
6477 {
6478 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6479 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6480
6481 return temp;
6482 }
6483
6484 /* If the mode of DECL_RTL does not match that of the decl, it
6485 must be a promoted value. We return a SUBREG of the wanted mode,
6486 but mark it so that we know that it was already extended. */
6487
6488 if (GET_CODE (DECL_RTL (exp)) == REG
6489 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6490 {
6491 /* Get the signedness used for this variable. Ensure we get the
6492 same mode we got when the variable was declared. */
6493 if (GET_MODE (DECL_RTL (exp))
6494 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6495 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6496 abort ();
6497
6498 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6499 SUBREG_PROMOTED_VAR_P (temp) = 1;
6500 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6501 return temp;
6502 }
6503
6504 return DECL_RTL (exp);
6505
6506 case INTEGER_CST:
6507 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6508 TREE_INT_CST_HIGH (exp), mode);
6509
6510 /* ??? If overflow is set, fold will have done an incomplete job,
6511 which can result in (plus xx (const_int 0)), which can get
6512 simplified by validate_replace_rtx during virtual register
6513 instantiation, which can result in unrecognizable insns.
6514 Avoid this by forcing all overflows into registers. */
6515 if (TREE_CONSTANT_OVERFLOW (exp)
6516 && modifier != EXPAND_INITIALIZER)
6517 temp = force_reg (mode, temp);
6518
6519 return temp;
6520
6521 case VECTOR_CST:
6522 return const_vector_from_tree (exp);
6523
6524 case CONST_DECL:
6525 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6526
6527 case REAL_CST:
6528 /* If optimized, generate immediate CONST_DOUBLE
6529 which will be turned into memory by reload if necessary.
6530
6531 We used to force a register so that loop.c could see it. But
6532 this does not allow gen_* patterns to perform optimizations with
6533 the constants. It also produces two insns in cases like "x = 1.0;".
6534 On most machines, floating-point constants are not permitted in
6535 many insns, so we'd end up copying it to a register in any case.
6536
6537 Now, we do the copying in expand_binop, if appropriate. */
6538 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6539 TYPE_MODE (TREE_TYPE (exp)));
6540
6541 case COMPLEX_CST:
6542 /* Handle evaluating a complex constant in a CONCAT target. */
6543 if (original_target && GET_CODE (original_target) == CONCAT)
6544 {
6545 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6546 rtx rtarg, itarg;
6547
6548 rtarg = XEXP (original_target, 0);
6549 itarg = XEXP (original_target, 1);
6550
6551 /* Move the real and imaginary parts separately. */
6552 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6553 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6554
6555 if (op0 != rtarg)
6556 emit_move_insn (rtarg, op0);
6557 if (op1 != itarg)
6558 emit_move_insn (itarg, op1);
6559
6560 return original_target;
6561 }
6562
6563 /* ... fall through ... */
6564
6565 case STRING_CST:
6566 temp = output_constant_def (exp, 1);
6567
6568 /* temp contains a constant address.
6569 On RISC machines where a constant address isn't valid,
6570 make some insns to get that address into a register. */
6571 if (modifier != EXPAND_CONST_ADDRESS
6572 && modifier != EXPAND_INITIALIZER
6573 && modifier != EXPAND_SUM
6574 && (! memory_address_p (mode, XEXP (temp, 0))
6575 || flag_force_addr))
6576 return replace_equiv_address (temp,
6577 copy_rtx (XEXP (temp, 0)));
6578 return temp;
6579
6580 case EXPR_WITH_FILE_LOCATION:
6581 {
6582 rtx to_return;
6583 struct file_stack fs;
6584
6585 fs.location = input_location;
6586 fs.next = expr_wfl_stack;
6587 input_filename = EXPR_WFL_FILENAME (exp);
6588 input_line = EXPR_WFL_LINENO (exp);
6589 expr_wfl_stack = &fs;
6590 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6591 emit_line_note (input_location);
6592 /* Possibly avoid switching back and forth here. */
6593 to_return = expand_expr (EXPR_WFL_NODE (exp),
6594 (ignore ? const0_rtx : target),
6595 tmode, modifier);
6596 if (expr_wfl_stack != &fs)
6597 abort ();
6598 input_location = fs.location;
6599 expr_wfl_stack = fs.next;
6600 return to_return;
6601 }
6602
6603 case SAVE_EXPR:
6604 context = decl_function_context (exp);
6605
6606 /* If this SAVE_EXPR was at global context, assume we are an
6607 initialization function and move it into our context. */
6608 if (context == 0)
6609 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6610
6611 /* We treat inline_function_decl as an alias for the current function
6612 because that is the inline function whose vars, types, etc.
6613 are being merged into the current function.
6614 See expand_inline_function. */
6615 if (context == current_function_decl || context == inline_function_decl)
6616 context = 0;
6617
6618 /* If this is non-local, handle it. */
6619 if (context)
6620 {
6621 /* The following call just exists to abort if the context is
6622 not of a containing function. */
6623 find_function_data (context);
6624
6625 temp = SAVE_EXPR_RTL (exp);
6626 if (temp && GET_CODE (temp) == REG)
6627 {
6628 put_var_into_stack (exp, /*rescan=*/true);
6629 temp = SAVE_EXPR_RTL (exp);
6630 }
6631 if (temp == 0 || GET_CODE (temp) != MEM)
6632 abort ();
6633 return
6634 replace_equiv_address (temp,
6635 fix_lexical_addr (XEXP (temp, 0), exp));
6636 }
6637 if (SAVE_EXPR_RTL (exp) == 0)
6638 {
6639 if (mode == VOIDmode)
6640 temp = const0_rtx;
6641 else
6642 temp = assign_temp (build_qualified_type (type,
6643 (TYPE_QUALS (type)
6644 | TYPE_QUAL_CONST)),
6645 3, 0, 0);
6646
6647 SAVE_EXPR_RTL (exp) = temp;
6648 if (!optimize && GET_CODE (temp) == REG)
6649 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6650 save_expr_regs);
6651
6652 /* If the mode of TEMP does not match that of the expression, it
6653 must be a promoted value. We pass store_expr a SUBREG of the
6654 wanted mode but mark it so that we know that it was already
6655 extended. */
6656
6657 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6658 {
6659 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6660 promote_mode (type, mode, &unsignedp, 0);
6661 SUBREG_PROMOTED_VAR_P (temp) = 1;
6662 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6663 }
6664
6665 if (temp == const0_rtx)
6666 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6667 else
6668 store_expr (TREE_OPERAND (exp, 0), temp,
6669 modifier == EXPAND_STACK_PARM ? 2 : 0);
6670
6671 TREE_USED (exp) = 1;
6672 }
6673
6674 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6675 must be a promoted value. We return a SUBREG of the wanted mode,
6676 but mark it so that we know that it was already extended. */
6677
6678 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6679 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6680 {
6681 /* Compute the signedness and make the proper SUBREG. */
6682 promote_mode (type, mode, &unsignedp, 0);
6683 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6684 SUBREG_PROMOTED_VAR_P (temp) = 1;
6685 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6686 return temp;
6687 }
6688
6689 return SAVE_EXPR_RTL (exp);
6690
6691 case UNSAVE_EXPR:
6692 {
6693 rtx temp;
6694 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6695 TREE_OPERAND (exp, 0)
6696 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6697 return temp;
6698 }
6699
6700 case PLACEHOLDER_EXPR:
6701 {
6702 tree old_list = placeholder_list;
6703 tree placeholder_expr = 0;
6704
6705 exp = find_placeholder (exp, &placeholder_expr);
6706 if (exp == 0)
6707 abort ();
6708
6709 placeholder_list = TREE_CHAIN (placeholder_expr);
6710 temp = expand_expr (exp, original_target, tmode, modifier);
6711 placeholder_list = old_list;
6712 return temp;
6713 }
6714
6715 case WITH_RECORD_EXPR:
6716 /* Put the object on the placeholder list, expand our first operand,
6717 and pop the list. */
6718 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6719 placeholder_list);
6720 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6721 modifier);
6722 placeholder_list = TREE_CHAIN (placeholder_list);
6723 return target;
6724
6725 case GOTO_EXPR:
6726 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6727 expand_goto (TREE_OPERAND (exp, 0));
6728 else
6729 expand_computed_goto (TREE_OPERAND (exp, 0));
6730 return const0_rtx;
6731
6732 case EXIT_EXPR:
6733 expand_exit_loop_if_false (NULL,
6734 invert_truthvalue (TREE_OPERAND (exp, 0)));
6735 return const0_rtx;
6736
6737 case LABELED_BLOCK_EXPR:
6738 if (LABELED_BLOCK_BODY (exp))
6739 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6740 /* Should perhaps use expand_label, but this is simpler and safer. */
6741 do_pending_stack_adjust ();
6742 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6743 return const0_rtx;
6744
6745 case EXIT_BLOCK_EXPR:
6746 if (EXIT_BLOCK_RETURN (exp))
6747 sorry ("returned value in block_exit_expr");
6748 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6749 return const0_rtx;
6750
6751 case LOOP_EXPR:
6752 push_temp_slots ();
6753 expand_start_loop (1);
6754 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6755 expand_end_loop ();
6756 pop_temp_slots ();
6757
6758 return const0_rtx;
6759
6760 case BIND_EXPR:
6761 {
6762 tree vars = TREE_OPERAND (exp, 0);
6763
6764 /* Need to open a binding contour here because
6765 if there are any cleanups they must be contained here. */
6766 expand_start_bindings (2);
6767
6768 /* Mark the corresponding BLOCK for output in its proper place. */
6769 if (TREE_OPERAND (exp, 2) != 0
6770 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6771 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6772
6773 /* If VARS have not yet been expanded, expand them now. */
6774 while (vars)
6775 {
6776 if (!DECL_RTL_SET_P (vars))
6777 expand_decl (vars);
6778 expand_decl_init (vars);
6779 vars = TREE_CHAIN (vars);
6780 }
6781
6782 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6783
6784 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6785
6786 return temp;
6787 }
6788
6789 case RTL_EXPR:
6790 if (RTL_EXPR_SEQUENCE (exp))
6791 {
6792 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6793 abort ();
6794 emit_insn (RTL_EXPR_SEQUENCE (exp));
6795 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6796 }
6797 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6798 free_temps_for_rtl_expr (exp);
6799 return RTL_EXPR_RTL (exp);
6800
6801 case CONSTRUCTOR:
6802 /* If we don't need the result, just ensure we evaluate any
6803 subexpressions. */
6804 if (ignore)
6805 {
6806 tree elt;
6807
6808 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6809 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6810
6811 return const0_rtx;
6812 }
6813
6814 /* All elts simple constants => refer to a constant in memory. But
6815 if this is a non-BLKmode mode, let it store a field at a time
6816 since that should make a CONST_INT or CONST_DOUBLE when we
6817 fold. Likewise, if we have a target we can use, it is best to
6818 store directly into the target unless the type is large enough
6819 that memcpy will be used. If we are making an initializer and
6820 all operands are constant, put it in memory as well.
6821
6822 FIXME: Avoid trying to fill vector constructors piece-meal.
6823 Output them with output_constant_def below unless we're sure
6824 they're zeros. This should go away when vector initializers
6825 are treated like VECTOR_CST instead of arrays.
6826 */
6827 else if ((TREE_STATIC (exp)
6828 && ((mode == BLKmode
6829 && ! (target != 0 && safe_from_p (target, exp, 1)))
6830 || TREE_ADDRESSABLE (exp)
6831 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6832 && (! MOVE_BY_PIECES_P
6833 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6834 TYPE_ALIGN (type)))
6835 && ((TREE_CODE (type) == VECTOR_TYPE
6836 && !is_zeros_p (exp))
6837 || ! mostly_zeros_p (exp)))))
6838 || ((modifier == EXPAND_INITIALIZER
6839 || modifier == EXPAND_CONST_ADDRESS)
6840 && TREE_CONSTANT (exp)))
6841 {
6842 rtx constructor = output_constant_def (exp, 1);
6843
6844 if (modifier != EXPAND_CONST_ADDRESS
6845 && modifier != EXPAND_INITIALIZER
6846 && modifier != EXPAND_SUM)
6847 constructor = validize_mem (constructor);
6848
6849 return constructor;
6850 }
6851 else
6852 {
6853 /* Handle calls that pass values in multiple non-contiguous
6854 locations. The Irix 6 ABI has examples of this. */
6855 if (target == 0 || ! safe_from_p (target, exp, 1)
6856 || GET_CODE (target) == PARALLEL
6857 || modifier == EXPAND_STACK_PARM)
6858 target
6859 = assign_temp (build_qualified_type (type,
6860 (TYPE_QUALS (type)
6861 | (TREE_READONLY (exp)
6862 * TYPE_QUAL_CONST))),
6863 0, TREE_ADDRESSABLE (exp), 1);
6864
6865 store_constructor (exp, target, 0, int_expr_size (exp));
6866 return target;
6867 }
6868
6869 case INDIRECT_REF:
6870 {
6871 tree exp1 = TREE_OPERAND (exp, 0);
6872 tree index;
6873 tree string = string_constant (exp1, &index);
6874
6875 /* Try to optimize reads from const strings. */
6876 if (string
6877 && TREE_CODE (string) == STRING_CST
6878 && TREE_CODE (index) == INTEGER_CST
6879 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6880 && GET_MODE_CLASS (mode) == MODE_INT
6881 && GET_MODE_SIZE (mode) == 1
6882 && modifier != EXPAND_WRITE)
6883 return gen_int_mode (TREE_STRING_POINTER (string)
6884 [TREE_INT_CST_LOW (index)], mode);
6885
6886 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6887 op0 = memory_address (mode, op0);
6888 temp = gen_rtx_MEM (mode, op0);
6889 set_mem_attributes (temp, exp, 0);
6890
6891 /* If we are writing to this object and its type is a record with
6892 readonly fields, we must mark it as readonly so it will
6893 conflict with readonly references to those fields. */
6894 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6895 RTX_UNCHANGING_P (temp) = 1;
6896
6897 return temp;
6898 }
6899
6900 case ARRAY_REF:
6901 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6902 abort ();
6903
6904 {
6905 tree array = TREE_OPERAND (exp, 0);
6906 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6907 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6908 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6909 HOST_WIDE_INT i;
6910
6911 /* Optimize the special-case of a zero lower bound.
6912
6913 We convert the low_bound to sizetype to avoid some problems
6914 with constant folding. (E.g. suppose the lower bound is 1,
6915 and its mode is QI. Without the conversion, (ARRAY
6916 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6917 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6918
6919 if (! integer_zerop (low_bound))
6920 index = size_diffop (index, convert (sizetype, low_bound));
6921
6922 /* Fold an expression like: "foo"[2].
6923 This is not done in fold so it won't happen inside &.
6924 Don't fold if this is for wide characters since it's too
6925 difficult to do correctly and this is a very rare case. */
6926
6927 if (modifier != EXPAND_CONST_ADDRESS
6928 && modifier != EXPAND_INITIALIZER
6929 && modifier != EXPAND_MEMORY
6930 && TREE_CODE (array) == STRING_CST
6931 && TREE_CODE (index) == INTEGER_CST
6932 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6933 && GET_MODE_CLASS (mode) == MODE_INT
6934 && GET_MODE_SIZE (mode) == 1)
6935 return gen_int_mode (TREE_STRING_POINTER (array)
6936 [TREE_INT_CST_LOW (index)], mode);
6937
6938 /* If this is a constant index into a constant array,
6939 just get the value from the array. Handle both the cases when
6940 we have an explicit constructor and when our operand is a variable
6941 that was declared const. */
6942
6943 if (modifier != EXPAND_CONST_ADDRESS
6944 && modifier != EXPAND_INITIALIZER
6945 && modifier != EXPAND_MEMORY
6946 && TREE_CODE (array) == CONSTRUCTOR
6947 && ! TREE_SIDE_EFFECTS (array)
6948 && TREE_CODE (index) == INTEGER_CST
6949 && 0 > compare_tree_int (index,
6950 list_length (CONSTRUCTOR_ELTS
6951 (TREE_OPERAND (exp, 0)))))
6952 {
6953 tree elem;
6954
6955 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6956 i = TREE_INT_CST_LOW (index);
6957 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6958 ;
6959
6960 if (elem)
6961 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6962 modifier);
6963 }
6964
6965 else if (optimize >= 1
6966 && modifier != EXPAND_CONST_ADDRESS
6967 && modifier != EXPAND_INITIALIZER
6968 && modifier != EXPAND_MEMORY
6969 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6970 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6971 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6972 && targetm.binds_local_p (array))
6973 {
6974 if (TREE_CODE (index) == INTEGER_CST)
6975 {
6976 tree init = DECL_INITIAL (array);
6977
6978 if (TREE_CODE (init) == CONSTRUCTOR)
6979 {
6980 tree elem;
6981
6982 for (elem = CONSTRUCTOR_ELTS (init);
6983 (elem
6984 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6985 elem = TREE_CHAIN (elem))
6986 ;
6987
6988 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6989 return expand_expr (fold (TREE_VALUE (elem)), target,
6990 tmode, modifier);
6991 }
6992 else if (TREE_CODE (init) == STRING_CST
6993 && 0 > compare_tree_int (index,
6994 TREE_STRING_LENGTH (init)))
6995 {
6996 tree type = TREE_TYPE (TREE_TYPE (init));
6997 enum machine_mode mode = TYPE_MODE (type);
6998
6999 if (GET_MODE_CLASS (mode) == MODE_INT
7000 && GET_MODE_SIZE (mode) == 1)
7001 return gen_int_mode (TREE_STRING_POINTER (init)
7002 [TREE_INT_CST_LOW (index)], mode);
7003 }
7004 }
7005 }
7006 }
7007 goto normal_inner_ref;
7008
7009 case COMPONENT_REF:
7010 /* If the operand is a CONSTRUCTOR, we can just extract the
7011 appropriate field if it is present. */
7012 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7013 {
7014 tree elt;
7015
7016 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7017 elt = TREE_CHAIN (elt))
7018 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7019 /* We can normally use the value of the field in the
7020 CONSTRUCTOR. However, if this is a bitfield in
7021 an integral mode that we can fit in a HOST_WIDE_INT,
7022 we must mask only the number of bits in the bitfield,
7023 since this is done implicitly by the constructor. If
7024 the bitfield does not meet either of those conditions,
7025 we can't do this optimization. */
7026 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7027 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7028 == MODE_INT)
7029 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7030 <= HOST_BITS_PER_WIDE_INT))))
7031 {
7032 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7033 && modifier == EXPAND_STACK_PARM)
7034 target = 0;
7035 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7036 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7037 {
7038 HOST_WIDE_INT bitsize
7039 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7040 enum machine_mode imode
7041 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7042
7043 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7044 {
7045 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7046 op0 = expand_and (imode, op0, op1, target);
7047 }
7048 else
7049 {
7050 tree count
7051 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7052 0);
7053
7054 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7055 target, 0);
7056 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7057 target, 0);
7058 }
7059 }
7060
7061 return op0;
7062 }
7063 }
7064 goto normal_inner_ref;
7065
7066 case BIT_FIELD_REF:
7067 case ARRAY_RANGE_REF:
7068 normal_inner_ref:
7069 {
7070 enum machine_mode mode1;
7071 HOST_WIDE_INT bitsize, bitpos;
7072 tree offset;
7073 int volatilep = 0;
7074 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7075 &mode1, &unsignedp, &volatilep);
7076 rtx orig_op0;
7077
7078 /* If we got back the original object, something is wrong. Perhaps
7079 we are evaluating an expression too early. In any event, don't
7080 infinitely recurse. */
7081 if (tem == exp)
7082 abort ();
7083
7084 /* If TEM's type is a union of variable size, pass TARGET to the inner
7085 computation, since it will need a temporary and TARGET is known
7086 to be usable as one. This occurs in unchecked conversion in Ada. */
7087
7088 orig_op0 = op0
7089 = expand_expr (tem,
7090 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7091 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7092 != INTEGER_CST)
7093 && modifier != EXPAND_STACK_PARM
7094 ? target : NULL_RTX),
7095 VOIDmode,
7096 (modifier == EXPAND_INITIALIZER
7097 || modifier == EXPAND_CONST_ADDRESS
7098 || modifier == EXPAND_STACK_PARM)
7099 ? modifier : EXPAND_NORMAL);
7100
7101 /* If this is a constant, put it into a register if it is a
7102 legitimate constant and OFFSET is 0; otherwise put it in memory. */
7103 if (CONSTANT_P (op0))
7104 {
7105 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7106 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7107 && offset == 0)
7108 op0 = force_reg (mode, op0);
7109 else
7110 op0 = validize_mem (force_const_mem (mode, op0));
7111 }
7112
7113 /* Otherwise, if this object is not in memory and we either have an
7114 offset or a BLKmode result, put it there. This case can't occur in
7115 C, but can in Ada if we have unchecked conversion of an expression
7116 from a scalar type to an array or record type or for an
7117 ARRAY_RANGE_REF whose type is BLKmode. */
7118 else if (GET_CODE (op0) != MEM
7119 && (offset != 0
7120 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7121 {
7122 /* If the operand is a SAVE_EXPR, we can deal with this by
7123 forcing the SAVE_EXPR into memory. */
7124 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7125 {
7126 put_var_into_stack (TREE_OPERAND (exp, 0),
7127 /*rescan=*/true);
7128 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7129 }
7130 else
7131 {
7132 tree nt
7133 = build_qualified_type (TREE_TYPE (tem),
7134 (TYPE_QUALS (TREE_TYPE (tem))
7135 | TYPE_QUAL_CONST));
7136 rtx memloc = assign_temp (nt, 1, 1, 1);
7137
7138 emit_move_insn (memloc, op0);
7139 op0 = memloc;
7140 }
7141 }
7142
7143 if (offset != 0)
7144 {
7145 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7146 EXPAND_SUM);
7147
7148 if (GET_CODE (op0) != MEM)
7149 abort ();
7150
7151 #ifdef POINTERS_EXTEND_UNSIGNED
7152 if (GET_MODE (offset_rtx) != Pmode)
7153 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7154 #else
7155 if (GET_MODE (offset_rtx) != ptr_mode)
7156 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7157 #endif
7158
7159 if (GET_MODE (op0) == BLKmode
7160 /* A constant address in OP0 can have VOIDmode; we must
7161 not try to call force_reg in that case. */
7162 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7163 && bitsize != 0
7164 && (bitpos % bitsize) == 0
7165 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7166 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7167 {
7168 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7169 bitpos = 0;
7170 }
7171
7172 op0 = offset_address (op0, offset_rtx,
7173 highest_pow2_factor (offset));
7174 }
7175
7176 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7177 record its alignment as BIGGEST_ALIGNMENT. */
7178 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7179 && is_aligning_offset (offset, tem))
7180 set_mem_align (op0, BIGGEST_ALIGNMENT);
7181
7182 /* Don't forget about volatility even if this is a bitfield. */
7183 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7184 {
7185 if (op0 == orig_op0)
7186 op0 = copy_rtx (op0);
7187
7188 MEM_VOLATILE_P (op0) = 1;
7189 }
7190
7191 /* The following code doesn't handle CONCAT.
7192 Assume only bitpos == 0 can be used for CONCAT, due to
7193 one-element arrays having the same mode as their element. */
7194 if (GET_CODE (op0) == CONCAT)
7195 {
7196 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7197 abort ();
7198 return op0;
7199 }
7200
7201 /* In cases where an aligned union has an unaligned object
7202 as a field, we might be extracting a BLKmode value from
7203 an integer-mode (e.g., SImode) object. Handle this case
7204 by doing the extract into an object as wide as the field
7205 (which we know to be the width of a basic mode), then
7206 storing into memory, and changing the mode to BLKmode. */
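	  /* E.g. (illustration): extracting a BLKmode record field that
	     lives inside an SImode union member is done by pulling the
	     bits out in an integer mode, storing them into a stack
	     temporary, and then re-typing that MEM as BLKmode below.  */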
7207 if (mode1 == VOIDmode
7208 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7209 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7210 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7211 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7212 && modifier != EXPAND_CONST_ADDRESS
7213 && modifier != EXPAND_INITIALIZER)
7214 /* If the field isn't aligned enough to fetch as a memref,
7215 fetch it as a bit field. */
7216 || (mode1 != BLKmode
7217 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7218 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7219 || (GET_CODE (op0) == MEM
7220 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7221 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7222 && ((modifier == EXPAND_CONST_ADDRESS
7223 || modifier == EXPAND_INITIALIZER)
7224 ? STRICT_ALIGNMENT
7225 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7226 || (bitpos % BITS_PER_UNIT != 0)))
7227 /* If the type and the field are a constant size and the
7228 size of the type isn't the same size as the bitfield,
7229 we must use bitfield operations. */
7230 || (bitsize >= 0
7231 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7232 == INTEGER_CST)
7233 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7234 bitsize)))
7235 {
7236 enum machine_mode ext_mode = mode;
7237
7238 if (ext_mode == BLKmode
7239 && ! (target != 0 && GET_CODE (op0) == MEM
7240 && GET_CODE (target) == MEM
7241 && bitpos % BITS_PER_UNIT == 0))
7242 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7243
7244 if (ext_mode == BLKmode)
7245 {
7246 if (target == 0)
7247 target = assign_temp (type, 0, 1, 1);
7248
7249 if (bitsize == 0)
7250 return target;
7251
7252 /* In this case, BITPOS must start at a byte boundary and
7253 TARGET, if specified, must be a MEM. */
7254 if (GET_CODE (op0) != MEM
7255 || (target != 0 && GET_CODE (target) != MEM)
7256 || bitpos % BITS_PER_UNIT != 0)
7257 abort ();
7258
7259 emit_block_move (target,
7260 adjust_address (op0, VOIDmode,
7261 bitpos / BITS_PER_UNIT),
7262 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7263 / BITS_PER_UNIT),
7264 (modifier == EXPAND_STACK_PARM
7265 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7266
7267 return target;
7268 }
7269
7270 op0 = validize_mem (op0);
7271
7272 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7273 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7274
7275 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7276 (modifier == EXPAND_STACK_PARM
7277 ? NULL_RTX : target),
7278 ext_mode, ext_mode,
7279 int_size_in_bytes (TREE_TYPE (tem)));
7280
7281 /* If the result is a record type and BITSIZE is narrower than
7282 the mode of OP0, an integral mode, and this is a big endian
7283 machine, we must put the field into the high-order bits. */
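	  /* E.g. (illustrative): a 3-bit field extracted into SImode on a
	     big-endian target is shifted left by 32 - 3 = 29 below so that
	     it ends up in the high-order bits of the register.  */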
7284 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7285 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7286 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7287 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7288 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7289 - bitsize),
7290 op0, 1);
7291
7292 if (mode == BLKmode)
7293 {
7294 rtx new = assign_temp (build_qualified_type
7295 ((*lang_hooks.types.type_for_mode)
7296 (ext_mode, 0),
7297 TYPE_QUAL_CONST), 0, 1, 1);
7298
7299 emit_move_insn (new, op0);
7300 op0 = copy_rtx (new);
7301 PUT_MODE (op0, BLKmode);
7302 set_mem_attributes (op0, exp, 1);
7303 }
7304
7305 return op0;
7306 }
7307
7308 /* If the result is BLKmode, use that to access the object
7309 now as well. */
7310 if (mode == BLKmode)
7311 mode1 = BLKmode;
7312
7313 /* Get a reference to just this component. */
7314 if (modifier == EXPAND_CONST_ADDRESS
7315 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7316 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7317 else
7318 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7319
7320 if (op0 == orig_op0)
7321 op0 = copy_rtx (op0);
7322
7323 set_mem_attributes (op0, exp, 0);
7324 if (GET_CODE (XEXP (op0, 0)) == REG)
7325 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7326
7327 MEM_VOLATILE_P (op0) |= volatilep;
7328 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7329 || modifier == EXPAND_CONST_ADDRESS
7330 || modifier == EXPAND_INITIALIZER)
7331 return op0;
7332 else if (target == 0)
7333 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7334
7335 convert_move (target, op0, unsignedp);
7336 return target;
7337 }
7338
7339 case VTABLE_REF:
7340 {
7341 rtx insn, before = get_last_insn (), vtbl_ref;
7342
7343 /* Evaluate the interior expression. */
7344 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7345 tmode, modifier);
7346
7347 /* Get or create an instruction off which to hang a note. */
7348 if (REG_P (subtarget))
7349 {
7350 target = subtarget;
7351 insn = get_last_insn ();
7352 if (insn == before)
7353 abort ();
7354 if (! INSN_P (insn))
7355 insn = prev_nonnote_insn (insn);
7356 }
7357 else
7358 {
7359 target = gen_reg_rtx (GET_MODE (subtarget));
7360 insn = emit_move_insn (target, subtarget);
7361 }
7362
7363 /* Collect the data for the note. */
7364 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7365 vtbl_ref = plus_constant (vtbl_ref,
7366 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7367 /* Discard the initial CONST that was added. */
7368 vtbl_ref = XEXP (vtbl_ref, 0);
7369
7370 REG_NOTES (insn)
7371 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7372
7373 return target;
7374 }
7375
7376 /* Intended for a reference to a buffer of a file-object in Pascal.
7377 But it's not certain that a special tree code will really be
7378 necessary for these. INDIRECT_REF might work for them. */
7379 case BUFFER_REF:
7380 abort ();
7381
7382 case IN_EXPR:
7383 {
7384 /* Pascal set IN expression.
7385
7386 Algorithm:
7387 rlo = set_low - (set_low%bits_per_word);
7388 the_word = set [ (index - rlo)/bits_per_word ];
7389 bit_index = index % bits_per_word;
7390 bitmask = 1 << bit_index;
7391 return !!(the_word & bitmask); */
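	/* Worked example (illustrative values only): with bits_per_word == 8,
	   set_low == 3 and index == 13, the algorithm above gives
	   rlo = 3 - (3 % 8) = 0, the_word = set[(13 - 0) / 8] = set[1],
	   bit_index = 13 % 8 = 5 and bitmask = 1 << 5, so the result is
	   nonzero iff bit 5 of set[1] is set.  */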
7392
7393 tree set = TREE_OPERAND (exp, 0);
7394 tree index = TREE_OPERAND (exp, 1);
7395 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7396 tree set_type = TREE_TYPE (set);
7397 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7398 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7399 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7400 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7401 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7402 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7403 rtx setaddr = XEXP (setval, 0);
7404 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7405 rtx rlow;
7406 rtx diff, quo, rem, addr, bit, result;
7407
7408 /* If domain is empty, answer is no. Likewise if index is constant
7409 and out of bounds. */
7410 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7411 && TREE_CODE (set_low_bound) == INTEGER_CST
7412 && tree_int_cst_lt (set_high_bound, set_low_bound))
7413 || (TREE_CODE (index) == INTEGER_CST
7414 && TREE_CODE (set_low_bound) == INTEGER_CST
7415 && tree_int_cst_lt (index, set_low_bound))
7416 || (TREE_CODE (set_high_bound) == INTEGER_CST
7417 && TREE_CODE (index) == INTEGER_CST
7418 && tree_int_cst_lt (set_high_bound, index))))
7419 return const0_rtx;
7420
7421 if (target == 0)
7422 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7423
7424 /* If we get here, we have to generate the code for both cases
7425 (in range and out of range). */
7426
7427 op0 = gen_label_rtx ();
7428 op1 = gen_label_rtx ();
7429
7430 if (! (GET_CODE (index_val) == CONST_INT
7431 && GET_CODE (lo_r) == CONST_INT))
7432 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7433 GET_MODE (index_val), iunsignedp, op1);
7434
7435 if (! (GET_CODE (index_val) == CONST_INT
7436 && GET_CODE (hi_r) == CONST_INT))
7437 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7438 GET_MODE (index_val), iunsignedp, op1);
7439
7440 /* Calculate the element number of bit zero in the first word
7441 of the set. */
7442 if (GET_CODE (lo_r) == CONST_INT)
7443 rlow = GEN_INT (INTVAL (lo_r)
7444 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7445 else
7446 rlow = expand_binop (index_mode, and_optab, lo_r,
7447 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7448 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7449
7450 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7451 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7452
7453 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7454 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7455 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7456 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7457
7458 addr = memory_address (byte_mode,
7459 expand_binop (index_mode, add_optab, diff,
7460 setaddr, NULL_RTX, iunsignedp,
7461 OPTAB_LIB_WIDEN));
7462
7463 /* Extract the bit we want to examine. */
7464 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7465 gen_rtx_MEM (byte_mode, addr),
7466 make_tree (TREE_TYPE (index), rem),
7467 NULL_RTX, 1);
7468 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7469 GET_MODE (target) == byte_mode ? target : 0,
7470 1, OPTAB_LIB_WIDEN);
7471
7472 if (result != target)
7473 convert_move (target, result, 1);
7474
7475 /* Output the code to handle the out-of-range case. */
7476 emit_jump (op0);
7477 emit_label (op1);
7478 emit_move_insn (target, const0_rtx);
7479 emit_label (op0);
7480 return target;
7481 }
7482
7483 case WITH_CLEANUP_EXPR:
7484 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7485 {
7486 WITH_CLEANUP_EXPR_RTL (exp)
7487 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7488 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7489 CLEANUP_EH_ONLY (exp));
7490
7491 /* That's it for this cleanup. */
7492 TREE_OPERAND (exp, 1) = 0;
7493 }
7494 return WITH_CLEANUP_EXPR_RTL (exp);
7495
7496 case CLEANUP_POINT_EXPR:
7497 {
7498 /* Start a new binding layer that will keep track of all cleanup
7499 actions to be performed. */
7500 expand_start_bindings (2);
7501
7502 target_temp_slot_level = temp_slot_level;
7503
7504 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7505 /* If we're going to use this value, load it up now. */
7506 if (! ignore)
7507 op0 = force_not_mem (op0);
7508 preserve_temp_slots (op0);
7509 expand_end_bindings (NULL_TREE, 0, 0);
7510 }
7511 return op0;
7512
7513 case CALL_EXPR:
7514 /* Check for a built-in function. */
7515 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7516 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7517 == FUNCTION_DECL)
7518 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7519 {
7520 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7521 == BUILT_IN_FRONTEND)
7522 return (*lang_hooks.expand_expr) (exp, original_target,
7523 tmode, modifier);
7524 else
7525 return expand_builtin (exp, target, subtarget, tmode, ignore);
7526 }
7527
7528 return expand_call (exp, target, ignore);
7529
7530 case NON_LVALUE_EXPR:
7531 case NOP_EXPR:
7532 case CONVERT_EXPR:
7533 case REFERENCE_EXPR:
7534 if (TREE_OPERAND (exp, 0) == error_mark_node)
7535 return const0_rtx;
7536
7537 if (TREE_CODE (type) == UNION_TYPE)
7538 {
7539 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7540
7541 /* If both input and output are BLKmode, this conversion isn't doing
7542 anything except possibly changing memory attributes. */
7543 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7544 {
7545 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7546 modifier);
7547
7548 result = copy_rtx (result);
7549 set_mem_attributes (result, exp, 0);
7550 return result;
7551 }
7552
7553 if (target == 0)
7554 target = assign_temp (type, 0, 1, 1);
7555
7556 if (GET_CODE (target) == MEM)
7557 /* Store data into beginning of memory target. */
7558 store_expr (TREE_OPERAND (exp, 0),
7559 adjust_address (target, TYPE_MODE (valtype), 0),
7560 modifier == EXPAND_STACK_PARM ? 2 : 0);
7561
7562 else if (GET_CODE (target) == REG)
7563 /* Store this field into a union of the proper type. */
7564 store_field (target,
7565 MIN ((int_size_in_bytes (TREE_TYPE
7566 (TREE_OPERAND (exp, 0)))
7567 * BITS_PER_UNIT),
7568 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7569 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7570 VOIDmode, 0, type, 0);
7571 else
7572 abort ();
7573
7574 /* Return the entire union. */
7575 return target;
7576 }
7577
7578 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7579 {
7580 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7581 modifier);
7582
7583 /* If the signedness of the conversion differs and OP0 is
7584 a promoted SUBREG, clear that indication since we now
7585 have to do the proper extension. */
7586 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7587 && GET_CODE (op0) == SUBREG)
7588 SUBREG_PROMOTED_VAR_P (op0) = 0;
7589
7590 return op0;
7591 }
7592
7593 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7594 if (GET_MODE (op0) == mode)
7595 return op0;
7596
7597 /* If OP0 is a constant, just convert it into the proper mode. */
7598 if (CONSTANT_P (op0))
7599 {
7600 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7601 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7602
7603 if (modifier == EXPAND_INITIALIZER)
7604 return simplify_gen_subreg (mode, op0, inner_mode,
7605 subreg_lowpart_offset (mode,
7606 inner_mode));
7607 else
7608 return convert_modes (mode, inner_mode, op0,
7609 TREE_UNSIGNED (inner_type));
7610 }
7611
7612 if (modifier == EXPAND_INITIALIZER)
7613 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7614
7615 if (target == 0)
7616 return
7617 convert_to_mode (mode, op0,
7618 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7619 else
7620 convert_move (target, op0,
7621 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7622 return target;
7623
7624 case VIEW_CONVERT_EXPR:
7625 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7626
7627 /* If the input and output modes are both the same, we are done.
7628 Otherwise, if neither mode is BLKmode and both are integral and within
7629 a word, we can use gen_lowpart. If neither is true, make sure the
7630 operand is in memory and convert the MEM to the new mode. */
7631 if (TYPE_MODE (type) == GET_MODE (op0))
7632 ;
7633 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7634 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7635 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7636 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7637 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7638 op0 = gen_lowpart (TYPE_MODE (type), op0);
7639 else if (GET_CODE (op0) != MEM)
7640 {
7641 /* If the operand is not a MEM, force it into memory. Since we
7642 are going to be changing the mode of the MEM, don't call
7643 force_const_mem for constants because we don't allow pool
7644 constants to change mode. */
7645 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7646
7647 if (TREE_ADDRESSABLE (exp))
7648 abort ();
7649
7650 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7651 target
7652 = assign_stack_temp_for_type
7653 (TYPE_MODE (inner_type),
7654 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7655
7656 emit_move_insn (target, op0);
7657 op0 = target;
7658 }
7659
7660 /* At this point, OP0 is in the correct mode. If the output type is such
7661 that the operand is known to be aligned, indicate that it is.
7662 Otherwise, we need only be concerned about alignment for non-BLKmode
7663 results. */
7664 if (GET_CODE (op0) == MEM)
7665 {
7666 op0 = copy_rtx (op0);
7667
7668 if (TYPE_ALIGN_OK (type))
7669 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7670 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7671 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7672 {
7673 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7674 HOST_WIDE_INT temp_size
7675 = MAX (int_size_in_bytes (inner_type),
7676 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7677 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7678 temp_size, 0, type);
7679 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7680
7681 if (TREE_ADDRESSABLE (exp))
7682 abort ();
7683
7684 if (GET_MODE (op0) == BLKmode)
7685 emit_block_move (new_with_op0_mode, op0,
7686 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7687 (modifier == EXPAND_STACK_PARM
7688 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7689 else
7690 emit_move_insn (new_with_op0_mode, op0);
7691
7692 op0 = new;
7693 }
7694
7695 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7696 }
7697
7698 return op0;
7699
7700 case PLUS_EXPR:
7701 this_optab = ! unsignedp && flag_trapv
7702 && (GET_MODE_CLASS (mode) == MODE_INT)
7703 ? addv_optab : add_optab;
7704
7705 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7706 something else, make sure we add the register to the constant and
7707 then to the other thing. This case can occur during strength
7708 reduction and doing it this way will produce better code if the
7709 frame pointer or argument pointer is eliminated.
7710
7711 fold-const.c will ensure that the constant is always in the inner
7712 PLUS_EXPR, so the only case we need to do anything about is if
7713 sp, ap, or fp is our second argument, in which case we must swap
7714 the innermost first argument and our second argument. */
7715
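	/* Illustration (hypothetical trees): given
	   (PLUS_EXPR (PLUS_EXPR X 4) FP), where FP is an RTL_EXPR for the
	   frame pointer, the swap below rewrites it as
	   (PLUS_EXPR (PLUS_EXPR FP 4) X), so the register and the constant
	   are combined first.  */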
7716 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7717 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7718 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7719 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7720 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7721 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7722 {
7723 tree t = TREE_OPERAND (exp, 1);
7724
7725 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7726 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7727 }
7728
7729 /* If the result is to be ptr_mode and we are adding an integer to
7730 something, we might be forming a constant. So try to use
7731 plus_constant. If it produces a sum and we can't accept it,
7732 use force_operand. This allows P = &ARR[const] to generate
7733 efficient code on machines where a SYMBOL_REF is not a valid
7734 address.
7735
7736 If this is an EXPAND_SUM call, always return the sum. */
7737 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7738 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7739 {
7740 if (modifier == EXPAND_STACK_PARM)
7741 target = 0;
7742 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7743 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7744 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7745 {
7746 rtx constant_part;
7747
7748 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7749 EXPAND_SUM);
7750 /* Use immed_double_const to ensure that the constant is
7751 truncated according to the mode of OP1, then sign extended
7752 to a HOST_WIDE_INT. Using the constant directly can result
7753 in non-canonical RTL in a 64x32 cross compile. */
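	      /* E.g. (illustrative): on a 64-bit host targeting a 32-bit
		 machine, an SImode constant 0x80000000 must be truncated to
		 SImode and sign-extended to the host word, yielding the
		 canonical (const_int -2147483648) rather than a positive
		 out-of-range value.  */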
7754 constant_part
7755 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7756 (HOST_WIDE_INT) 0,
7757 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7758 op1 = plus_constant (op1, INTVAL (constant_part));
7759 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7760 op1 = force_operand (op1, target);
7761 return op1;
7762 }
7763
7764 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7765 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7766 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7767 {
7768 rtx constant_part;
7769
7770 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7771 (modifier == EXPAND_INITIALIZER
7772 ? EXPAND_INITIALIZER : EXPAND_SUM));
7773 if (! CONSTANT_P (op0))
7774 {
7775 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7776 VOIDmode, modifier);
7777 /* Return a PLUS if modifier says it's OK. */
7778 if (modifier == EXPAND_SUM
7779 || modifier == EXPAND_INITIALIZER)
7780 return simplify_gen_binary (PLUS, mode, op0, op1);
7781 goto binop2;
7782 }
7783 /* Use immed_double_const to ensure that the constant is
7784 truncated according to the mode of OP0, then sign extended
7785 to a HOST_WIDE_INT. Using the constant directly can result
7786 in non-canonical RTL in a 64x32 cross compile. */
7787 constant_part
7788 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7789 (HOST_WIDE_INT) 0,
7790 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7791 op0 = plus_constant (op0, INTVAL (constant_part));
7792 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7793 op0 = force_operand (op0, target);
7794 return op0;
7795 }
7796 }
7797
7798 /* No sense saving up arithmetic to be done
7799 if it's all in the wrong mode to form part of an address.
7800 And force_operand won't know whether to sign-extend or
7801 zero-extend. */
7802 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7803 || mode != ptr_mode)
7804 {
7805 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7806 subtarget, &op0, &op1, 0);
7807 if (op0 == const0_rtx)
7808 return op1;
7809 if (op1 == const0_rtx)
7810 return op0;
7811 goto binop2;
7812 }
7813
7814 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7815 subtarget, &op0, &op1, modifier);
7816 return simplify_gen_binary (PLUS, mode, op0, op1);
7817
7818 case MINUS_EXPR:
7819 /* For initializers, we are allowed to return a MINUS of two
7820 symbolic constants. Here we handle all cases when both operands
7821 are constant. */
7822 /* Handle difference of two symbolic constants,
7823 for the sake of an initializer. */
7824 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7825 && really_constant_p (TREE_OPERAND (exp, 0))
7826 && really_constant_p (TREE_OPERAND (exp, 1)))
7827 {
7828 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7829 NULL_RTX, &op0, &op1, modifier);
7830
7831 /* If the last operand is a CONST_INT, use plus_constant of
7832 the negated constant. Else make the MINUS. */
7833 if (GET_CODE (op1) == CONST_INT)
7834 return plus_constant (op0, - INTVAL (op1));
7835 else
7836 return gen_rtx_MINUS (mode, op0, op1);
7837 }
7838
7839 this_optab = ! unsignedp && flag_trapv
7840 && (GET_MODE_CLASS(mode) == MODE_INT)
7841 ? subv_optab : sub_optab;
7842
7843 /* No sense saving up arithmetic to be done
7844 if it's all in the wrong mode to form part of an address.
7845 And force_operand won't know whether to sign-extend or
7846 zero-extend. */
7847 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7848 || mode != ptr_mode)
7849 goto binop;
7850
7851 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7852 subtarget, &op0, &op1, modifier);
7853
7854 /* Convert A - const to A + (-const). */
7855 if (GET_CODE (op1) == CONST_INT)
7856 {
7857 op1 = negate_rtx (mode, op1);
7858 return simplify_gen_binary (PLUS, mode, op0, op1);
7859 }
7860
7861 goto binop2;
7862
7863 case MULT_EXPR:
7864 /* If the first operand is constant, swap them.
7865 Thus the following special case checks need only
7866 check the second operand. */
7867 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7868 {
7869 tree t1 = TREE_OPERAND (exp, 0);
7870 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7871 TREE_OPERAND (exp, 1) = t1;
7872 }
7873
7874 /* Attempt to return something suitable for generating an
7875 indexed address, for machines that support that. */
7876
7877 if (modifier == EXPAND_SUM && mode == ptr_mode
7878 && host_integerp (TREE_OPERAND (exp, 1), 0))
7879 {
7880 tree exp1 = TREE_OPERAND (exp, 1);
7881
7882 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7883 EXPAND_SUM);
7884
7885 if (GET_CODE (op0) != REG)
7886 op0 = force_operand (op0, NULL_RTX);
7887 if (GET_CODE (op0) != REG)
7888 op0 = copy_to_mode_reg (mode, op0);
7889
7890 return gen_rtx_MULT (mode, op0,
7891 gen_int_mode (tree_low_cst (exp1, 0),
7892 TYPE_MODE (TREE_TYPE (exp1))));
7893 }
7894
7895 if (modifier == EXPAND_STACK_PARM)
7896 target = 0;
7897
7898 /* Check for multiplying things that have been extended
7899 from a narrower type. If this machine supports multiplying
7900 in that narrower type with a result in the desired type,
7901 do it that way, and avoid the explicit type-conversion. */
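	  /* For instance (hypothetical C source): with 32-bit int and
	     64-bit long long, "(long long) a * (long long) b" presents two
	     NOP_EXPRs around int operands, so a widening multiply pattern
	     can be used instead of extending both operands to 64 bits
	     first.  */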
7902 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7903 && TREE_CODE (type) == INTEGER_TYPE
7904 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7905 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7906 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7907 && int_fits_type_p (TREE_OPERAND (exp, 1),
7908 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7909 /* Don't use a widening multiply if a shift will do. */
7910 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7911 > HOST_BITS_PER_WIDE_INT)
7912 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7913 ||
7914 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7915 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7916 ==
7917 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7918 /* If both operands are extended, they must either both
7919 be zero-extended or both be sign-extended. */
7920 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7921 ==
7922 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7923 {
7924 enum machine_mode innermode
7925 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7926 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7927 ? smul_widen_optab : umul_widen_optab);
7928 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7929 ? umul_widen_optab : smul_widen_optab);
7930 if (mode == GET_MODE_WIDER_MODE (innermode))
7931 {
7932 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7933 {
7934 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7935 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7936 TREE_OPERAND (exp, 1),
7937 NULL_RTX, &op0, &op1, 0);
7938 else
7939 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7940 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7941 NULL_RTX, &op0, &op1, 0);
7942 goto binop2;
7943 }
7944 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7945 && innermode == word_mode)
7946 {
7947 rtx htem;
7948 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7949 NULL_RTX, VOIDmode, 0);
7950 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7951 op1 = convert_modes (innermode, mode,
7952 expand_expr (TREE_OPERAND (exp, 1),
7953 NULL_RTX, VOIDmode, 0),
7954 unsignedp);
7955 else
7956 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7957 NULL_RTX, VOIDmode, 0);
7958 temp = expand_binop (mode, other_optab, op0, op1, target,
7959 unsignedp, OPTAB_LIB_WIDEN);
7960 htem = expand_mult_highpart_adjust (innermode,
7961 gen_highpart (innermode, temp),
7962 op0, op1,
7963 gen_highpart (innermode, temp),
7964 unsignedp);
7965 emit_move_insn (gen_highpart (innermode, temp), htem);
7966 return temp;
7967 }
7968 }
7969 }
7970 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7971 subtarget, &op0, &op1, 0);
7972 return expand_mult (mode, op0, op1, target, unsignedp);
7973
7974 case TRUNC_DIV_EXPR:
7975 case FLOOR_DIV_EXPR:
7976 case CEIL_DIV_EXPR:
7977 case ROUND_DIV_EXPR:
7978 case EXACT_DIV_EXPR:
7979 if (modifier == EXPAND_STACK_PARM)
7980 target = 0;
7981 /* Possible optimization: compute the dividend with EXPAND_SUM
7982 then if the divisor is constant can optimize the case
7983 where some terms of the dividend have coeffs divisible by it. */
7984 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7985 subtarget, &op0, &op1, 0);
7986 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7987
7988 case RDIV_EXPR:
7989 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7990 saving an expensive divide. If not, combine will rebuild the
7991 original computation. */
7992 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7993 && TREE_CODE (type) == REAL_TYPE
7994 && !real_onep (TREE_OPERAND (exp, 0)))
7995 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7996 build (RDIV_EXPR, type,
7997 build_real (type, dconst1),
7998 TREE_OPERAND (exp, 1))),
7999 target, tmode, modifier);
8000 this_optab = sdiv_optab;
8001 goto binop;
8002
8003 case TRUNC_MOD_EXPR:
8004 case FLOOR_MOD_EXPR:
8005 case CEIL_MOD_EXPR:
8006 case ROUND_MOD_EXPR:
8007 if (modifier == EXPAND_STACK_PARM)
8008 target = 0;
8009 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8010 subtarget, &op0, &op1, 0);
8011 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8012
8013 case FIX_ROUND_EXPR:
8014 case FIX_FLOOR_EXPR:
8015 case FIX_CEIL_EXPR:
8016 abort (); /* Not used for C. */
8017
8018 case FIX_TRUNC_EXPR:
8019 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8020 if (target == 0 || modifier == EXPAND_STACK_PARM)
8021 target = gen_reg_rtx (mode);
8022 expand_fix (target, op0, unsignedp);
8023 return target;
8024
8025 case FLOAT_EXPR:
8026 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8027 if (target == 0 || modifier == EXPAND_STACK_PARM)
8028 target = gen_reg_rtx (mode);
8029 /* expand_float can't figure out what to do if FROM has VOIDmode.
8030 So give it the correct mode. With -O, cse will optimize this. */
8031 if (GET_MODE (op0) == VOIDmode)
8032 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8033 op0);
8034 expand_float (target, op0,
8035 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8036 return target;
8037
8038 case NEGATE_EXPR:
8039 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8040 if (modifier == EXPAND_STACK_PARM)
8041 target = 0;
8042 temp = expand_unop (mode,
8043 ! unsignedp && flag_trapv
8044 && (GET_MODE_CLASS(mode) == MODE_INT)
8045 ? negv_optab : neg_optab, op0, target, 0);
8046 if (temp == 0)
8047 abort ();
8048 return temp;
8049
8050 case ABS_EXPR:
8051 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8052 if (modifier == EXPAND_STACK_PARM)
8053 target = 0;
8054
8055 /* ABS_EXPR is not valid for complex arguments. */
8056 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8057 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8058 abort ();
8059
8060 /* Unsigned abs is simply the operand. Testing here means we don't
8061 risk generating incorrect code below. */
8062 if (TREE_UNSIGNED (type))
8063 return op0;
8064
8065 return expand_abs (mode, op0, target, unsignedp,
8066 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8067
8068 case MAX_EXPR:
8069 case MIN_EXPR:
8070 target = original_target;
8071 if (target == 0
8072 || modifier == EXPAND_STACK_PARM
8073 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8074 || GET_MODE (target) != mode
8075 || (GET_CODE (target) == REG
8076 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8077 target = gen_reg_rtx (mode);
8078 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8079 target, &op0, &op1, 0);
8080
8081 /* First try to do it with a special MIN or MAX instruction.
8082 If that does not win, use a conditional jump to select the proper
8083 value. */
8084 this_optab = (TREE_UNSIGNED (type)
8085 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8086 : (code == MIN_EXPR ? smin_optab : smax_optab));
8087
8088 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8089 OPTAB_WIDEN);
8090 if (temp != 0)
8091 return temp;
8092
8093 /* At this point, a MEM target is no longer useful; we will get better
8094 code without it. */
8095
8096 if (GET_CODE (target) == MEM)
8097 target = gen_reg_rtx (mode);
8098
8099 /* If op1 was placed in target, swap op0 and op1. */
8100 if (target != op0 && target == op1)
8101 {
8102 rtx tem = op0;
8103 op0 = op1;
8104 op1 = tem;
8105 }
8106
8107 if (target != op0)
8108 emit_move_insn (target, op0);
8109
8110 op0 = gen_label_rtx ();
8111
8112 /* If this mode is an integer too wide to compare properly,
8113 compare word by word. Rely on cse to optimize constant cases. */
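	  /* E.g. (illustrative): a DImode MIN/MAX on a 32-bit target with
	     no direct DImode comparison is handled by
	     do_jump_by_parts_greater_rtx, which compares the high and low
	     words separately.  */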
8114 if (GET_MODE_CLASS (mode) == MODE_INT
8115 && ! can_compare_p (GE, mode, ccp_jump))
8116 {
8117 if (code == MAX_EXPR)
8118 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8119 target, op1, NULL_RTX, op0);
8120 else
8121 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8122 op1, target, NULL_RTX, op0);
8123 }
8124 else
8125 {
8126 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8127 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8128 unsignedp, mode, NULL_RTX, NULL_RTX,
8129 op0);
8130 }
8131 emit_move_insn (target, op1);
8132 emit_label (op0);
8133 return target;
8134
8135 case BIT_NOT_EXPR:
8136 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8137 if (modifier == EXPAND_STACK_PARM)
8138 target = 0;
8139 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8140 if (temp == 0)
8141 abort ();
8142 return temp;
8143
8144 /* ??? Can optimize bitwise operations with one arg constant.
8145 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8146 and (a bitwise1 b) bitwise2 b (etc)
8147 but that is probably not worthwhile. */
8148
8149 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8150 boolean values when we want in all cases to compute both of them. In
8151 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8152 as actual zero-or-1 values and then bitwise anding. In cases where
8153 there cannot be any side effects, better code would be made by
8154 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8155 how to recognize those cases. */
8156
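    /* E.g. (illustration): for a non-short-circuit boolean AND with no
       side effects, computing both operands as 0-or-1 values and
       AND-ing them avoids a branch, which is why TRUTH_AND_EXPR simply
       falls through to the bitwise AND below.  */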
8157 case TRUTH_AND_EXPR:
8158 case BIT_AND_EXPR:
8159 this_optab = and_optab;
8160 goto binop;
8161
8162 case TRUTH_OR_EXPR:
8163 case BIT_IOR_EXPR:
8164 this_optab = ior_optab;
8165 goto binop;
8166
8167 case TRUTH_XOR_EXPR:
8168 case BIT_XOR_EXPR:
8169 this_optab = xor_optab;
8170 goto binop;
8171
8172 case LSHIFT_EXPR:
8173 case RSHIFT_EXPR:
8174 case LROTATE_EXPR:
8175 case RROTATE_EXPR:
8176 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8177 subtarget = 0;
8178 if (modifier == EXPAND_STACK_PARM)
8179 target = 0;
8180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8181 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8182 unsignedp);
8183
8184 /* Could determine the answer when only additive constants differ. Also,
8185 the addition of one can be handled by changing the condition. */
8186 case LT_EXPR:
8187 case LE_EXPR:
8188 case GT_EXPR:
8189 case GE_EXPR:
8190 case EQ_EXPR:
8191 case NE_EXPR:
8192 case UNORDERED_EXPR:
8193 case ORDERED_EXPR:
8194 case UNLT_EXPR:
8195 case UNLE_EXPR:
8196 case UNGT_EXPR:
8197 case UNGE_EXPR:
8198 case UNEQ_EXPR:
8199 temp = do_store_flag (exp,
8200 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8201 tmode != VOIDmode ? tmode : mode, 0);
8202 if (temp != 0)
8203 return temp;
8204
8205 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8206 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8207 && original_target
8208 && GET_CODE (original_target) == REG
8209 && (GET_MODE (original_target)
8210 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8211 {
8212 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8213 VOIDmode, 0);
8214
8215 /* If temp is constant, we can just compute the result. */
8216 if (GET_CODE (temp) == CONST_INT)
8217 {
8218 if (INTVAL (temp) != 0)
8219 emit_move_insn (target, const1_rtx);
8220 else
8221 emit_move_insn (target, const0_rtx);
8222
8223 return target;
8224 }
8225
8226 if (temp != original_target)
8227 {
8228 enum machine_mode mode1 = GET_MODE (temp);
8229 if (mode1 == VOIDmode)
8230 mode1 = tmode != VOIDmode ? tmode : mode;
8231
8232 temp = copy_to_mode_reg (mode1, temp);
8233 }
8234
8235 op1 = gen_label_rtx ();
8236 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8237 GET_MODE (temp), unsignedp, op1);
8238 emit_move_insn (temp, const1_rtx);
8239 emit_label (op1);
8240 return temp;
8241 }
8242
8243 /* If no set-flag instruction, must generate a conditional
8244 store into a temporary variable. Drop through
8245 and handle this like && and ||. */
8246
8247 case TRUTH_ANDIF_EXPR:
8248 case TRUTH_ORIF_EXPR:
8249 if (! ignore
8250 && (target == 0
8251 || modifier == EXPAND_STACK_PARM
8252 || ! safe_from_p (target, exp, 1)
8253 /* Make sure we don't have a hard reg (such as function's return
8254 value) live across basic blocks, if not optimizing. */
8255 || (!optimize && GET_CODE (target) == REG
8256 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8257 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8258
8259 if (target)
8260 emit_clr_insn (target);
8261
8262 op1 = gen_label_rtx ();
8263 jumpifnot (exp, op1);
8264
8265 if (target)
8266 emit_0_to_1_insn (target);
8267
8268 emit_label (op1);
8269 return ignore ? const0_rtx : target;
8270
8271 case TRUTH_NOT_EXPR:
8272 if (modifier == EXPAND_STACK_PARM)
8273 target = 0;
8274 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8275 /* The parser is careful to generate TRUTH_NOT_EXPR
8276 only with operands that are always zero or one. */
8277 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8278 target, 1, OPTAB_LIB_WIDEN);
8279 if (temp == 0)
8280 abort ();
8281 return temp;
8282
8283 case COMPOUND_EXPR:
8284 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8285 emit_queue ();
8286 return expand_expr (TREE_OPERAND (exp, 1),
8287 (ignore ? const0_rtx : target),
8288 VOIDmode, modifier);
8289
8290 case COND_EXPR:
8291 /* If we would have a "singleton" (see below) were it not for a
8292 conversion in each arm, bring that conversion back out. */
8293 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8294 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8295 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8296 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8297 {
8298 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8299 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8300
8301 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8302 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8303 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8304 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8305 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8306 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8307 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8308 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8309 return expand_expr (build1 (NOP_EXPR, type,
8310 build (COND_EXPR, TREE_TYPE (iftrue),
8311 TREE_OPERAND (exp, 0),
8312 iftrue, iffalse)),
8313 target, tmode, modifier);
8314 }
8315
8316 {
8317 /* Note that COND_EXPRs whose type is a structure or union
8318 are required to be constructed to contain assignments of
8319 a temporary variable, so that we can evaluate them here
8320 for side effect only. If type is void, we must do likewise. */
8321
8322 /* If an arm of the branch requires a cleanup,
8323 only that cleanup is performed. */
8324
8325 tree singleton = 0;
8326 tree binary_op = 0, unary_op = 0;
8327
8328 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8329 convert it to our mode, if necessary. */
8330 if (integer_onep (TREE_OPERAND (exp, 1))
8331 && integer_zerop (TREE_OPERAND (exp, 2))
8332 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8333 {
8334 if (ignore)
8335 {
8336 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8337 modifier);
8338 return const0_rtx;
8339 }
8340
8341 if (modifier == EXPAND_STACK_PARM)
8342 target = 0;
8343 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8344 if (GET_MODE (op0) == mode)
8345 return op0;
8346
8347 if (target == 0)
8348 target = gen_reg_rtx (mode);
8349 convert_move (target, op0, unsignedp);
8350 return target;
8351 }
8352
8353 /* Check for X ? A + B : A. If we have this, we can copy A to the
8354 output and conditionally add B. Similarly for unary operations.
8355 Don't do this if X has side-effects because those side effects
8356 might affect A or B and the "?" operation is a sequence point in
8357 ANSI. (operand_equal_p tests for side effects.) */
8358
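	/* E.g. (illustrative): for "c ? n + 1 : n", SINGLETON is N and
	   BINARY_OP is the PLUS_EXPR, so N can be copied to the output
	   and the increment applied only when C is true.  */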
8359 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8360 && operand_equal_p (TREE_OPERAND (exp, 2),
8361 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8362 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8363 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8364 && operand_equal_p (TREE_OPERAND (exp, 1),
8365 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8366 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8367 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8368 && operand_equal_p (TREE_OPERAND (exp, 2),
8369 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8370 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8371 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8372 && operand_equal_p (TREE_OPERAND (exp, 1),
8373 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8374 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8375
8376 /* If we are not to produce a result, we have no target. Otherwise,
8377 if a target was specified use it; it will not be used as an
8378 intermediate target unless it is safe. If no target, use a
8379 temporary. */
8380
8381 if (ignore)
8382 temp = 0;
8383 else if (modifier == EXPAND_STACK_PARM)
8384 temp = assign_temp (type, 0, 0, 1);
8385 else if (original_target
8386 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8387 || (singleton && GET_CODE (original_target) == REG
8388 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8389 && original_target == var_rtx (singleton)))
8390 && GET_MODE (original_target) == mode
8391 #ifdef HAVE_conditional_move
8392 && (! can_conditionally_move_p (mode)
8393 || GET_CODE (original_target) == REG
8394 || TREE_ADDRESSABLE (type))
8395 #endif
8396 && (GET_CODE (original_target) != MEM
8397 || TREE_ADDRESSABLE (type)))
8398 temp = original_target;
8399 else if (TREE_ADDRESSABLE (type))
8400 abort ();
8401 else
8402 temp = assign_temp (type, 0, 0, 1);
8403
8404 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8405 do the test of X as a store-flag operation, do this as
8406 A + ((X != 0) << log C). Similarly for other simple binary
8407 operators. Only do for C == 1 if BRANCH_COST is low. */
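	/* Example (illustrative only): for "x ? a + 4 : a" this emits
	   a + ((x != 0) << 2), i.e. a store-flag of X shifted left by
	   log2 (4), avoiding a conditional branch entirely.  */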
8408 if (temp && singleton && binary_op
8409 && (TREE_CODE (binary_op) == PLUS_EXPR
8410 || TREE_CODE (binary_op) == MINUS_EXPR
8411 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8412 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8413 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8414 : integer_onep (TREE_OPERAND (binary_op, 1)))
8415 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8416 {
8417 rtx result;
8418 tree cond;
8419 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8420 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8421 ? addv_optab : add_optab)
8422 : TREE_CODE (binary_op) == MINUS_EXPR
8423 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8424 ? subv_optab : sub_optab)
8425 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8426 : xor_optab);
8427
8428 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8429 if (singleton == TREE_OPERAND (exp, 1))
8430 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8431 else
8432 cond = TREE_OPERAND (exp, 0);
8433
8434 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8435 ? temp : NULL_RTX),
8436 mode, BRANCH_COST <= 1);
8437
8438 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8439 result = expand_shift (LSHIFT_EXPR, mode, result,
8440 build_int_2 (tree_log2
8441 (TREE_OPERAND
8442 (binary_op, 1)),
8443 0),
8444 (safe_from_p (temp, singleton, 1)
8445 ? temp : NULL_RTX), 0);
8446
8447 if (result)
8448 {
8449 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8450 return expand_binop (mode, boptab, op1, result, temp,
8451 unsignedp, OPTAB_LIB_WIDEN);
8452 }
8453 }
8454
8455 do_pending_stack_adjust ();
8456 NO_DEFER_POP;
8457 op0 = gen_label_rtx ();
8458
8459 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8460 {
8461 if (temp != 0)
8462 {
8463 /* If the target conflicts with the other operand of the
8464 binary op, we can't use it. Also, we can't use the target
8465 if it is a hard register, because evaluating the condition
8466 might clobber it. */
8467 if ((binary_op
8468 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8469 || (GET_CODE (temp) == REG
8470 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8471 temp = gen_reg_rtx (mode);
8472 store_expr (singleton, temp,
8473 modifier == EXPAND_STACK_PARM ? 2 : 0);
8474 }
8475 else
8476 expand_expr (singleton,
8477 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8478 if (singleton == TREE_OPERAND (exp, 1))
8479 jumpif (TREE_OPERAND (exp, 0), op0);
8480 else
8481 jumpifnot (TREE_OPERAND (exp, 0), op0);
8482
8483 start_cleanup_deferral ();
8484 if (binary_op && temp == 0)
8485 /* Just touch the other operand. */
8486 expand_expr (TREE_OPERAND (binary_op, 1),
8487 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8488 else if (binary_op)
8489 store_expr (build (TREE_CODE (binary_op), type,
8490 make_tree (type, temp),
8491 TREE_OPERAND (binary_op, 1)),
8492 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8493 else
8494 store_expr (build1 (TREE_CODE (unary_op), type,
8495 make_tree (type, temp)),
8496 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8497 op1 = op0;
8498 }
8499 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8500 comparison operator. If we have one of these cases, set the
8501 output to A, branch on A (cse will merge these two references),
8502 then set the output to FOO. */
8503 else if (temp
8504 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8505 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8506 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8507 TREE_OPERAND (exp, 1), 0)
8508 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8509 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8510 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8511 {
8512 if (GET_CODE (temp) == REG
8513 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8514 temp = gen_reg_rtx (mode);
8515 store_expr (TREE_OPERAND (exp, 1), temp,
8516 modifier == EXPAND_STACK_PARM ? 2 : 0);
8517 jumpif (TREE_OPERAND (exp, 0), op0);
8518
8519 start_cleanup_deferral ();
8520 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8521 store_expr (TREE_OPERAND (exp, 2), temp,
8522 modifier == EXPAND_STACK_PARM ? 2 : 0);
8523 else
8524 expand_expr (TREE_OPERAND (exp, 2),
8525 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8526 op1 = op0;
8527 }
8528 else if (temp
8529 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8530 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8531 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8532 TREE_OPERAND (exp, 2), 0)
8533 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8534 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8535 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8536 {
8537 if (GET_CODE (temp) == REG
8538 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8539 temp = gen_reg_rtx (mode);
8540 store_expr (TREE_OPERAND (exp, 2), temp,
8541 modifier == EXPAND_STACK_PARM ? 2 : 0);
8542 jumpifnot (TREE_OPERAND (exp, 0), op0);
8543
8544 start_cleanup_deferral ();
8545 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8546 store_expr (TREE_OPERAND (exp, 1), temp,
8547 modifier == EXPAND_STACK_PARM ? 2 : 0);
8548 else
8549 expand_expr (TREE_OPERAND (exp, 1),
8550 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8551 op1 = op0;
8552 }
8553 else
8554 {
8555 op1 = gen_label_rtx ();
8556 jumpifnot (TREE_OPERAND (exp, 0), op0);
8557
8558 start_cleanup_deferral ();
8559
8560 /* One branch of the cond can be void, if it never returns. For
8561 example, A ? throw : E.  */
8562 if (temp != 0
8563 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8564 store_expr (TREE_OPERAND (exp, 1), temp,
8565 modifier == EXPAND_STACK_PARM ? 2 : 0);
8566 else
8567 expand_expr (TREE_OPERAND (exp, 1),
8568 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8569 end_cleanup_deferral ();
8570 emit_queue ();
8571 emit_jump_insn (gen_jump (op1));
8572 emit_barrier ();
8573 emit_label (op0);
8574 start_cleanup_deferral ();
8575 if (temp != 0
8576 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8577 store_expr (TREE_OPERAND (exp, 2), temp,
8578 modifier == EXPAND_STACK_PARM ? 2 : 0);
8579 else
8580 expand_expr (TREE_OPERAND (exp, 2),
8581 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8582 }
8583
8584 end_cleanup_deferral ();
8585
8586 emit_queue ();
8587 emit_label (op1);
8588 OK_DEFER_POP;
8589
8590 return temp;
8591 }
8592
8593 case TARGET_EXPR:
8594 {
8595 /* Something needs to be initialized, but we didn't know
8596 where that thing was when building the tree. For example,
8597 it could be the return value of a function, or a parameter
8598 to a function which is laid out on the stack, or a temporary
8599 variable which must be passed by reference.
8600
8601 We guarantee that the expression will either be constructed
8602 or copied into our original target. */
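	    /* Editorial example (illustrative): in C++, passing a temporary
	       by reference,

		   struct S { S (); };
		   void g (const S &);
		   ...
		   g (S ());

	       yields a TARGET_EXPR for the temporary S object; SLOT below is
	       the VAR_DECL for that temporary and operand 1 is the
	       initializer that gets expanded into it.  */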
8603
8604 tree slot = TREE_OPERAND (exp, 0);
8605 tree cleanups = NULL_TREE;
8606 tree exp1;
8607
8608 if (TREE_CODE (slot) != VAR_DECL)
8609 abort ();
8610
8611 if (! ignore)
8612 target = original_target;
8613
8614 /* Set this here so that if we get a target that refers to a
8615 register variable that's already been used, put_reg_into_stack
8616 knows that it should fix up those uses. */
8617 TREE_USED (slot) = 1;
8618
8619 if (target == 0)
8620 {
8621 if (DECL_RTL_SET_P (slot))
8622 {
8623 target = DECL_RTL (slot);
8624 /* If we have already expanded the slot, don't do
8625 it again.  (mrs) */
8626 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8627 return target;
8628 }
8629 else
8630 {
8631 target = assign_temp (type, 2, 0, 1);
8632 /* All temp slots at this level must not conflict. */
8633 preserve_temp_slots (target);
8634 SET_DECL_RTL (slot, target);
8635 if (TREE_ADDRESSABLE (slot))
8636 put_var_into_stack (slot, /*rescan=*/false);
8637
8638 /* Since SLOT is not known to the called function
8639 to belong to its stack frame, we must build an explicit
8640 cleanup.  This case occurs when we must build up a reference
8641 in order to pass it as an argument.  In this case,
8642 it is very likely that such a reference need not be
8643 built here. */
8644
8645 if (TREE_OPERAND (exp, 2) == 0)
8646 TREE_OPERAND (exp, 2)
8647 = (*lang_hooks.maybe_build_cleanup) (slot);
8648 cleanups = TREE_OPERAND (exp, 2);
8649 }
8650 }
8651 else
8652 {
8653 /* This case does occur, when expanding a parameter which
8654 needs to be constructed on the stack. The target
8655 is the actual stack address that we want to initialize.
8656 The function we call will perform the cleanup in this case. */
8657
8658 /* If we have already assigned it space, use that space,
8659 not the target that we were passed in, as our target
8660 parameter is only a hint. */
8661 if (DECL_RTL_SET_P (slot))
8662 {
8663 target = DECL_RTL (slot);
8664 /* If we have already expanded the slot, don't do
8665 it again.  (mrs) */
8666 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8667 return target;
8668 }
8669 else
8670 {
8671 SET_DECL_RTL (slot, target);
8672 /* If we must have an addressable slot, then make sure that
8673 the RTL that we just stored in slot is OK. */
8674 if (TREE_ADDRESSABLE (slot))
8675 put_var_into_stack (slot, /*rescan=*/true);
8676 }
8677 }
8678
8679 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8680 /* Mark it as expanded. */
8681 TREE_OPERAND (exp, 1) = NULL_TREE;
8682
8683 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8684
8685 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8686
8687 return target;
8688 }
8689
8690 case INIT_EXPR:
8691 {
8692 tree lhs = TREE_OPERAND (exp, 0);
8693 tree rhs = TREE_OPERAND (exp, 1);
8694
8695 temp = expand_assignment (lhs, rhs, ! ignore);
8696 return temp;
8697 }
8698
8699 case MODIFY_EXPR:
8700 {
8701 /* If lhs is complex, expand calls in rhs before computing it.
8702 That's so we don't compute a pointer and save it over a
8703 call. If lhs is simple, compute it first so we can give it
8704 as a target if the rhs is just a call. This avoids an
8705 extra temp and copy and that prevents a partial-subsumption
8706 which makes bad code. Actually we could treat
8707 component_ref's of vars like vars. */
8708
8709 tree lhs = TREE_OPERAND (exp, 0);
8710 tree rhs = TREE_OPERAND (exp, 1);
8711
8712 temp = 0;
8713
8714 /* Check for |= or &= of a bitfield of size one into another bitfield
8715 of size 1. In this case, (unless we need the result of the
8716 assignment) we can do this more efficiently with a
8717 test followed by an assignment, if necessary.
8718
8719 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8720 things change so we do, this code should be enhanced to
8721 support it. */
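	    /* Editorial example (illustrative): for

		   struct { unsigned a : 1, b : 1; } s;
		   s.a |= s.b;

	       with the result ignored, the expansion below is roughly
	       "if (s.b) s.a = 1;" (and "if (! s.b) s.a = 0;" for &=),
	       which avoids a read-modify-write of the destination bit.  */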
8722 if (ignore
8723 && TREE_CODE (lhs) == COMPONENT_REF
8724 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8725 || TREE_CODE (rhs) == BIT_AND_EXPR)
8726 && TREE_OPERAND (rhs, 0) == lhs
8727 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8728 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8729 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8730 {
8731 rtx label = gen_label_rtx ();
8732
8733 do_jump (TREE_OPERAND (rhs, 1),
8734 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8735 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8736 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8737 (TREE_CODE (rhs) == BIT_IOR_EXPR
8738 ? integer_one_node
8739 : integer_zero_node)),
8740 0);
8741 do_pending_stack_adjust ();
8742 emit_label (label);
8743 return const0_rtx;
8744 }
8745
8746 temp = expand_assignment (lhs, rhs, ! ignore);
8747
8748 return temp;
8749 }
8750
8751 case RETURN_EXPR:
8752 if (!TREE_OPERAND (exp, 0))
8753 expand_null_return ();
8754 else
8755 expand_return (TREE_OPERAND (exp, 0));
8756 return const0_rtx;
8757
8758 case PREINCREMENT_EXPR:
8759 case PREDECREMENT_EXPR:
8760 return expand_increment (exp, 0, ignore);
8761
8762 case POSTINCREMENT_EXPR:
8763 case POSTDECREMENT_EXPR:
8764 /* Faster to treat as pre-increment if result is not used. */
8765 return expand_increment (exp, ! ignore, ignore);
8766
8767 case ADDR_EXPR:
8768 if (modifier == EXPAND_STACK_PARM)
8769 target = 0;
8770 /* Are we taking the address of a nested function? */
8771 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8772 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8773 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8774 && ! TREE_STATIC (exp))
8775 {
8776 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8777 op0 = force_operand (op0, target);
8778 }
8779 /* If we are taking the address of something erroneous, just
8780 return a zero. */
8781 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8782 return const0_rtx;
8783 /* If we are taking the address of a constant and are at the
8784 top level, we have to use output_constant_def since we can't
8785 call force_const_mem at top level. */
8786 else if (cfun == 0
8787 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8788 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8789 == 'c')))
8790 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8791 else
8792 {
8793 /* We make sure to pass const0_rtx down if we came in with
8794 ignore set, to avoid doing the cleanups twice for something. */
8795 op0 = expand_expr (TREE_OPERAND (exp, 0),
8796 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8797 (modifier == EXPAND_INITIALIZER
8798 ? modifier : EXPAND_CONST_ADDRESS));
8799
8800 /* If we are going to ignore the result, OP0 will have been set
8801 to const0_rtx, so just return it. Don't get confused and
8802 think we are taking the address of the constant. */
8803 if (ignore)
8804 return op0;
8805
8806 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8807 clever and returns a REG when given a MEM. */
8808 op0 = protect_from_queue (op0, 1);
8809
8810 /* We would like the object in memory. If it is a constant, we can
8811 have it be statically allocated into memory. For a non-constant,
8812 we need to allocate some memory and store the value into it. */
8813
8814 if (CONSTANT_P (op0))
8815 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8816 op0);
8817 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8818 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8819 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8820 {
8821 /* If the operand is a SAVE_EXPR, we can deal with this by
8822 forcing the SAVE_EXPR into memory. */
8823 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8824 {
8825 put_var_into_stack (TREE_OPERAND (exp, 0),
8826 /*rescan=*/true);
8827 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8828 }
8829 else
8830 {
8831 /* If this object is in a register, it can't be BLKmode. */
8832 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8833 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8834
8835 if (GET_CODE (op0) == PARALLEL)
8836 /* Handle calls that pass values in multiple
8837 non-contiguous locations. The Irix 6 ABI has examples
8838 of this. */
8839 emit_group_store (memloc, op0, inner_type,
8840 int_size_in_bytes (inner_type));
8841 else
8842 emit_move_insn (memloc, op0);
8843
8844 op0 = memloc;
8845 }
8846 }
8847
8848 if (GET_CODE (op0) != MEM)
8849 abort ();
8850
8851 mark_temp_addr_taken (op0);
8852 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8853 {
8854 op0 = XEXP (op0, 0);
8855 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8856 op0 = convert_memory_address (ptr_mode, op0);
8857 return op0;
8858 }
8859
8860 /* If OP0 is not aligned at least as much as the type requires, we
8861 need to make a temporary, copy OP0 to it, and take the address of
8862 the temporary. We want to use the alignment of the type, not of
8863 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8864 the test for BLKmode means that can't happen. The test for
8865 BLKmode is because we never make mis-aligned MEMs with
8866 non-BLKmode.
8867
8868 We don't need to do this at all if the machine doesn't have
8869 strict alignment. */
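	    /* Editorial example (illustrative): on a strict-alignment target,

		   struct s { int x[4]; };
		   struct __attribute__ ((packed)) p { char c; struct s m; } v;
		   ... &v.m ...

	       V.M is a BLKmode object whose MEM_ALIGN is smaller than what
	       "struct s" requires, so the code below copies it into an
	       aligned stack temporary and returns that temporary's address
	       (or reports an error if the type cannot be copied bitwise).  */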
8870 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8871 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8872 > MEM_ALIGN (op0))
8873 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8874 {
8875 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8876 rtx new;
8877
8878 if (TYPE_ALIGN_OK (inner_type))
8879 abort ();
8880
8881 if (TREE_ADDRESSABLE (inner_type))
8882 {
8883 /* We can't make a bitwise copy of this object, so fail. */
8884 error ("cannot take the address of an unaligned member");
8885 return const0_rtx;
8886 }
8887
8888 new = assign_stack_temp_for_type
8889 (TYPE_MODE (inner_type),
8890 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8891 : int_size_in_bytes (inner_type),
8892 1, build_qualified_type (inner_type,
8893 (TYPE_QUALS (inner_type)
8894 | TYPE_QUAL_CONST)));
8895
8896 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8897 (modifier == EXPAND_STACK_PARM
8898 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8899
8900 op0 = new;
8901 }
8902
8903 op0 = force_operand (XEXP (op0, 0), target);
8904 }
8905
8906 if (flag_force_addr
8907 && GET_CODE (op0) != REG
8908 && modifier != EXPAND_CONST_ADDRESS
8909 && modifier != EXPAND_INITIALIZER
8910 && modifier != EXPAND_SUM)
8911 op0 = force_reg (Pmode, op0);
8912
8913 if (GET_CODE (op0) == REG
8914 && ! REG_USERVAR_P (op0))
8915 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8916
8917 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8918 op0 = convert_memory_address (ptr_mode, op0);
8919
8920 return op0;
8921
8922 case ENTRY_VALUE_EXPR:
8923 abort ();
8924
8925 /* COMPLEX type for Extended Pascal & Fortran */
8926 case COMPLEX_EXPR:
8927 {
8928 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8929 rtx insns;
8930
8931 /* Get the rtx code of the operands. */
8932 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8933 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8934
8935 if (! target)
8936 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8937
8938 start_sequence ();
8939
8940 /* Move the real (op0) and imaginary (op1) parts to their location. */
8941 emit_move_insn (gen_realpart (mode, target), op0);
8942 emit_move_insn (gen_imagpart (mode, target), op1);
8943
8944 insns = get_insns ();
8945 end_sequence ();
8946
8947 /* Complex construction should appear as a single unit. */
8948 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8949 each with a separate pseudo as destination.
8950 It's not correct for flow to treat them as a unit. */
8951 if (GET_CODE (target) != CONCAT)
8952 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8953 else
8954 emit_insn (insns);
8955
8956 return target;
8957 }
8958
8959 case REALPART_EXPR:
8960 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8961 return gen_realpart (mode, op0);
8962
8963 case IMAGPART_EXPR:
8964 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8965 return gen_imagpart (mode, op0);
8966
8967 case CONJ_EXPR:
8968 {
8969 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8970 rtx imag_t;
8971 rtx insns;
8972
8973 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8974
8975 if (! target)
8976 target = gen_reg_rtx (mode);
8977
8978 start_sequence ();
8979
8980 /* Store the realpart and the negated imagpart to target. */
8981 emit_move_insn (gen_realpart (partmode, target),
8982 gen_realpart (partmode, op0));
8983
8984 imag_t = gen_imagpart (partmode, target);
8985 temp = expand_unop (partmode,
8986 ! unsignedp && flag_trapv
8987 && (GET_MODE_CLASS(partmode) == MODE_INT)
8988 ? negv_optab : neg_optab,
8989 gen_imagpart (partmode, op0), imag_t, 0);
8990 if (temp != imag_t)
8991 emit_move_insn (imag_t, temp);
8992
8993 insns = get_insns ();
8994 end_sequence ();
8995
8996 /* Conjugate should appear as a single unit.
8997 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8998 each with a separate pseudo as destination.
8999 It's not correct for flow to treat them as a unit. */
9000 if (GET_CODE (target) != CONCAT)
9001 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9002 else
9003 emit_insn (insns);
9004
9005 return target;
9006 }
9007
9008 case TRY_CATCH_EXPR:
9009 {
9010 tree handler = TREE_OPERAND (exp, 1);
9011
9012 expand_eh_region_start ();
9013
9014 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9015
9016 expand_eh_region_end_cleanup (handler);
9017
9018 return op0;
9019 }
9020
9021 case TRY_FINALLY_EXPR:
9022 {
9023 tree try_block = TREE_OPERAND (exp, 0);
9024 tree finally_block = TREE_OPERAND (exp, 1);
9025
9026 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9027 {
9028 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9029 is not sufficient, so we cannot expand the block twice.
9030 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
9031 expand the thing only once. */
9032 /* When not optimizing, we go ahead with this form since
9033 (1) user breakpoints operate more predictably without
9034 code duplication, and
9035 (2) we're not running any of the global optimizers
9036 that would explode in time/space with the highly
9037 connected CFG created by the indirect branching. */
9038
9039 rtx finally_label = gen_label_rtx ();
9040 rtx done_label = gen_label_rtx ();
9041 rtx return_link = gen_reg_rtx (Pmode);
9042 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9043 (tree) finally_label, (tree) return_link);
9044 TREE_SIDE_EFFECTS (cleanup) = 1;
9045
9046 /* Start a new binding layer that will keep track of all cleanup
9047 actions to be performed. */
9048 expand_start_bindings (2);
9049 target_temp_slot_level = temp_slot_level;
9050
9051 expand_decl_cleanup (NULL_TREE, cleanup);
9052 op0 = expand_expr (try_block, target, tmode, modifier);
9053
9054 preserve_temp_slots (op0);
9055 expand_end_bindings (NULL_TREE, 0, 0);
9056 emit_jump (done_label);
9057 emit_label (finally_label);
9058 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9059 emit_indirect_jump (return_link);
9060 emit_label (done_label);
9061 }
9062 else
9063 {
9064 expand_start_bindings (2);
9065 target_temp_slot_level = temp_slot_level;
9066
9067 expand_decl_cleanup (NULL_TREE, finally_block);
9068 op0 = expand_expr (try_block, target, tmode, modifier);
9069
9070 preserve_temp_slots (op0);
9071 expand_end_bindings (NULL_TREE, 0, 0);
9072 }
9073
9074 return op0;
9075 }
9076
9077 case GOTO_SUBROUTINE_EXPR:
9078 {
9079 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9080 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9081 rtx return_address = gen_label_rtx ();
9082 emit_move_insn (return_link,
9083 gen_rtx_LABEL_REF (Pmode, return_address));
9084 emit_jump (subr);
9085 emit_label (return_address);
9086 return const0_rtx;
9087 }
9088
9089 case VA_ARG_EXPR:
9090 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9091
9092 case EXC_PTR_EXPR:
9093 return get_exception_pointer (cfun);
9094
9095 case FDESC_EXPR:
9096 /* Function descriptors are not valid except for as
9097 initialization constants, and should not be expanded. */
9098 abort ();
9099
9100 default:
9101 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9102 }
9103
9104 /* Here to do an ordinary binary operator, generating an instruction
9105 from the optab already placed in `this_optab'. */
9106 binop:
9107 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9108 subtarget, &op0, &op1, 0);
9109 binop2:
9110 if (modifier == EXPAND_STACK_PARM)
9111 target = 0;
9112 temp = expand_binop (mode, this_optab, op0, op1, target,
9113 unsignedp, OPTAB_LIB_WIDEN);
9114 if (temp == 0)
9115 abort ();
9116 return temp;
9117 }
9118 \f
9119 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9120 when applied to the address of EXP produces an address known to be
9121 aligned more than BIGGEST_ALIGNMENT. */
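/* Editorial note (illustrative): the pattern recognized below is the usual
   address-rounding idiom, an offset of the form

       (- (intptr_t) &exp) & (N - 1)

   for some power of two N larger than BIGGEST_ALIGNMENT; adding such an
   offset to &EXP rounds the address up to an N-byte boundary.  */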
9122
9123 static int
9124 is_aligning_offset (tree offset, tree exp)
9125 {
9126 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9127 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9128 || TREE_CODE (offset) == NOP_EXPR
9129 || TREE_CODE (offset) == CONVERT_EXPR
9130 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9131 offset = TREE_OPERAND (offset, 0);
9132
9133 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9134 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9135 if (TREE_CODE (offset) != BIT_AND_EXPR
9136 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9137 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9138 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9139 return 0;
9140
9141 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9142 It must be NEGATE_EXPR. Then strip any more conversions. */
9143 offset = TREE_OPERAND (offset, 0);
9144 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9145 || TREE_CODE (offset) == NOP_EXPR
9146 || TREE_CODE (offset) == CONVERT_EXPR)
9147 offset = TREE_OPERAND (offset, 0);
9148
9149 if (TREE_CODE (offset) != NEGATE_EXPR)
9150 return 0;
9151
9152 offset = TREE_OPERAND (offset, 0);
9153 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9154 || TREE_CODE (offset) == NOP_EXPR
9155 || TREE_CODE (offset) == CONVERT_EXPR)
9156 offset = TREE_OPERAND (offset, 0);
9157
9158 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9159 whose type is the same as EXP. */
9160 return (TREE_CODE (offset) == ADDR_EXPR
9161 && (TREE_OPERAND (offset, 0) == exp
9162 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9163 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9164 == TREE_TYPE (exp)))));
9165 }
9166 \f
9167 /* Return the tree node if an ARG corresponds to a string constant or zero
9168 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9169 in bytes within the string that ARG is accessing. The type of the
9170 offset will be `sizetype'. */
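/* Editorial example (illustrative): for ARG equal to the folded form of
   &"hello"[2], i.e. the address of the STRING_CST "hello" plus the constant
   2, this returns the STRING_CST and sets *PTR_OFFSET to 2; for a plain
   &"hello" the offset returned is zero.  */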
9171
9172 tree
9173 string_constant (tree arg, tree *ptr_offset)
9174 {
9175 STRIP_NOPS (arg);
9176
9177 if (TREE_CODE (arg) == ADDR_EXPR
9178 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9179 {
9180 *ptr_offset = size_zero_node;
9181 return TREE_OPERAND (arg, 0);
9182 }
9183 else if (TREE_CODE (arg) == PLUS_EXPR)
9184 {
9185 tree arg0 = TREE_OPERAND (arg, 0);
9186 tree arg1 = TREE_OPERAND (arg, 1);
9187
9188 STRIP_NOPS (arg0);
9189 STRIP_NOPS (arg1);
9190
9191 if (TREE_CODE (arg0) == ADDR_EXPR
9192 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9193 {
9194 *ptr_offset = convert (sizetype, arg1);
9195 return TREE_OPERAND (arg0, 0);
9196 }
9197 else if (TREE_CODE (arg1) == ADDR_EXPR
9198 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9199 {
9200 *ptr_offset = convert (sizetype, arg0);
9201 return TREE_OPERAND (arg1, 0);
9202 }
9203 }
9204
9205 return 0;
9206 }
9207 \f
9208 /* Expand code for a post- or pre- increment or decrement
9209 and return the RTX for the result.
9210 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9211
9212 static rtx
9213 expand_increment (tree exp, int post, int ignore)
9214 {
9215 rtx op0, op1;
9216 rtx temp, value;
9217 tree incremented = TREE_OPERAND (exp, 0);
9218 optab this_optab = add_optab;
9219 int icode;
9220 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9221 int op0_is_copy = 0;
9222 int single_insn = 0;
9223 /* 1 means we can't store into OP0 directly,
9224 because it is a subreg narrower than a word,
9225 and we don't dare clobber the rest of the word. */
9226 int bad_subreg = 0;
9227
9228 /* Stabilize any component ref that might need to be
9229 evaluated more than once below. */
9230 if (!post
9231 || TREE_CODE (incremented) == BIT_FIELD_REF
9232 || (TREE_CODE (incremented) == COMPONENT_REF
9233 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9234 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9235 incremented = stabilize_reference (incremented);
9236 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9237 ones into save exprs so that they don't accidentally get evaluated
9238 more than once by the code below. */
9239 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9240 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9241 incremented = save_expr (incremented);
9242
9243 /* Compute the operands as RTX.
9244 Note whether OP0 is the actual lvalue or a copy of it:
9245 I believe it is a copy iff it is a register or subreg
9246 and insns were generated in computing it. */
9247
9248 temp = get_last_insn ();
9249 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9250
9251 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9252 in place but instead must do sign- or zero-extension during assignment,
9253 so we copy it into a new register and let the code below use it as
9254 a copy.
9255
9256 Note that we can safely modify this SUBREG since it is known not to be
9257 shared (it was made by the expand_expr call above). */
9258
9259 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9260 {
9261 if (post)
9262 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9263 else
9264 bad_subreg = 1;
9265 }
9266 else if (GET_CODE (op0) == SUBREG
9267 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9268 {
9269 /* We cannot increment this SUBREG in place. If we are
9270 post-incrementing, get a copy of the old value. Otherwise,
9271 just mark that we cannot increment in place. */
9272 if (post)
9273 op0 = copy_to_reg (op0);
9274 else
9275 bad_subreg = 1;
9276 }
9277
9278 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9279 && temp != get_last_insn ());
9280 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9281
9282 /* Decide whether incrementing or decrementing. */
9283 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9284 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9285 this_optab = sub_optab;
9286
9287 /* Convert decrement by a constant into a negative increment. */
9288 if (this_optab == sub_optab
9289 && GET_CODE (op1) == CONST_INT)
9290 {
9291 op1 = GEN_INT (-INTVAL (op1));
9292 this_optab = add_optab;
9293 }
9294
9295 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9296 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9297
9298 /* For a preincrement, see if we can do this with a single instruction. */
9299 if (!post)
9300 {
9301 icode = (int) this_optab->handlers[(int) mode].insn_code;
9302 if (icode != (int) CODE_FOR_nothing
9303 /* Make sure that OP0 is valid for operands 0 and 1
9304 of the insn we want to queue. */
9305 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9306 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9307 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9308 single_insn = 1;
9309 }
9310
9311 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9312 then we cannot just increment OP0. We must therefore contrive to
9313 increment the original value. Then, for postincrement, we can return
9314 OP0 since it is a copy of the old value. For preincrement, expand here
9315 unless we can do it with a single insn.
9316
9317 Likewise if storing directly into OP0 would clobber high bits
9318 we need to preserve (bad_subreg). */
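      /* Editorial example (illustrative): for "++c" where C is a narrow
	 variable promoted into a wider register (a SUBREG with
	 SUBREG_PROMOTED_VAR_P set), we cannot bump the register in place;
	 the code below instead rebuilds the increment as the assignment
	 "c = c + 1" and expands that, so expand_assignment redoes the
	 required sign- or zero-extension.  */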
9319 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9320 {
9321 /* This is the easiest way to increment the value wherever it is.
9322 Problems with multiple evaluation of INCREMENTED are prevented
9323 because either (1) it is a component_ref or preincrement,
9324 in which case it was stabilized above, or (2) it is an array_ref
9325 with constant index in an array in a register, which is
9326 safe to reevaluate. */
9327 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9328 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9329 ? MINUS_EXPR : PLUS_EXPR),
9330 TREE_TYPE (exp),
9331 incremented,
9332 TREE_OPERAND (exp, 1));
9333
9334 while (TREE_CODE (incremented) == NOP_EXPR
9335 || TREE_CODE (incremented) == CONVERT_EXPR)
9336 {
9337 newexp = convert (TREE_TYPE (incremented), newexp);
9338 incremented = TREE_OPERAND (incremented, 0);
9339 }
9340
9341 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9342 return post ? op0 : temp;
9343 }
9344
9345 if (post)
9346 {
9347 /* We have a true reference to the value in OP0.
9348 If there is an insn to add or subtract in this mode, queue it.
9349 Queuing the increment insn avoids the register shuffling
9350 that often results if we must increment now and first save
9351 the old value for subsequent use. */
9352
9353 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9354 op0 = stabilize (op0);
9355 #endif
9356
9357 icode = (int) this_optab->handlers[(int) mode].insn_code;
9358 if (icode != (int) CODE_FOR_nothing
9359 /* Make sure that OP0 is valid for operands 0 and 1
9360 of the insn we want to queue. */
9361 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9362 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9363 {
9364 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9365 op1 = force_reg (mode, op1);
9366
9367 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9368 }
9369 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9370 {
9371 rtx addr = (general_operand (XEXP (op0, 0), mode)
9372 ? force_reg (Pmode, XEXP (op0, 0))
9373 : copy_to_reg (XEXP (op0, 0)));
9374 rtx temp, result;
9375
9376 op0 = replace_equiv_address (op0, addr);
9377 temp = force_reg (GET_MODE (op0), op0);
9378 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9379 op1 = force_reg (mode, op1);
9380
9381 /* The increment queue is LIFO, thus we have to `queue'
9382 the instructions in reverse order. */
9383 enqueue_insn (op0, gen_move_insn (op0, temp));
9384 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9385 return result;
9386 }
9387 }
9388
9389 /* Preincrement, or we can't increment with one simple insn. */
9390 if (post)
9391 /* Save a copy of the value before inc or dec, to return it later. */
9392 temp = value = copy_to_reg (op0);
9393 else
9394 /* Arrange to return the incremented value. */
9395 /* Copy the rtx because expand_binop will protect from the queue,
9396 and the results of that would be invalid for us to return
9397 if our caller does emit_queue before using our result. */
9398 temp = copy_rtx (value = op0);
9399
9400 /* Increment however we can. */
9401 op1 = expand_binop (mode, this_optab, value, op1, op0,
9402 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9403
9404 /* Make sure the value is stored into OP0. */
9405 if (op1 != op0)
9406 emit_move_insn (op0, op1);
9407
9408 return temp;
9409 }
9410 \f
9411 /* Generate code to calculate EXP using a store-flag instruction
9412 and return an rtx for the result. EXP is either a comparison
9413 or a TRUTH_NOT_EXPR whose operand is a comparison.
9414
9415 If TARGET is nonzero, store the result there if convenient.
9416
9417 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9418 cheap.
9419
9420 Return zero if there is no suitable set-flag instruction
9421 available on this machine.
9422
9423 Once expand_expr has been called on the arguments of the comparison,
9424 we are committed to doing the store flag, since it is not safe to
9425 re-evaluate the expression. We emit the store-flag insn by calling
9426 emit_store_flag, but only expand the arguments if we have a reason
9427 to believe that emit_store_flag will be successful. If we think that
9428 it will, but it isn't, we have to simulate the store-flag with a
9429 set/jump/set sequence. */
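/* Editorial sketch (illustrative): for "r = (a < b);" on a machine with a
   suitable store-flag (scc) instruction the comparison result is stored
   directly; the simulated fallback emitted at the end of this function is
   roughly

       r = 1;  if (a < b) goto L;  r = 0;  L: ;

   with the two constants swapped when the result must be inverted.  */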
9430
9431 static rtx
9432 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9433 {
9434 enum rtx_code code;
9435 tree arg0, arg1, type;
9436 tree tem;
9437 enum machine_mode operand_mode;
9438 int invert = 0;
9439 int unsignedp;
9440 rtx op0, op1;
9441 enum insn_code icode;
9442 rtx subtarget = target;
9443 rtx result, label;
9444
9445 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9446 result at the end. We can't simply invert the test since it would
9447 have already been inverted if it were valid. This case occurs for
9448 some floating-point comparisons. */
9449
9450 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9451 invert = 1, exp = TREE_OPERAND (exp, 0);
9452
9453 arg0 = TREE_OPERAND (exp, 0);
9454 arg1 = TREE_OPERAND (exp, 1);
9455
9456 /* Don't crash if the comparison was erroneous. */
9457 if (arg0 == error_mark_node || arg1 == error_mark_node)
9458 return const0_rtx;
9459
9460 type = TREE_TYPE (arg0);
9461 operand_mode = TYPE_MODE (type);
9462 unsignedp = TREE_UNSIGNED (type);
9463
9464 /* We won't bother with BLKmode store-flag operations because it would mean
9465 passing a lot of information to emit_store_flag. */
9466 if (operand_mode == BLKmode)
9467 return 0;
9468
9469 /* We won't bother with store-flag operations involving function pointers
9470 when function pointers must be canonicalized before comparisons. */
9471 #ifdef HAVE_canonicalize_funcptr_for_compare
9472 if (HAVE_canonicalize_funcptr_for_compare
9473 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9474 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9475 == FUNCTION_TYPE))
9476 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9477 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9478 == FUNCTION_TYPE))))
9479 return 0;
9480 #endif
9481
9482 STRIP_NOPS (arg0);
9483 STRIP_NOPS (arg1);
9484
9485 /* Get the rtx comparison code to use. We know that EXP is a comparison
9486 operation of some type. Some comparisons against 1 and -1 can be
9487 converted to comparisons with zero. Do so here so that the tests
9488 below will be aware that we have a comparison with zero. These
9489 tests will not catch constants in the first operand, but constants
9490 are rarely passed as the first operand. */
9491
9492 switch (TREE_CODE (exp))
9493 {
9494 case EQ_EXPR:
9495 code = EQ;
9496 break;
9497 case NE_EXPR:
9498 code = NE;
9499 break;
9500 case LT_EXPR:
9501 if (integer_onep (arg1))
9502 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9503 else
9504 code = unsignedp ? LTU : LT;
9505 break;
9506 case LE_EXPR:
9507 if (! unsignedp && integer_all_onesp (arg1))
9508 arg1 = integer_zero_node, code = LT;
9509 else
9510 code = unsignedp ? LEU : LE;
9511 break;
9512 case GT_EXPR:
9513 if (! unsignedp && integer_all_onesp (arg1))
9514 arg1 = integer_zero_node, code = GE;
9515 else
9516 code = unsignedp ? GTU : GT;
9517 break;
9518 case GE_EXPR:
9519 if (integer_onep (arg1))
9520 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9521 else
9522 code = unsignedp ? GEU : GE;
9523 break;
9524
9525 case UNORDERED_EXPR:
9526 code = UNORDERED;
9527 break;
9528 case ORDERED_EXPR:
9529 code = ORDERED;
9530 break;
9531 case UNLT_EXPR:
9532 code = UNLT;
9533 break;
9534 case UNLE_EXPR:
9535 code = UNLE;
9536 break;
9537 case UNGT_EXPR:
9538 code = UNGT;
9539 break;
9540 case UNGE_EXPR:
9541 code = UNGE;
9542 break;
9543 case UNEQ_EXPR:
9544 code = UNEQ;
9545 break;
9546
9547 default:
9548 abort ();
9549 }
9550
9551 /* Put a constant second. */
9552 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9553 {
9554 tem = arg0; arg0 = arg1; arg1 = tem;
9555 code = swap_condition (code);
9556 }
9557
9558 /* If this is an equality or inequality test of a single bit, we can
9559 do this by shifting the bit being tested to the low-order bit and
9560 masking the result with the constant 1. If the condition was EQ,
9561 we xor it with 1. This does not require an scc insn and is faster
9562 than an scc insn even if we have it.
9563
9564 The code to make this transformation was moved into fold_single_bit_test,
9565 so we just call into the folder and expand its result. */
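  /* Editorial example (illustrative): "(x & 8) != 0" is folded into roughly
     "(x >> 3) & 1", and the EQ form "(x & 8) == 0" additionally XORs that
     result with 1, exactly as described above.  */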
9566
9567 if ((code == NE || code == EQ)
9568 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9569 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9570 {
9571 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9572 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9573 arg0, arg1, type),
9574 target, VOIDmode, EXPAND_NORMAL);
9575 }
9576
9577 /* Now see if we are likely to be able to do this. Return if not. */
9578 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9579 return 0;
9580
9581 icode = setcc_gen_code[(int) code];
9582 if (icode == CODE_FOR_nothing
9583 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9584 {
9585 /* We can only do this if it is one of the special cases that
9586 can be handled without an scc insn. */
9587 if ((code == LT && integer_zerop (arg1))
9588 || (! only_cheap && code == GE && integer_zerop (arg1)))
9589 ;
9590 else if (BRANCH_COST >= 0
9591 && ! only_cheap && (code == NE || code == EQ)
9592 && TREE_CODE (type) != REAL_TYPE
9593 && ((abs_optab->handlers[(int) operand_mode].insn_code
9594 != CODE_FOR_nothing)
9595 || (ffs_optab->handlers[(int) operand_mode].insn_code
9596 != CODE_FOR_nothing)))
9597 ;
9598 else
9599 return 0;
9600 }
9601
9602 if (! get_subtarget (target)
9603 || GET_MODE (subtarget) != operand_mode)
9604 subtarget = 0;
9605
9606 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9607
9608 if (target == 0)
9609 target = gen_reg_rtx (mode);
9610
9611 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9612 because, if emit_store_flag does anything, it will succeed and
9613 OP0 and OP1 will not be used subsequently. */
9614
9615 result = emit_store_flag (target, code,
9616 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9617 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9618 operand_mode, unsignedp, 1);
9619
9620 if (result)
9621 {
9622 if (invert)
9623 result = expand_binop (mode, xor_optab, result, const1_rtx,
9624 result, 0, OPTAB_LIB_WIDEN);
9625 return result;
9626 }
9627
9628 /* If this failed, we have to do this with set/compare/jump/set code. */
9629 if (GET_CODE (target) != REG
9630 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9631 target = gen_reg_rtx (GET_MODE (target));
9632
9633 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9634 result = compare_from_rtx (op0, op1, code, unsignedp,
9635 operand_mode, NULL_RTX);
9636 if (GET_CODE (result) == CONST_INT)
9637 return (((result == const0_rtx && ! invert)
9638 || (result != const0_rtx && invert))
9639 ? const0_rtx : const1_rtx);
9640
9641 /* The code of RESULT may not match CODE if compare_from_rtx
9642 decided to swap its operands and reverse the original code.
9643
9644 We know that compare_from_rtx returns either a CONST_INT or
9645 a new comparison code, so it is safe to just extract the
9646 code from RESULT. */
9647 code = GET_CODE (result);
9648
9649 label = gen_label_rtx ();
9650 if (bcc_gen_fctn[(int) code] == 0)
9651 abort ();
9652
9653 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9654 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9655 emit_label (label);
9656
9657 return target;
9658 }
9659 \f
9660
9661 /* Stubs in case we haven't got a casesi insn. */
9662 #ifndef HAVE_casesi
9663 # define HAVE_casesi 0
9664 # define gen_casesi(a, b, c, d, e) (0)
9665 # define CODE_FOR_casesi CODE_FOR_nothing
9666 #endif
9667
9668 /* If the machine does not have a case insn that compares the bounds,
9669 this means extra overhead for dispatch tables, which raises the
9670 threshold for using them. */
9671 #ifndef CASE_VALUES_THRESHOLD
9672 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9673 #endif /* CASE_VALUES_THRESHOLD */
9674
9675 unsigned int
9676 case_values_threshold (void)
9677 {
9678 return CASE_VALUES_THRESHOLD;
9679 }
9680
9681 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9682 0 otherwise (i.e. if there is no casesi instruction). */
9683 int
9684 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9685 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9686 {
9687 enum machine_mode index_mode = SImode;
9688 int index_bits = GET_MODE_BITSIZE (index_mode);
9689 rtx op1, op2, index;
9690 enum machine_mode op_mode;
9691
9692 if (! HAVE_casesi)
9693 return 0;
9694
9695 /* Convert the index to SImode. */
9696 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9697 {
9698 enum machine_mode omode = TYPE_MODE (index_type);
9699 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9700
9701 /* We must handle the endpoints in the original mode. */
9702 index_expr = build (MINUS_EXPR, index_type,
9703 index_expr, minval);
9704 minval = integer_zero_node;
9705 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9706 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9707 omode, 1, default_label);
9708 /* Now we can safely truncate. */
9709 index = convert_to_mode (index_mode, index, 0);
9710 }
9711 else
9712 {
9713 if (TYPE_MODE (index_type) != index_mode)
9714 {
9715 index_expr = convert ((*lang_hooks.types.type_for_size)
9716 (index_bits, 0), index_expr);
9717 index_type = TREE_TYPE (index_expr);
9718 }
9719
9720 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9721 }
9722 emit_queue ();
9723 index = protect_from_queue (index, 0);
9724 do_pending_stack_adjust ();
9725
9726 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9727 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9728 (index, op_mode))
9729 index = copy_to_mode_reg (op_mode, index);
9730
9731 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9732
9733 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9734 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9735 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9736 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9737 (op1, op_mode))
9738 op1 = copy_to_mode_reg (op_mode, op1);
9739
9740 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9741
9742 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9743 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9744 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9745 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9746 (op2, op_mode))
9747 op2 = copy_to_mode_reg (op_mode, op2);
9748
9749 emit_jump_insn (gen_casesi (index, op1, op2,
9750 table_label, default_label));
9751 return 1;
9752 }
9753
9754 /* Attempt to generate a tablejump instruction; same concept. */
9755 #ifndef HAVE_tablejump
9756 #define HAVE_tablejump 0
9757 #define gen_tablejump(x, y) (0)
9758 #endif
9759
9760 /* Subroutine of the next function.
9761
9762 INDEX is the value being switched on, with the lowest value
9763 in the table already subtracted.
9764 MODE is its expected mode (needed if INDEX is constant).
9765 RANGE is the length of the jump table.
9766 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9767
9768 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9769 index value is out of range. */
9770
9771 static void
9772 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9773 rtx default_label)
9774 {
9775 rtx temp, vector;
9776
9777 if (INTVAL (range) > cfun->max_jumptable_ents)
9778 cfun->max_jumptable_ents = INTVAL (range);
9779
9780 /* Do an unsigned comparison (in the proper mode) between the index
9781 expression and the value which represents the length of the range.
9782 Since we just finished subtracting the lower bound of the range
9783 from the index expression, this comparison allows us to simultaneously
9784 check that the original index expression value is both greater than
9785 or equal to the minimum value of the range and less than or equal to
9786 the maximum value of the range. */
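  /* Editorial example (illustrative): for a switch whose case values span
     3..10, the caller passes INDEX = x - 3 and RANGE = 7; the single
     unsigned comparison "(unsigned) INDEX > 7" then branches to
     DEFAULT_LABEL exactly when x < 3 or x > 10.  */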
9787
9788 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9789 default_label);
9790
9791 /* If index is in range, it must fit in Pmode.
9792 Convert to Pmode so we can index with it. */
9793 if (mode != Pmode)
9794 index = convert_to_mode (Pmode, index, 1);
9795
9796 /* Don't let a MEM slip through, because then INDEX that comes
9797 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9798 and break_out_memory_refs will go to work on it and mess it up. */
9799 #ifdef PIC_CASE_VECTOR_ADDRESS
9800 if (flag_pic && GET_CODE (index) != REG)
9801 index = copy_to_mode_reg (Pmode, index);
9802 #endif
9803
9804 /* If flag_force_addr were to affect this address
9805 it could interfere with the tricky assumptions made
9806 about addresses that contain label-refs,
9807 which may be valid only very near the tablejump itself. */
9808 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9809 GET_MODE_SIZE, because this indicates how large insns are. The other
9810 uses should all be Pmode, because they are addresses. This code
9811 could fail if addresses and insns are not the same size. */
9812 index = gen_rtx_PLUS (Pmode,
9813 gen_rtx_MULT (Pmode, index,
9814 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9815 gen_rtx_LABEL_REF (Pmode, table_label));
9816 #ifdef PIC_CASE_VECTOR_ADDRESS
9817 if (flag_pic)
9818 index = PIC_CASE_VECTOR_ADDRESS (index);
9819 else
9820 #endif
9821 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9822 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9823 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9824 RTX_UNCHANGING_P (vector) = 1;
9825 MEM_NOTRAP_P (vector) = 1;
9826 convert_move (temp, vector, 0);
9827
9828 emit_jump_insn (gen_tablejump (temp, table_label));
9829
9830 /* If we are generating PIC code or if the table is PC-relative, the
9831 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9832 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9833 emit_barrier ();
9834 }
9835
9836 int
9837 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9838 rtx table_label, rtx default_label)
9839 {
9840 rtx index;
9841
9842 if (! HAVE_tablejump)
9843 return 0;
9844
9845 index_expr = fold (build (MINUS_EXPR, index_type,
9846 convert (index_type, index_expr),
9847 convert (index_type, minval)));
9848 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9849 emit_queue ();
9850 index = protect_from_queue (index, 0);
9851 do_pending_stack_adjust ();
9852
9853 do_tablejump (index, TYPE_MODE (index_type),
9854 convert_modes (TYPE_MODE (index_type),
9855 TYPE_MODE (TREE_TYPE (range)),
9856 expand_expr (range, NULL_RTX,
9857 VOIDmode, 0),
9858 TREE_UNSIGNED (TREE_TYPE (range))),
9859 table_label, default_label);
9860 return 1;
9861 }
9862
9863 /* Nonzero if the mode is a valid vector mode for this architecture.
9864 This returns nonzero even if there is no hardware support for the
9865 vector mode, but we can emulate with narrower modes. */
9866
9867 int
9868 vector_mode_valid_p (enum machine_mode mode)
9869 {
9870 enum mode_class class = GET_MODE_CLASS (mode);
9871 enum machine_mode innermode;
9872
9873 /* Doh! What's going on? */
9874 if (class != MODE_VECTOR_INT
9875 && class != MODE_VECTOR_FLOAT)
9876 return 0;
9877
9878 /* Hardware support. Woo hoo! */
9879 if (VECTOR_MODE_SUPPORTED_P (mode))
9880 return 1;
9881
9882 innermode = GET_MODE_INNER (mode);
9883
9884 /* We should probably return 1 if requesting V4DI and we have no DI
9885 but do have V2DI; however, this is probably very unlikely. */
9886
9887 /* If we have support for the inner mode, we can safely emulate it.
9888 We may not have V2DI, but we can emulate with a pair of DIs. */
9889 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9890 }
9891
9892 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9893 static rtx
9894 const_vector_from_tree (tree exp)
9895 {
9896 rtvec v;
9897 int units, i;
9898 tree link, elt;
9899 enum machine_mode inner, mode;
9900
9901 mode = TYPE_MODE (TREE_TYPE (exp));
9902
9903 if (is_zeros_p (exp))
9904 return CONST0_RTX (mode);
9905
9906 units = GET_MODE_NUNITS (mode);
9907 inner = GET_MODE_INNER (mode);
9908
9909 v = rtvec_alloc (units);
9910
9911 link = TREE_VECTOR_CST_ELTS (exp);
9912 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9913 {
9914 elt = TREE_VALUE (link);
9915
9916 if (TREE_CODE (elt) == REAL_CST)
9917 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9918 inner);
9919 else
9920 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9921 TREE_INT_CST_HIGH (elt),
9922 inner);
9923 }
9924
9925 /* Initialize remaining elements to 0. */
9926 for (; i < units; ++i)
9927 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9928
9929 return gen_rtx_raw_CONST_VECTOR (mode, v);
9930 }
9931
9932 #include "gt-expr.h"