gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
64
65 #endif
66
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
74
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
79
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
87
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
116
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
129 void *constfundata;
130 int reverse;
131 };
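/* A minimal sketch of the CONSTFUN contract above, for illustration only:
   the callback is handed CONSTFUNDATA, the byte OFFSET into the block, and
   the MODE of the chunk about to be stored, and must return an rtx of that
   mode holding the bytes to store there.  A callback used for clearing can
   ignore all three arguments and return zero, much as clear_by_pieces_1
   (declared below) does; the function name here is hypothetical.  */
#if 0
static rtx
example_clear_constfun (void *data ATTRIBUTE_UNUSED,
			HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Zero regardless of offset or mode.  */
  return const0_rtx;
}
#endif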
132
133 static rtx enqueue_insn (rtx, rtx);
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
135 unsigned int);
136 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
137 struct move_by_pieces *);
138 static bool block_move_libcall_safe_for_call_parm (void);
139 static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
140 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
146 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
147 struct store_by_pieces *);
148 static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
149 static rtx clear_storage_via_libcall (rtx, rtx);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static int is_zeros_p (tree);
154 static int mostly_zeros_p (tree);
155 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int);
158 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
159 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
160 tree, enum machine_mode, int, tree, int);
161 static rtx var_rtx (tree);
162
163 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
164 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
165
166 static int is_aligning_offset (tree, tree);
167 static rtx expand_increment (tree, int, int);
168 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
169 #ifdef PUSH_ROUNDING
170 static void emit_single_push_insn (enum machine_mode, rtx, tree);
171 #endif
172 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
173 static rtx const_vector_from_tree (tree);
174
175 /* Record for each mode whether we can move a register directly to or
176 from an object of that mode in memory. If we can't, we won't try
177 to use that mode directly when accessing a field of that mode. */
178
179 static char direct_load[NUM_MACHINE_MODES];
180 static char direct_store[NUM_MACHINE_MODES];
181
182 /* Record for each mode whether we can float-extend from memory. */
183
184 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
185
186 /* If a memory-to-memory move would take MOVE_RATIO or more simple
187 move-instruction sequences, we will do a movstr or libcall instead. */
188
189 #ifndef MOVE_RATIO
190 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
191 #define MOVE_RATIO 2
192 #else
193 /* If we are optimizing for space (-Os), cut down the default move ratio. */
194 #define MOVE_RATIO (optimize_size ? 3 : 15)
195 #endif
196 #endif
197
198 /* This macro is used to determine whether move_by_pieces should be called
199 to perform a structure copy. */
200 #ifndef MOVE_BY_PIECES_P
201 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
202 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
203 #endif
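/* A worked example of the trade-off, using the default values above and a
   hypothetical 32-bit target with no movstrsi pattern: a word-aligned copy
   of 32 bytes costs 32/4 = 8 SImode moves.  With MOVE_RATIO 15 we have
   8 < 15, so MOVE_BY_PIECES_P is true and the copy is expanded inline.
   Under -Os the ratio drops to 3, 8 < 3 fails, and emit_block_move falls
   back to the memcpy libcall (or an inline loop) instead.  */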
204
205 /* If a clear memory operation would take CLEAR_RATIO or more simple
206 move-instruction sequences, we will do a clrstr or libcall instead. */
207
208 #ifndef CLEAR_RATIO
209 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
210 #define CLEAR_RATIO 2
211 #else
212 /* If we are optimizing for space, cut down the default clear ratio. */
213 #define CLEAR_RATIO (optimize_size ? 3 : 15)
214 #endif
215 #endif
216
217 /* This macro is used to determine whether clear_by_pieces should be
218 called to clear storage. */
219 #ifndef CLEAR_BY_PIECES_P
220 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
221 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
222 #endif
223
224 /* This macro is used to determine whether store_by_pieces should be
225 called to "memset" storage with byte values other than zero, or
226 to "memcpy" storage when the source is a constant string. */
227 #ifndef STORE_BY_PIECES_P
228 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
229 #endif
230
231 /* This array records the insn_code of insns to perform block moves. */
232 enum insn_code movstr_optab[NUM_MACHINE_MODES];
233
234 /* This array records the insn_code of insns to perform block clears. */
235 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
236
237 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
238
239 #ifndef SLOW_UNALIGNED_ACCESS
240 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
241 #endif
242 \f
243 /* This is run once per compilation to set up which modes can be used
244 directly in memory and to initialize the block move optab. */
245
246 void
247 init_expr_once (void)
248 {
249 rtx insn, pat;
250 enum machine_mode mode;
251 int num_clobbers;
252 rtx mem, mem1;
253 rtx reg;
254
255 /* Try indexing by frame ptr and try by stack ptr.
256 It is known that on the Convex the stack ptr isn't a valid index.
257 With luck, one or the other is valid on any machine. */
258 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
259 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
260
261 /* A scratch register we can modify in-place below to avoid
262 useless RTL allocations. */
263 reg = gen_rtx_REG (VOIDmode, -1);
264
265 insn = rtx_alloc (INSN);
266 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
267 PATTERN (insn) = pat;
268
269 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
270 mode = (enum machine_mode) ((int) mode + 1))
271 {
272 int regno;
273
274 direct_load[(int) mode] = direct_store[(int) mode] = 0;
275 PUT_MODE (mem, mode);
276 PUT_MODE (mem1, mode);
277 PUT_MODE (reg, mode);
278
279 /* See if there is some register that can be used in this mode and
280 directly loaded or stored from memory. */
281
282 if (mode != VOIDmode && mode != BLKmode)
283 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
284 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
285 regno++)
286 {
287 if (! HARD_REGNO_MODE_OK (regno, mode))
288 continue;
289
290 REGNO (reg) = regno;
291
292 SET_SRC (pat) = mem;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
296
297 SET_SRC (pat) = mem1;
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
301
302 SET_SRC (pat) = reg;
303 SET_DEST (pat) = mem;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
306
307 SET_SRC (pat) = reg;
308 SET_DEST (pat) = mem1;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
311 }
312 }
313
314 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
315
316 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
317 mode = GET_MODE_WIDER_MODE (mode))
318 {
319 enum machine_mode srcmode;
320 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
321 srcmode = GET_MODE_WIDER_MODE (srcmode))
322 {
323 enum insn_code ic;
324
325 ic = can_extend_p (mode, srcmode, 0);
326 if (ic == CODE_FOR_nothing)
327 continue;
328
329 PUT_MODE (mem, srcmode);
330
331 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
332 float_extend_from_mem[mode][srcmode] = true;
333 }
334 }
335 }
336
337 /* This is run at the start of compiling a function. */
338
339 void
340 init_expr (void)
341 {
342 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
343
344 pending_chain = 0;
345 pending_stack_adjust = 0;
346 stack_pointer_delta = 0;
347 inhibit_defer_pop = 0;
348 saveregs_value = 0;
349 apply_args_value = 0;
350 forced_labels = 0;
351 }
352
353 /* Small sanity check that the queue is empty at the end of a function. */
354
355 void
356 finish_expr_for_function (void)
357 {
358 if (pending_chain)
359 abort ();
360 }
361 \f
362 /* Manage the queue of increment instructions to be output
363 for POSTINCREMENT_EXPR expressions, etc. */
364
365 /* Queue up to increment (or change) VAR later. BODY says how:
366 BODY should be the same thing you would pass to emit_insn
367 to increment right away. It will go to emit_insn later on.
368
369 The value is a QUEUED expression to be used in place of VAR
370 where you want to guarantee the pre-incrementation value of VAR. */
371
372 static rtx
373 enqueue_insn (rtx var, rtx body)
374 {
375 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
376 body, pending_chain);
377 return pending_chain;
378 }
379
380 /* Use protect_from_queue to convert a QUEUED expression
381 into something that you can put immediately into an instruction.
382 If the queued incrementation has not happened yet,
383 protect_from_queue returns the variable itself.
384 If the incrementation has happened, protect_from_queue returns a temp
385 that contains a copy of the old value of the variable.
386
387 Any time an rtx which might possibly be a QUEUED is to be put
388 into an instruction, it must be passed through protect_from_queue first.
389 QUEUED expressions are not meaningful in instructions.
390
391 Do not pass a value through protect_from_queue and then hold
392 on to it for a while before putting it in an instruction!
393 If the queue is flushed in between, incorrect code will result. */
394
395 rtx
396 protect_from_queue (rtx x, int modify)
397 {
398 RTX_CODE code = GET_CODE (x);
399
400 #if 0 /* A QUEUED can hang around after the queue is forced out. */
401 /* Shortcut for most common case. */
402 if (pending_chain == 0)
403 return x;
404 #endif
405
406 if (code != QUEUED)
407 {
408 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
409 use of autoincrement. Make a copy of the contents of the memory
410 location rather than a copy of the address, but not if the value is
411 of mode BLKmode. Don't modify X in place since it might be
412 shared. */
413 if (code == MEM && GET_MODE (x) != BLKmode
414 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
415 {
416 rtx y = XEXP (x, 0);
417 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
418
419 if (QUEUED_INSN (y))
420 {
421 rtx temp = gen_reg_rtx (GET_MODE (x));
422
423 emit_insn_before (gen_move_insn (temp, new),
424 QUEUED_INSN (y));
425 return temp;
426 }
427
428 /* Copy the address into a pseudo, so that the returned value
429 remains correct across calls to emit_queue. */
430 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
431 }
432
433 /* Otherwise, recursively protect the subexpressions of all
434 the kinds of rtx's that can contain a QUEUED. */
435 if (code == MEM)
436 {
437 rtx tem = protect_from_queue (XEXP (x, 0), 0);
438 if (tem != XEXP (x, 0))
439 {
440 x = copy_rtx (x);
441 XEXP (x, 0) = tem;
442 }
443 }
444 else if (code == PLUS || code == MULT)
445 {
446 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
447 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
448 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
449 {
450 x = copy_rtx (x);
451 XEXP (x, 0) = new0;
452 XEXP (x, 1) = new1;
453 }
454 }
455 return x;
456 }
457 /* If the increment has not happened, use the variable itself. Copy it
458 into a new pseudo so that the value remains correct across calls to
459 emit_queue. */
460 if (QUEUED_INSN (x) == 0)
461 return copy_to_reg (QUEUED_VAR (x));
462 /* If the increment has happened and a pre-increment copy exists,
463 use that copy. */
464 if (QUEUED_COPY (x) != 0)
465 return QUEUED_COPY (x);
466 /* The increment has happened but we haven't set up a pre-increment copy.
467 Set one up now, and use it. */
468 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
469 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
470 QUEUED_INSN (x));
471 return QUEUED_COPY (x);
472 }
473
474 /* Return nonzero if X contains a QUEUED expression:
475 if it contains anything that will be altered by a queued increment.
476 We handle only combinations of MEM, PLUS, MINUS and MULT operators
477 since memory addresses generally contain only those. */
478
479 int
480 queued_subexp_p (rtx x)
481 {
482 enum rtx_code code = GET_CODE (x);
483 switch (code)
484 {
485 case QUEUED:
486 return 1;
487 case MEM:
488 return queued_subexp_p (XEXP (x, 0));
489 case MULT:
490 case PLUS:
491 case MINUS:
492 return (queued_subexp_p (XEXP (x, 0))
493 || queued_subexp_p (XEXP (x, 1)));
494 default:
495 return 0;
496 }
497 }
498
499 /* Perform all the pending incrementations. */
500
501 void
502 emit_queue (void)
503 {
504 rtx p;
505 while ((p = pending_chain))
506 {
507 rtx body = QUEUED_BODY (p);
508
509 switch (GET_CODE (body))
510 {
511 case INSN:
512 case JUMP_INSN:
513 case CALL_INSN:
514 case CODE_LABEL:
515 case BARRIER:
516 case NOTE:
517 QUEUED_INSN (p) = body;
518 emit_insn (body);
519 break;
520
521 #ifdef ENABLE_CHECKING
522 case SEQUENCE:
523 abort ();
524 break;
525 #endif
526
527 default:
528 QUEUED_INSN (p) = emit_insn (body);
529 break;
530 }
531
532 pending_chain = QUEUED_NEXT (p);
533 }
534 }
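/* A hedged sketch of how the three queue entry points above fit together
   when expanding a post-increment; the helper name is made up, and the
   real enqueue calls are made by expand_increment later in this file.  */
#if 0
static void
example_post_increment (rtx var)
{
  rtx queued, safe;

  /* Defer the increment of VAR and get a QUEUED rtx standing for its
     pre-increment value.  */
  queued = enqueue_insn (var, gen_add2_insn (var, const1_rtx));

  /* Every use of that value is passed through protect_from_queue just
     before it goes into an insn...  */
  safe = protect_from_queue (queued, 0);
  emit_move_insn (gen_reg_rtx (GET_MODE (var)), safe);

  /* ...and the deferred increments are flushed at the end of the
     statement.  */
  emit_queue ();
}
#endif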
535 \f
536 /* Copy data from FROM to TO, where the machine modes are not the same.
537 Both modes may be integer, or both may be floating.
538 UNSIGNEDP should be nonzero if FROM is an unsigned type.
539 This causes zero-extension instead of sign-extension. */
540
541 void
542 convert_move (rtx to, rtx from, int unsignedp)
543 {
544 enum machine_mode to_mode = GET_MODE (to);
545 enum machine_mode from_mode = GET_MODE (from);
546 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
547 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
548 enum insn_code code;
549 rtx libcall;
550
551 /* rtx code for making an equivalent value. */
552 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
553 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
554
555 to = protect_from_queue (to, 1);
556 from = protect_from_queue (from, 0);
557
558 if (to_real != from_real)
559 abort ();
560
561 /* If FROM is a SUBREG that indicates that we have already done at least
562 the required extension, strip it. We don't handle such SUBREGs as
563 TO here. */
564
565 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
566 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
567 >= GET_MODE_SIZE (to_mode))
568 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
569 from = gen_lowpart (to_mode, from), from_mode = to_mode;
570
571 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
572 abort ();
573
574 if (to_mode == from_mode
575 || (from_mode == VOIDmode && CONSTANT_P (from)))
576 {
577 emit_move_insn (to, from);
578 return;
579 }
580
581 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
582 {
583 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
584 abort ();
585
586 if (VECTOR_MODE_P (to_mode))
587 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
588 else
589 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
590
591 emit_move_insn (to, from);
592 return;
593 }
594
595 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
596 {
597 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
598 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
599 return;
600 }
601
602 if (to_real != from_real)
603 abort ();
604
605 if (to_real)
606 {
607 rtx value, insns;
608
609 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
610 {
611 /* Try converting directly if the insn is supported. */
612 if ((code = can_extend_p (to_mode, from_mode, 0))
613 != CODE_FOR_nothing)
614 {
615 emit_unop_insn (code, to, from, UNKNOWN);
616 return;
617 }
618 }
619
620 #ifdef HAVE_trunchfqf2
621 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
622 {
623 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
624 return;
625 }
626 #endif
627 #ifdef HAVE_trunctqfqf2
628 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
629 {
630 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_truncsfqf2
635 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
636 {
637 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641 #ifdef HAVE_truncdfqf2
642 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
643 {
644 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
645 return;
646 }
647 #endif
648 #ifdef HAVE_truncxfqf2
649 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
650 {
651 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
652 return;
653 }
654 #endif
655 #ifdef HAVE_trunctfqf2
656 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
657 {
658 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
659 return;
660 }
661 #endif
662
663 #ifdef HAVE_trunctqfhf2
664 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
665 {
666 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
667 return;
668 }
669 #endif
670 #ifdef HAVE_truncsfhf2
671 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
672 {
673 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677 #ifdef HAVE_truncdfhf2
678 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
679 {
680 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
681 return;
682 }
683 #endif
684 #ifdef HAVE_truncxfhf2
685 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
688 return;
689 }
690 #endif
691 #ifdef HAVE_trunctfhf2
692 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
693 {
694 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
695 return;
696 }
697 #endif
698
699 #ifdef HAVE_truncsftqf2
700 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
701 {
702 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
703 return;
704 }
705 #endif
706 #ifdef HAVE_truncdftqf2
707 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
708 {
709 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
710 return;
711 }
712 #endif
713 #ifdef HAVE_truncxftqf2
714 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
715 {
716 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
717 return;
718 }
719 #endif
720 #ifdef HAVE_trunctftqf2
721 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
722 {
723 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
724 return;
725 }
726 #endif
727
728 #ifdef HAVE_truncdfsf2
729 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
730 {
731 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
732 return;
733 }
734 #endif
735 #ifdef HAVE_truncxfsf2
736 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
737 {
738 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
739 return;
740 }
741 #endif
742 #ifdef HAVE_trunctfsf2
743 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
744 {
745 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
746 return;
747 }
748 #endif
749 #ifdef HAVE_truncxfdf2
750 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
751 {
752 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
753 return;
754 }
755 #endif
756 #ifdef HAVE_trunctfdf2
757 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
758 {
759 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
760 return;
761 }
762 #endif
763
764 libcall = (rtx) 0;
765 switch (from_mode)
766 {
767 case SFmode:
768 switch (to_mode)
769 {
770 case DFmode:
771 libcall = extendsfdf2_libfunc;
772 break;
773
774 case XFmode:
775 libcall = extendsfxf2_libfunc;
776 break;
777
778 case TFmode:
779 libcall = extendsftf2_libfunc;
780 break;
781
782 default:
783 break;
784 }
785 break;
786
787 case DFmode:
788 switch (to_mode)
789 {
790 case SFmode:
791 libcall = truncdfsf2_libfunc;
792 break;
793
794 case XFmode:
795 libcall = extenddfxf2_libfunc;
796 break;
797
798 case TFmode:
799 libcall = extenddftf2_libfunc;
800 break;
801
802 default:
803 break;
804 }
805 break;
806
807 case XFmode:
808 switch (to_mode)
809 {
810 case SFmode:
811 libcall = truncxfsf2_libfunc;
812 break;
813
814 case DFmode:
815 libcall = truncxfdf2_libfunc;
816 break;
817
818 default:
819 break;
820 }
821 break;
822
823 case TFmode:
824 switch (to_mode)
825 {
826 case SFmode:
827 libcall = trunctfsf2_libfunc;
828 break;
829
830 case DFmode:
831 libcall = trunctfdf2_libfunc;
832 break;
833
834 default:
835 break;
836 }
837 break;
838
839 default:
840 break;
841 }
842
843 if (libcall == (rtx) 0)
844 /* This conversion is not implemented yet. */
845 abort ();
846
847 start_sequence ();
848 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
849 1, from, from_mode);
850 insns = get_insns ();
851 end_sequence ();
852 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
853 from));
854 return;
855 }
856
857 /* Now both modes are integers. */
858
859 /* Handle expanding beyond a word. */
860 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
861 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
862 {
863 rtx insns;
864 rtx lowpart;
865 rtx fill_value;
866 rtx lowfrom;
867 int i;
868 enum machine_mode lowpart_mode;
869 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
870
871 /* Try converting directly if the insn is supported. */
872 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
873 != CODE_FOR_nothing)
874 {
875 /* If FROM is a SUBREG, put it into a register. Do this
876 so that we always generate the same set of insns for
877 better cse'ing; if an intermediate assignment occurred,
878 we won't be doing the operation directly on the SUBREG. */
879 if (optimize > 0 && GET_CODE (from) == SUBREG)
880 from = force_reg (from_mode, from);
881 emit_unop_insn (code, to, from, equiv_code);
882 return;
883 }
884 /* Next, try converting via full word. */
885 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
886 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
887 != CODE_FOR_nothing))
888 {
889 if (GET_CODE (to) == REG)
890 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
891 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
892 emit_unop_insn (code, to,
893 gen_lowpart (word_mode, to), equiv_code);
894 return;
895 }
896
897 /* No special multiword conversion insn; do it by hand. */
898 start_sequence ();
899
900 /* Since we will turn this into a no conflict block, we must ensure
901 that the source does not overlap the target. */
902
903 if (reg_overlap_mentioned_p (to, from))
904 from = force_reg (from_mode, from);
905
906 /* Get a copy of FROM widened to a word, if necessary. */
907 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
908 lowpart_mode = word_mode;
909 else
910 lowpart_mode = from_mode;
911
912 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
913
914 lowpart = gen_lowpart (lowpart_mode, to);
915 emit_move_insn (lowpart, lowfrom);
916
917 /* Compute the value to put in each remaining word. */
918 if (unsignedp)
919 fill_value = const0_rtx;
920 else
921 {
922 #ifdef HAVE_slt
923 if (HAVE_slt
924 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
925 && STORE_FLAG_VALUE == -1)
926 {
927 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
928 lowpart_mode, 0);
929 fill_value = gen_reg_rtx (word_mode);
930 emit_insn (gen_slt (fill_value));
931 }
932 else
933 #endif
934 {
935 fill_value
936 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
937 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
938 NULL_RTX, 0);
939 fill_value = convert_to_mode (word_mode, fill_value, 1);
940 }
941 }
942
943 /* Fill the remaining words. */
944 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
945 {
946 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
947 rtx subword = operand_subword (to, index, 1, to_mode);
948
949 if (subword == 0)
950 abort ();
951
952 if (fill_value != subword)
953 emit_move_insn (subword, fill_value);
954 }
955
956 insns = get_insns ();
957 end_sequence ();
958
959 emit_no_conflict_block (insns, to, from, NULL_RTX,
960 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
961 return;
962 }
963
964 /* Truncating multi-word to a word or less. */
965 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
966 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
967 {
968 if (!((GET_CODE (from) == MEM
969 && ! MEM_VOLATILE_P (from)
970 && direct_load[(int) to_mode]
971 && ! mode_dependent_address_p (XEXP (from, 0)))
972 || GET_CODE (from) == REG
973 || GET_CODE (from) == SUBREG))
974 from = force_reg (from_mode, from);
975 convert_move (to, gen_lowpart (word_mode, from), 0);
976 return;
977 }
978
979 /* Handle pointer conversion. */ /* SPEE 900220. */
980 if (to_mode == PQImode)
981 {
982 if (from_mode != QImode)
983 from = convert_to_mode (QImode, from, unsignedp);
984
985 #ifdef HAVE_truncqipqi2
986 if (HAVE_truncqipqi2)
987 {
988 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
989 return;
990 }
991 #endif /* HAVE_truncqipqi2 */
992 abort ();
993 }
994
995 if (from_mode == PQImode)
996 {
997 if (to_mode != QImode)
998 {
999 from = convert_to_mode (QImode, from, unsignedp);
1000 from_mode = QImode;
1001 }
1002 else
1003 {
1004 #ifdef HAVE_extendpqiqi2
1005 if (HAVE_extendpqiqi2)
1006 {
1007 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1008 return;
1009 }
1010 #endif /* HAVE_extendpqiqi2 */
1011 abort ();
1012 }
1013 }
1014
1015 if (to_mode == PSImode)
1016 {
1017 if (from_mode != SImode)
1018 from = convert_to_mode (SImode, from, unsignedp);
1019
1020 #ifdef HAVE_truncsipsi2
1021 if (HAVE_truncsipsi2)
1022 {
1023 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1024 return;
1025 }
1026 #endif /* HAVE_truncsipsi2 */
1027 abort ();
1028 }
1029
1030 if (from_mode == PSImode)
1031 {
1032 if (to_mode != SImode)
1033 {
1034 from = convert_to_mode (SImode, from, unsignedp);
1035 from_mode = SImode;
1036 }
1037 else
1038 {
1039 #ifdef HAVE_extendpsisi2
1040 if (! unsignedp && HAVE_extendpsisi2)
1041 {
1042 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1043 return;
1044 }
1045 #endif /* HAVE_extendpsisi2 */
1046 #ifdef HAVE_zero_extendpsisi2
1047 if (unsignedp && HAVE_zero_extendpsisi2)
1048 {
1049 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1050 return;
1051 }
1052 #endif /* HAVE_zero_extendpsisi2 */
1053 abort ();
1054 }
1055 }
1056
1057 if (to_mode == PDImode)
1058 {
1059 if (from_mode != DImode)
1060 from = convert_to_mode (DImode, from, unsignedp);
1061
1062 #ifdef HAVE_truncdipdi2
1063 if (HAVE_truncdipdi2)
1064 {
1065 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1066 return;
1067 }
1068 #endif /* HAVE_truncdipdi2 */
1069 abort ();
1070 }
1071
1072 if (from_mode == PDImode)
1073 {
1074 if (to_mode != DImode)
1075 {
1076 from = convert_to_mode (DImode, from, unsignedp);
1077 from_mode = DImode;
1078 }
1079 else
1080 {
1081 #ifdef HAVE_extendpdidi2
1082 if (HAVE_extendpdidi2)
1083 {
1084 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1085 return;
1086 }
1087 #endif /* HAVE_extendpdidi2 */
1088 abort ();
1089 }
1090 }
1091
1092 /* Now follow all the conversions between integers
1093 no more than a word long. */
1094
1095 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1096 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1097 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1098 GET_MODE_BITSIZE (from_mode)))
1099 {
1100 if (!((GET_CODE (from) == MEM
1101 && ! MEM_VOLATILE_P (from)
1102 && direct_load[(int) to_mode]
1103 && ! mode_dependent_address_p (XEXP (from, 0)))
1104 || GET_CODE (from) == REG
1105 || GET_CODE (from) == SUBREG))
1106 from = force_reg (from_mode, from);
1107 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1108 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1109 from = copy_to_reg (from);
1110 emit_move_insn (to, gen_lowpart (to_mode, from));
1111 return;
1112 }
1113
1114 /* Handle extension. */
1115 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1116 {
1117 /* Convert directly if that works. */
1118 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1119 != CODE_FOR_nothing)
1120 {
1121 if (flag_force_mem)
1122 from = force_not_mem (from);
1123
1124 emit_unop_insn (code, to, from, equiv_code);
1125 return;
1126 }
1127 else
1128 {
1129 enum machine_mode intermediate;
1130 rtx tmp;
1131 tree shift_amount;
1132
1133 /* Search for a mode to convert via. */
1134 for (intermediate = from_mode; intermediate != VOIDmode;
1135 intermediate = GET_MODE_WIDER_MODE (intermediate))
1136 if (((can_extend_p (to_mode, intermediate, unsignedp)
1137 != CODE_FOR_nothing)
1138 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1139 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1140 GET_MODE_BITSIZE (intermediate))))
1141 && (can_extend_p (intermediate, from_mode, unsignedp)
1142 != CODE_FOR_nothing))
1143 {
1144 convert_move (to, convert_to_mode (intermediate, from,
1145 unsignedp), unsignedp);
1146 return;
1147 }
1148
1149 /* No suitable intermediate mode.
1150 Generate what we need with shifts. */
1151 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1152 - GET_MODE_BITSIZE (from_mode), 0);
1153 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1154 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1155 to, unsignedp);
1156 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1157 to, unsignedp);
1158 if (tmp != to)
1159 emit_move_insn (to, tmp);
1160 return;
1161 }
1162 }
1163
1164 /* Support special truncate insns for certain modes. */
1165
1166 if (from_mode == DImode && to_mode == SImode)
1167 {
1168 #ifdef HAVE_truncdisi2
1169 if (HAVE_truncdisi2)
1170 {
1171 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1172 return;
1173 }
1174 #endif
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1176 return;
1177 }
1178
1179 if (from_mode == DImode && to_mode == HImode)
1180 {
1181 #ifdef HAVE_truncdihi2
1182 if (HAVE_truncdihi2)
1183 {
1184 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1185 return;
1186 }
1187 #endif
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1189 return;
1190 }
1191
1192 if (from_mode == DImode && to_mode == QImode)
1193 {
1194 #ifdef HAVE_truncdiqi2
1195 if (HAVE_truncdiqi2)
1196 {
1197 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1198 return;
1199 }
1200 #endif
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1202 return;
1203 }
1204
1205 if (from_mode == SImode && to_mode == HImode)
1206 {
1207 #ifdef HAVE_truncsihi2
1208 if (HAVE_truncsihi2)
1209 {
1210 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1211 return;
1212 }
1213 #endif
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1215 return;
1216 }
1217
1218 if (from_mode == SImode && to_mode == QImode)
1219 {
1220 #ifdef HAVE_truncsiqi2
1221 if (HAVE_truncsiqi2)
1222 {
1223 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1224 return;
1225 }
1226 #endif
1227 convert_move (to, force_reg (from_mode, from), unsignedp);
1228 return;
1229 }
1230
1231 if (from_mode == HImode && to_mode == QImode)
1232 {
1233 #ifdef HAVE_trunchiqi2
1234 if (HAVE_trunchiqi2)
1235 {
1236 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1237 return;
1238 }
1239 #endif
1240 convert_move (to, force_reg (from_mode, from), unsignedp);
1241 return;
1242 }
1243
1244 if (from_mode == TImode && to_mode == DImode)
1245 {
1246 #ifdef HAVE_trunctidi2
1247 if (HAVE_trunctidi2)
1248 {
1249 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1250 return;
1251 }
1252 #endif
1253 convert_move (to, force_reg (from_mode, from), unsignedp);
1254 return;
1255 }
1256
1257 if (from_mode == TImode && to_mode == SImode)
1258 {
1259 #ifdef HAVE_trunctisi2
1260 if (HAVE_trunctisi2)
1261 {
1262 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1263 return;
1264 }
1265 #endif
1266 convert_move (to, force_reg (from_mode, from), unsignedp);
1267 return;
1268 }
1269
1270 if (from_mode == TImode && to_mode == HImode)
1271 {
1272 #ifdef HAVE_trunctihi2
1273 if (HAVE_trunctihi2)
1274 {
1275 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1276 return;
1277 }
1278 #endif
1279 convert_move (to, force_reg (from_mode, from), unsignedp);
1280 return;
1281 }
1282
1283 if (from_mode == TImode && to_mode == QImode)
1284 {
1285 #ifdef HAVE_trunctiqi2
1286 if (HAVE_trunctiqi2)
1287 {
1288 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1289 return;
1290 }
1291 #endif
1292 convert_move (to, force_reg (from_mode, from), unsignedp);
1293 return;
1294 }
1295
1296 /* Handle truncation of volatile memrefs, and so on;
1297 the things that couldn't be truncated directly,
1298 and for which there was no special instruction. */
1299 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1300 {
1301 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1302 emit_move_insn (to, temp);
1303 return;
1304 }
1305
1306 /* Mode combination is not recognized. */
1307 abort ();
1308 }
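/* A minimal usage sketch of convert_move, assuming nothing beyond what is
   documented above: widen an unsigned QImode value into an SImode register.
   The pseudos and the helper name are hypothetical.  */
#if 0
static void
example_zero_extend (void)
{
  rtx narrow = gen_reg_rtx (QImode);
  rtx wide = gen_reg_rtx (SImode);

  /* Nonzero UNSIGNEDP requests zero-extension rather than sign-extension.  */
  convert_move (wide, narrow, 1);
}
#endif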
1309
1310 /* Return an rtx for a value that would result
1311 from converting X to mode MODE.
1312 Both X and MODE may be floating, or both integer.
1313 UNSIGNEDP is nonzero if X is an unsigned value.
1314 This can be done by referring to a part of X in place
1315 or by copying to a new temporary with conversion.
1316
1317 This function *must not* call protect_from_queue
1318 except when putting X into an insn (in which case convert_move does it). */
1319
1320 rtx
1321 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
1322 {
1323 return convert_modes (mode, VOIDmode, x, unsignedp);
1324 }
1325
1326 /* Return an rtx for a value that would result
1327 from converting X from mode OLDMODE to mode MODE.
1328 Both modes may be floating, or both integer.
1329 UNSIGNEDP is nonzero if X is an unsigned value.
1330
1331 This can be done by referring to a part of X in place
1332 or by copying to a new temporary with conversion.
1333
1334 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1335
1336 This function *must not* call protect_from_queue
1337 except when putting X into an insn (in which case convert_move does it). */
1338
1339 rtx
1340 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
1341 {
1342 rtx temp;
1343
1344 /* If X is a SUBREG that indicates that we have already done at least
1345 the required extension, strip it. */
1346
1347 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1348 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1349 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1350 x = gen_lowpart (mode, x);
1351
1352 if (GET_MODE (x) != VOIDmode)
1353 oldmode = GET_MODE (x);
1354
1355 if (mode == oldmode)
1356 return x;
1357
1358 /* There is one case that we must handle specially: If we are converting
1359 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1360 we are to interpret the constant as unsigned, gen_lowpart will do
1361 the wrong thing if the constant appears negative. What we want to do is
1362 make the high-order word of the constant zero, not all ones. */
1363
1364 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1365 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1366 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1367 {
1368 HOST_WIDE_INT val = INTVAL (x);
1369
1370 if (oldmode != VOIDmode
1371 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1372 {
1373 int width = GET_MODE_BITSIZE (oldmode);
1374
1375 /* We need to zero extend VAL. */
1376 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1377 }
1378
1379 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1380 }
1381
1382 /* We can do this with a gen_lowpart if both desired and current modes
1383 are integer, and this is either a constant integer, a register, or a
1384 non-volatile MEM. Except for the constant case where MODE is no
1385 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1386
1387 if ((GET_CODE (x) == CONST_INT
1388 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1389 || (GET_MODE_CLASS (mode) == MODE_INT
1390 && GET_MODE_CLASS (oldmode) == MODE_INT
1391 && (GET_CODE (x) == CONST_DOUBLE
1392 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1393 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1394 && direct_load[(int) mode])
1395 || (GET_CODE (x) == REG
1396 && (! HARD_REGISTER_P (x)
1397 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1398 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1399 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1400 {
1401 /* ?? If we don't know OLDMODE, we have to assume here that
1402 X does not need sign- or zero-extension. This may not be
1403 the case, but it's the best we can do. */
1404 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1405 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1406 {
1407 HOST_WIDE_INT val = INTVAL (x);
1408 int width = GET_MODE_BITSIZE (oldmode);
1409
1410 /* We must sign or zero-extend in this case. Start by
1411 zero-extending, then sign extend if we need to. */
1412 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1413 if (! unsignedp
1414 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1415 val |= (HOST_WIDE_INT) (-1) << width;
1416
1417 return gen_int_mode (val, mode);
1418 }
1419
1420 return gen_lowpart (mode, x);
1421 }
1422
1423 temp = gen_reg_rtx (mode);
1424 convert_move (temp, x, unsignedp);
1425 return temp;
1426 }
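/* A worked example of why OLDMODE matters, following the code above: for
   X = (const_int 255), convert_modes (SImode, QImode, X, 1) treats the
   QImode bits as unsigned and should return (const_int 255), while
   convert_modes (SImode, QImode, X, 0) sign-extends bit 7 and should
   return (const_int -1).  With OLDMODE == VOIDmode no extension is assumed
   to be needed, as the comment in the code notes.  */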
1427 \f
1428 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1429 store efficiently. Due to internal GCC limitations, this is
1430 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1431 for an immediate constant. */
1432
1433 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
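/* For example, on a host where HOST_WIDE_INT is 64 bits and a target where
   MOVE_MAX_PIECES is 8, this works out to MIN (8, 16) == 8 bytes per
   constant piece; the MOVE_MAX_PIECES value here is only an assumption.  */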
1434
1435 /* Determine whether the LEN bytes can be moved by using several move
1436 instructions. Return nonzero if a call to move_by_pieces should
1437 succeed. */
1438
1439 int
1440 can_move_by_pieces (unsigned HOST_WIDE_INT len,
1441 unsigned int align ATTRIBUTE_UNUSED)
1442 {
1443 return MOVE_BY_PIECES_P (len, align);
1444 }
1445
1446 /* Generate several move instructions to copy LEN bytes from block FROM to
1447 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1448 and TO through protect_from_queue before calling.
1449
1450 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1451 used to push FROM to the stack.
1452
1453 ALIGN is maximum stack alignment we can assume.
1454
1455 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1456 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1457 stpcpy. */
1458
1459 rtx
1460 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1461 unsigned int align, int endp)
1462 {
1463 struct move_by_pieces data;
1464 rtx to_addr, from_addr = XEXP (from, 0);
1465 unsigned int max_size = MOVE_MAX_PIECES + 1;
1466 enum machine_mode mode = VOIDmode, tmode;
1467 enum insn_code icode;
1468
1469 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1470
1471 data.offset = 0;
1472 data.from_addr = from_addr;
1473 if (to)
1474 {
1475 to_addr = XEXP (to, 0);
1476 data.to = to;
1477 data.autinc_to
1478 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1479 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1480 data.reverse
1481 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1482 }
1483 else
1484 {
1485 to_addr = NULL_RTX;
1486 data.to = NULL_RTX;
1487 data.autinc_to = 1;
1488 #ifdef STACK_GROWS_DOWNWARD
1489 data.reverse = 1;
1490 #else
1491 data.reverse = 0;
1492 #endif
1493 }
1494 data.to_addr = to_addr;
1495 data.from = from;
1496 data.autinc_from
1497 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1498 || GET_CODE (from_addr) == POST_INC
1499 || GET_CODE (from_addr) == POST_DEC);
1500
1501 data.explicit_inc_from = 0;
1502 data.explicit_inc_to = 0;
1503 if (data.reverse) data.offset = len;
1504 data.len = len;
1505
1506 /* If copying requires more than two move insns,
1507 copy addresses to registers (to make displacements shorter)
1508 and use post-increment if available. */
1509 if (!(data.autinc_from && data.autinc_to)
1510 && move_by_pieces_ninsns (len, align) > 2)
1511 {
1512 /* Find the mode of the largest move... */
1513 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1514 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1515 if (GET_MODE_SIZE (tmode) < max_size)
1516 mode = tmode;
1517
1518 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1519 {
1520 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1521 data.autinc_from = 1;
1522 data.explicit_inc_from = -1;
1523 }
1524 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1525 {
1526 data.from_addr = copy_addr_to_reg (from_addr);
1527 data.autinc_from = 1;
1528 data.explicit_inc_from = 1;
1529 }
1530 if (!data.autinc_from && CONSTANT_P (from_addr))
1531 data.from_addr = copy_addr_to_reg (from_addr);
1532 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1533 {
1534 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1535 data.autinc_to = 1;
1536 data.explicit_inc_to = -1;
1537 }
1538 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1539 {
1540 data.to_addr = copy_addr_to_reg (to_addr);
1541 data.autinc_to = 1;
1542 data.explicit_inc_to = 1;
1543 }
1544 if (!data.autinc_to && CONSTANT_P (to_addr))
1545 data.to_addr = copy_addr_to_reg (to_addr);
1546 }
1547
1548 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1549 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1550 align = MOVE_MAX * BITS_PER_UNIT;
1551
1552 /* First move what we can in the largest integer mode, then go to
1553 successively smaller modes. */
1554
1555 while (max_size > 1)
1556 {
1557 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1558 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1559 if (GET_MODE_SIZE (tmode) < max_size)
1560 mode = tmode;
1561
1562 if (mode == VOIDmode)
1563 break;
1564
1565 icode = mov_optab->handlers[(int) mode].insn_code;
1566 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1567 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1568
1569 max_size = GET_MODE_SIZE (mode);
1570 }
1571
1572 /* The code above should have handled everything. */
1573 if (data.len > 0)
1574 abort ();
1575
1576 if (endp)
1577 {
1578 rtx to1;
1579
1580 if (data.reverse)
1581 abort ();
1582 if (data.autinc_to)
1583 {
1584 if (endp == 2)
1585 {
1586 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1587 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1588 else
1589 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1590 -1));
1591 }
1592 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1593 data.offset);
1594 }
1595 else
1596 {
1597 if (endp == 2)
1598 --data.offset;
1599 to1 = adjust_address (data.to, QImode, data.offset);
1600 }
1601 return to1;
1602 }
1603 else
1604 return data.to;
1605 }
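/* A worked example of the ENDP convention documented above: for a copy of
   LEN == 5 bytes into TO, ENDP == 0 returns TO itself, ENDP == 1 returns a
   QImode reference to the byte just past the copy (TO plus 5, the mempcpy
   result), and ENDP == 2 returns a reference to the last byte written
   (TO plus 4, the stpcpy result).  */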
1606
1607 /* Return number of insns required to move L bytes by pieces.
1608 ALIGN (in bits) is maximum alignment we can assume. */
1609
1610 static unsigned HOST_WIDE_INT
1611 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
1612 {
1613 unsigned HOST_WIDE_INT n_insns = 0;
1614 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1615
1616 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1617 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1618 align = MOVE_MAX * BITS_PER_UNIT;
1619
1620 while (max_size > 1)
1621 {
1622 enum machine_mode mode = VOIDmode, tmode;
1623 enum insn_code icode;
1624
1625 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1626 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1627 if (GET_MODE_SIZE (tmode) < max_size)
1628 mode = tmode;
1629
1630 if (mode == VOIDmode)
1631 break;
1632
1633 icode = mov_optab->handlers[(int) mode].insn_code;
1634 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1635 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1636
1637 max_size = GET_MODE_SIZE (mode);
1638 }
1639
1640 if (l)
1641 abort ();
1642 return n_insns;
1643 }
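/* A worked example, assuming a target where SImode is the widest mode the
   loop above will try and all the mode alignments are satisfied: for
   L == 11 the loop counts 11/4 = 2 SImode moves (3 bytes left), then
   3/2 = 1 HImode move (1 byte left), then 1 QImode move, for a total of
   4 insns.  */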
1644
1645 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1646 with move instructions for mode MODE. GENFUN is the gen_... function
1647 to make a move insn for that mode. DATA has all the other info. */
1648
1649 static void
1650 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1651 struct move_by_pieces *data)
1652 {
1653 unsigned int size = GET_MODE_SIZE (mode);
1654 rtx to1 = NULL_RTX, from1;
1655
1656 while (data->len >= size)
1657 {
1658 if (data->reverse)
1659 data->offset -= size;
1660
1661 if (data->to)
1662 {
1663 if (data->autinc_to)
1664 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1665 data->offset);
1666 else
1667 to1 = adjust_address (data->to, mode, data->offset);
1668 }
1669
1670 if (data->autinc_from)
1671 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1672 data->offset);
1673 else
1674 from1 = adjust_address (data->from, mode, data->offset);
1675
1676 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1677 emit_insn (gen_add2_insn (data->to_addr,
1678 GEN_INT (-(HOST_WIDE_INT)size)));
1679 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1680 emit_insn (gen_add2_insn (data->from_addr,
1681 GEN_INT (-(HOST_WIDE_INT)size)));
1682
1683 if (data->to)
1684 emit_insn ((*genfun) (to1, from1));
1685 else
1686 {
1687 #ifdef PUSH_ROUNDING
1688 emit_single_push_insn (mode, from1, NULL);
1689 #else
1690 abort ();
1691 #endif
1692 }
1693
1694 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1695 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1696 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1697 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1698
1699 if (! data->reverse)
1700 data->offset += size;
1701
1702 data->len -= size;
1703 }
1704 }
1705 \f
1706 /* Emit code to move a block Y to a block X. This may be done with
1707 string-move instructions, with multiple scalar move instructions,
1708 or with a library call.
1709
1710 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1711 SIZE is an rtx that says how long they are.
1712 ALIGN is the maximum alignment we can assume they have.
1713 METHOD describes what kind of copy this is, and what mechanisms may be used.
1714
1715 Return the address of the new block, if memcpy is called and returns it,
1716 0 otherwise. */
1717
1718 rtx
1719 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1720 {
1721 bool may_use_call;
1722 rtx retval = 0;
1723 unsigned int align;
1724
1725 switch (method)
1726 {
1727 case BLOCK_OP_NORMAL:
1728 may_use_call = true;
1729 break;
1730
1731 case BLOCK_OP_CALL_PARM:
1732 may_use_call = block_move_libcall_safe_for_call_parm ();
1733
1734 /* Make inhibit_defer_pop nonzero around the library call
1735 to force it to pop the arguments right away. */
1736 NO_DEFER_POP;
1737 break;
1738
1739 case BLOCK_OP_NO_LIBCALL:
1740 may_use_call = false;
1741 break;
1742
1743 default:
1744 abort ();
1745 }
1746
1747 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1748
1749 if (GET_MODE (x) != BLKmode)
1750 abort ();
1751 if (GET_MODE (y) != BLKmode)
1752 abort ();
1753
1754 x = protect_from_queue (x, 1);
1755 y = protect_from_queue (y, 0);
1756 size = protect_from_queue (size, 0);
1757
1758 if (GET_CODE (x) != MEM)
1759 abort ();
1760 if (GET_CODE (y) != MEM)
1761 abort ();
1762 if (size == 0)
1763 abort ();
1764
1765 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1766 can be incorrect is coming from __builtin_memcpy. */
1767 if (GET_CODE (size) == CONST_INT)
1768 {
1769 x = shallow_copy_rtx (x);
1770 y = shallow_copy_rtx (y);
1771 set_mem_size (x, size);
1772 set_mem_size (y, size);
1773 }
1774
1775 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1776 move_by_pieces (x, y, INTVAL (size), align, 0);
1777 else if (emit_block_move_via_movstr (x, y, size, align))
1778 ;
1779 else if (may_use_call)
1780 retval = emit_block_move_via_libcall (x, y, size);
1781 else
1782 emit_block_move_via_loop (x, y, size, align);
1783
1784 if (method == BLOCK_OP_CALL_PARM)
1785 OK_DEFER_POP;
1786
1787 return retval;
1788 }
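/* A minimal usage sketch of emit_block_move; the address pseudos, the
   32-byte size and the helper name are made up for the example.  An
   ordinary structure assignment would use BLOCK_OP_NORMAL as shown;
   BLOCK_OP_CALL_PARM is for copies that feed outgoing arguments, and
   BLOCK_OP_NO_LIBCALL forbids falling back to memcpy.  */
#if 0
static void
example_struct_copy (void)
{
  rtx dst = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
  rtx src = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

  emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif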
1789
1790 /* A subroutine of emit_block_move. Returns true if calling the
1791 block move libcall will not clobber any parameters which may have
1792 already been placed on the stack. */
1793
1794 static bool
1795 block_move_libcall_safe_for_call_parm (void)
1796 {
1797 if (PUSH_ARGS)
1798 return true;
1799 else
1800 {
1801 /* Check to see whether memcpy takes all register arguments. */
1802 static enum {
1803 takes_regs_uninit, takes_regs_no, takes_regs_yes
1804 } takes_regs = takes_regs_uninit;
1805
1806 switch (takes_regs)
1807 {
1808 case takes_regs_uninit:
1809 {
1810 CUMULATIVE_ARGS args_so_far;
1811 tree fn, arg;
1812
1813 fn = emit_block_move_libcall_fn (false);
1814 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1815
1816 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1817 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1818 {
1819 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1820 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1821 if (!tmp || !REG_P (tmp))
1822 goto fail_takes_regs;
1823 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1824 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1825 NULL_TREE, 1))
1826 goto fail_takes_regs;
1827 #endif
1828 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1829 }
1830 }
1831 takes_regs = takes_regs_yes;
1832 /* FALLTHRU */
1833
1834 case takes_regs_yes:
1835 return true;
1836
1837 fail_takes_regs:
1838 takes_regs = takes_regs_no;
1839 /* FALLTHRU */
1840 case takes_regs_no:
1841 return false;
1842
1843 default:
1844 abort ();
1845 }
1846 }
1847 }
1848
1849 /* A subroutine of emit_block_move. Expand a movstr pattern;
1850 return true if successful. */
1851
1852 static bool
1853 emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
1854 {
1855 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1856 enum machine_mode mode;
1857
1858 /* Since this is a move insn, we don't care about volatility. */
1859 volatile_ok = 1;
1860
1861 /* Try the most limited insn first, because there's no point
1862 including more than one in the machine description unless
1863 the more limited one has some advantage. */
1864
1865 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1866 mode = GET_MODE_WIDER_MODE (mode))
1867 {
1868 enum insn_code code = movstr_optab[(int) mode];
1869 insn_operand_predicate_fn pred;
1870
1871 if (code != CODE_FOR_nothing
1872 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1873 here because if SIZE is less than the mode mask, as it is
1874 returned by the macro, it will definitely be less than the
1875 actual mode mask. */
1876 && ((GET_CODE (size) == CONST_INT
1877 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1878 <= (GET_MODE_MASK (mode) >> 1)))
1879 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1880 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1881 || (*pred) (x, BLKmode))
1882 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1883 || (*pred) (y, BLKmode))
1884 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1885 || (*pred) (opalign, VOIDmode)))
1886 {
1887 rtx op2;
1888 rtx last = get_last_insn ();
1889 rtx pat;
1890
1891 op2 = convert_to_mode (mode, size, 1);
1892 pred = insn_data[(int) code].operand[2].predicate;
1893 if (pred != 0 && ! (*pred) (op2, mode))
1894 op2 = copy_to_mode_reg (mode, op2);
1895
1896 /* ??? When called via emit_block_move_for_call, it'd be
1897 nice if there were some way to inform the backend, so
1898 that it doesn't fail the expansion because it thinks
1899 emitting the libcall would be more efficient. */
1900
1901 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1902 if (pat)
1903 {
1904 emit_insn (pat);
1905 volatile_ok = 0;
1906 return true;
1907 }
1908 else
1909 delete_insns_since (last);
1910 }
1911 }
1912
1913 volatile_ok = 0;
1914 return false;
1915 }
1916
1917 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1918 Return the return value from memcpy, 0 otherwise. */
1919
1920 static rtx
1921 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1922 {
1923 rtx dst_addr, src_addr;
1924 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1925 enum machine_mode size_mode;
1926 rtx retval;
1927
1928 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1929
1930 It is unsafe to save the value generated by protect_from_queue and reuse
1931 it later. Consider what happens if emit_queue is called before the
1932 return value from protect_from_queue is used.
1933
1934 Expansion of the CALL_EXPR below will call emit_queue before we are
1935 finished emitting RTL for argument setup. So if we are not careful we
1936 could get the wrong value for an argument.
1937
1938 To avoid this problem we go ahead and emit code to copy the addresses of
1939 DST and SRC and SIZE into new pseudos. We can then place those new
1940 pseudos into an RTL_EXPR and use them later, even after a call to
1941 emit_queue.
1942
1943 Note this is not strictly needed for library calls since they do not call
1944 emit_queue before loading their arguments. However, we may need to have
1945 library calls call emit_queue in the future since failing to do so could
1946 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1947 arguments in registers. */
1948
1949 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1950 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1951
1952 #ifdef POINTERS_EXTEND_UNSIGNED
1953 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1954 src_addr = convert_memory_address (ptr_mode, src_addr);
1955 #endif
1956
1957 dst_tree = make_tree (ptr_type_node, dst_addr);
1958 src_tree = make_tree (ptr_type_node, src_addr);
1959
1960 if (TARGET_MEM_FUNCTIONS)
1961 size_mode = TYPE_MODE (sizetype);
1962 else
1963 size_mode = TYPE_MODE (unsigned_type_node);
1964
1965 size = convert_to_mode (size_mode, size, 1);
1966 size = copy_to_mode_reg (size_mode, size);
1967
1968 /* It is incorrect to use the libcall calling conventions to call
1969 memcpy in this context. This could be a user call to memcpy and
1970 the user may wish to examine the return value from memcpy. For
1971 targets where libcalls and normal calls have different conventions
1972 for returning pointers, we could end up generating incorrect code.
1973
1974 For convenience, we generate the call to bcopy this way as well. */
1975
1976 if (TARGET_MEM_FUNCTIONS)
1977 size_tree = make_tree (sizetype, size);
1978 else
1979 size_tree = make_tree (unsigned_type_node, size);
1980
1981 fn = emit_block_move_libcall_fn (true);
1982 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1983 if (TARGET_MEM_FUNCTIONS)
1984 {
1985 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1986 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1987 }
1988 else
1989 {
1990 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1991 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1992 }
1993
1994 /* Now we have to build up the CALL_EXPR itself. */
1995 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1996 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1997 call_expr, arg_list, NULL_TREE);
1998 TREE_SIDE_EFFECTS (call_expr) = 1;
1999
2000 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2001
2002 /* If we are initializing a readonly value, show the above call clobbered
2003 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2004 the delay slot scheduler might overlook conflicts and take nasty
2005 decisions. */
2006 if (RTX_UNCHANGING_P (dst))
2007 add_function_usage_to
2008 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2009 gen_rtx_CLOBBER (VOIDmode, dst),
2010 NULL_RTX));
2011
2012 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
2013 }
2014
2015 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2016 for the function we use for block copies. The first time FOR_CALL
2017 is true, we call assemble_external. */
2018
2019 static GTY(()) tree block_move_fn;
2020
2021 void
2022 init_block_move_fn (const char *asmspec)
2023 {
2024 if (!block_move_fn)
2025 {
2026 tree args, fn;
2027
2028 if (TARGET_MEM_FUNCTIONS)
2029 {
2030 fn = get_identifier ("memcpy");
2031 args = build_function_type_list (ptr_type_node, ptr_type_node,
2032 const_ptr_type_node, sizetype,
2033 NULL_TREE);
2034 }
2035 else
2036 {
2037 fn = get_identifier ("bcopy");
2038 args = build_function_type_list (void_type_node, const_ptr_type_node,
2039 ptr_type_node, unsigned_type_node,
2040 NULL_TREE);
2041 }
2042
2043 fn = build_decl (FUNCTION_DECL, fn, args);
2044 DECL_EXTERNAL (fn) = 1;
2045 TREE_PUBLIC (fn) = 1;
2046 DECL_ARTIFICIAL (fn) = 1;
2047 TREE_NOTHROW (fn) = 1;
2048
2049 block_move_fn = fn;
2050 }
2051
2052 if (asmspec)
2053 {
2054 SET_DECL_RTL (block_move_fn, NULL_RTX);
2055 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2056 }
2057 }
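
/* Illustrative sketch, not part of the original sources: a target that
   wants block copies to resolve to a differently named routine (the
   name below is hypothetical) could call

	init_block_move_fn ("__xyz_memcpy");

   which drops any previously computed DECL_RTL and replaces the
   assembler name while reusing the same FUNCTION_DECL node.  */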
2058
2059 static tree
2060 emit_block_move_libcall_fn (int for_call)
2061 {
2062 static bool emitted_extern;
2063
2064 if (!block_move_fn)
2065 init_block_move_fn (NULL);
2066
2067 if (for_call && !emitted_extern)
2068 {
2069 emitted_extern = true;
2070 make_decl_rtl (block_move_fn, NULL);
2071 assemble_external (block_move_fn);
2072 }
2073
2074 return block_move_fn;
2075 }
2076
2077 /* A subroutine of emit_block_move. Copy the data via an explicit
2078 loop. This is used only when libcalls are forbidden. */
2079 /* ??? It'd be nice to copy in hunks larger than QImode. */
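
/* For illustration (an equivalent spelled out in C, not taken from the
   sources): the RTL emitted below amounts to

	for (i = 0; i < size; i++)
	  ((char *) x)[i] = ((char *) y)[i];

   with the comparison placed at the bottom of the loop and entered via
   an initial jump, so a zero SIZE copies nothing.  */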
2080
2081 static void
2082 emit_block_move_via_loop (rtx x, rtx y, rtx size,
2083 unsigned int align ATTRIBUTE_UNUSED)
2084 {
2085 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2086 enum machine_mode iter_mode;
2087
2088 iter_mode = GET_MODE (size);
2089 if (iter_mode == VOIDmode)
2090 iter_mode = word_mode;
2091
2092 top_label = gen_label_rtx ();
2093 cmp_label = gen_label_rtx ();
2094 iter = gen_reg_rtx (iter_mode);
2095
2096 emit_move_insn (iter, const0_rtx);
2097
2098 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2099 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2100 do_pending_stack_adjust ();
2101
2102 emit_note (NOTE_INSN_LOOP_BEG);
2103
2104 emit_jump (cmp_label);
2105 emit_label (top_label);
2106
2107 tmp = convert_modes (Pmode, iter_mode, iter, true);
2108 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2109 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2110 x = change_address (x, QImode, x_addr);
2111 y = change_address (y, QImode, y_addr);
2112
2113 emit_move_insn (x, y);
2114
2115 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2116 true, OPTAB_LIB_WIDEN);
2117 if (tmp != iter)
2118 emit_move_insn (iter, tmp);
2119
2120 emit_note (NOTE_INSN_LOOP_CONT);
2121 emit_label (cmp_label);
2122
2123 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2124 true, top_label);
2125
2126 emit_note (NOTE_INSN_LOOP_END);
2127 }
2128 \f
2129 /* Copy all or part of a value X into registers starting at REGNO.
2130 The number of registers to be filled is NREGS. */
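
/* Illustrative sketch, not part of the original sources: to load the
   first two words of a BLKmode value X into hard registers 3 and 4
   (hypothetical argument registers) one would write

	move_block_to_reg (3, x, 2, BLKmode);

   which uses a load_multiple pattern when the target provides one and
   otherwise falls back to one word_mode move per register.  */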
2131
2132 void
2133 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
2134 {
2135 int i;
2136 #ifdef HAVE_load_multiple
2137 rtx pat;
2138 rtx last;
2139 #endif
2140
2141 if (nregs == 0)
2142 return;
2143
2144 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2145 x = validize_mem (force_const_mem (mode, x));
2146
2147 /* See if the machine can do this with a load multiple insn. */
2148 #ifdef HAVE_load_multiple
2149 if (HAVE_load_multiple)
2150 {
2151 last = get_last_insn ();
2152 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2153 GEN_INT (nregs));
2154 if (pat)
2155 {
2156 emit_insn (pat);
2157 return;
2158 }
2159 else
2160 delete_insns_since (last);
2161 }
2162 #endif
2163
2164 for (i = 0; i < nregs; i++)
2165 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2166 operand_subword_force (x, i, mode));
2167 }
2168
2169 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2170 The number of registers to be filled is NREGS. */
2171
2172 void
2173 move_block_from_reg (int regno, rtx x, int nregs)
2174 {
2175 int i;
2176
2177 if (nregs == 0)
2178 return;
2179
2180 /* See if the machine can do this with a store multiple insn. */
2181 #ifdef HAVE_store_multiple
2182 if (HAVE_store_multiple)
2183 {
2184 rtx last = get_last_insn ();
2185 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2186 GEN_INT (nregs));
2187 if (pat)
2188 {
2189 emit_insn (pat);
2190 return;
2191 }
2192 else
2193 delete_insns_since (last);
2194 }
2195 #endif
2196
2197 for (i = 0; i < nregs; i++)
2198 {
2199 rtx tem = operand_subword (x, i, 1, BLKmode);
2200
2201 if (tem == 0)
2202 abort ();
2203
2204 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2205 }
2206 }
2207
2208 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2209 ORIG, where ORIG is a non-consecutive group of registers represented by
2210 a PARALLEL. The clone is identical to the original except that the
2211 original set of registers is replaced by a new set of pseudo registers.
2212 The new set has the same modes as the original set. */
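
/* For illustration (an assumed example, not taken from any particular
   target): a value passed in two 8-byte registers might be described by

	(parallel [(expr_list (reg:DI 3) (const_int 0))
		   (expr_list (reg:DI 4) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   value.  A null register in the first element means that part of the
   value also lives on the stack; the group routines below skip it.  */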
2213
2214 rtx
2215 gen_group_rtx (rtx orig)
2216 {
2217 int i, length;
2218 rtx *tmps;
2219
2220 if (GET_CODE (orig) != PARALLEL)
2221 abort ();
2222
2223 length = XVECLEN (orig, 0);
2224 tmps = (rtx *) alloca (sizeof (rtx) * length);
2225
2226 /* Skip a NULL entry in first slot. */
2227 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2228
2229 if (i)
2230 tmps[0] = 0;
2231
2232 for (; i < length; i++)
2233 {
2234 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2235 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2236
2237 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2238 }
2239
2240 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2241 }
2242
2243 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
2244 where DST is non-consecutive registers represented by a PARALLEL.
2245 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2246 if not known. */
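
/* A worked example using assumed numbers rather than any particular
   ABI: loading a 12-byte structure into the two-register PARALLEL
   sketched above gives SSIZE == 12.  The second piece has BYTEPOS == 8
   and a nominal BYTELEN of 8, which overruns the structure, so BYTELEN
   is trimmed to 4 and, on a big-endian target (or whenever
   BLOCK_REG_PADDING asks for it), the loaded fragment is shifted left
   by (8 - 4) * 8 == 32 bits so that it sits where the ABI expects it
   within the register.  */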
2247
2248 void
2249 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
2250 {
2251 rtx *tmps, src;
2252 int start, i;
2253
2254 if (GET_CODE (dst) != PARALLEL)
2255 abort ();
2256
2257 /* Check for a NULL entry, used to indicate that the parameter goes
2258 both on the stack and in registers. */
2259 if (XEXP (XVECEXP (dst, 0, 0), 0))
2260 start = 0;
2261 else
2262 start = 1;
2263
2264 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2265
2266 /* Process the pieces. */
2267 for (i = start; i < XVECLEN (dst, 0); i++)
2268 {
2269 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2270 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2271 unsigned int bytelen = GET_MODE_SIZE (mode);
2272 int shift = 0;
2273
2274 /* Handle trailing fragments that run over the size of the struct. */
2275 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2276 {
2277 /* Arrange to shift the fragment to where it belongs.
2278 extract_bit_field loads to the lsb of the reg. */
2279 if (
2280 #ifdef BLOCK_REG_PADDING
2281 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2282 == (BYTES_BIG_ENDIAN ? upward : downward)
2283 #else
2284 BYTES_BIG_ENDIAN
2285 #endif
2286 )
2287 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2288 bytelen = ssize - bytepos;
2289 if (bytelen <= 0)
2290 abort ();
2291 }
2292
2293 /* If we won't be loading directly from memory, protect the real source
2294 from strange tricks we might play; but make sure that the source can
2295 be loaded directly into the destination. */
2296 src = orig_src;
2297 if (GET_CODE (orig_src) != MEM
2298 && (!CONSTANT_P (orig_src)
2299 || (GET_MODE (orig_src) != mode
2300 && GET_MODE (orig_src) != VOIDmode)))
2301 {
2302 if (GET_MODE (orig_src) == VOIDmode)
2303 src = gen_reg_rtx (mode);
2304 else
2305 src = gen_reg_rtx (GET_MODE (orig_src));
2306
2307 emit_move_insn (src, orig_src);
2308 }
2309
2310 /* Optimize the access just a bit. */
2311 if (GET_CODE (src) == MEM
2312 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2313 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2314 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2315 && bytelen == GET_MODE_SIZE (mode))
2316 {
2317 tmps[i] = gen_reg_rtx (mode);
2318 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2319 }
2320 else if (GET_CODE (src) == CONCAT)
2321 {
2322 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2323 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2324
2325 if ((bytepos == 0 && bytelen == slen0)
2326 || (bytepos != 0 && bytepos + bytelen <= slen))
2327 {
2328 /* The following assumes that the concatenated objects all
2329 have the same size. In this case, a simple calculation
2330 can be used to determine the object and the bit field
2331 to be extracted. */
2332 tmps[i] = XEXP (src, bytepos / slen0);
2333 if (! CONSTANT_P (tmps[i])
2334 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2335 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2336 (bytepos % slen0) * BITS_PER_UNIT,
2337 1, NULL_RTX, mode, mode, ssize);
2338 }
2339 else if (bytepos == 0)
2340 {
2341 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2342 emit_move_insn (mem, src);
2343 tmps[i] = adjust_address (mem, mode, 0);
2344 }
2345 else
2346 abort ();
2347 }
2348 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2349 SIMD register, which is currently broken. Until we get GCC
2350 to emit proper RTL for these cases, let's dump to memory. */
2351 else if (VECTOR_MODE_P (GET_MODE (dst))
2352 && GET_CODE (src) == REG)
2353 {
2354 int slen = GET_MODE_SIZE (GET_MODE (src));
2355 rtx mem;
2356
2357 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2358 emit_move_insn (mem, src);
2359 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2360 }
2361 else if (CONSTANT_P (src)
2362 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2363 tmps[i] = src;
2364 else
2365 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2366 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2367 mode, mode, ssize);
2368
2369 if (shift)
2370 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2371 tmps[i], 0, OPTAB_WIDEN);
2372 }
2373
2374 emit_queue ();
2375
2376 /* Copy the extracted pieces into the proper (probable) hard regs. */
2377 for (i = start; i < XVECLEN (dst, 0); i++)
2378 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2379 }
2380
2381 /* Emit code to move a block SRC to block DST, where SRC and DST are
2382 non-consecutive groups of registers, each represented by a PARALLEL. */
2383
2384 void
2385 emit_group_move (rtx dst, rtx src)
2386 {
2387 int i;
2388
2389 if (GET_CODE (src) != PARALLEL
2390 || GET_CODE (dst) != PARALLEL
2391 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2392 abort ();
2393
2394 /* Skip first entry if NULL. */
2395 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2396 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2397 XEXP (XVECEXP (src, 0, i), 0));
2398 }
2399
2400 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2401 where SRC is non-consecutive registers represented by a PARALLEL.
2402 SSIZE represents the total size of block ORIG_DST, or -1 if not
2403 known. */
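
/* A worked example mirroring the one given for emit_group_load, again
   with assumed numbers: storing a 12-byte structure from two 8-byte
   registers gives SSIZE == 12; for the second piece the value is first
   shifted right by 32 bits (when the padding rules call for it) and
   only the trailing 4 bytes are written, since store_bit_field always
   takes its operand from the least significant end.  */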
2404
2405 void
2406 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2407 {
2408 rtx *tmps, dst;
2409 int start, i;
2410
2411 if (GET_CODE (src) != PARALLEL)
2412 abort ();
2413
2414 /* Check for a NULL entry, used to indicate that the parameter goes
2415 both on the stack and in registers. */
2416 if (XEXP (XVECEXP (src, 0, 0), 0))
2417 start = 0;
2418 else
2419 start = 1;
2420
2421 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2422
2423 /* Copy the (probable) hard regs into pseudos. */
2424 for (i = start; i < XVECLEN (src, 0); i++)
2425 {
2426 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2427 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2428 emit_move_insn (tmps[i], reg);
2429 }
2430 emit_queue ();
2431
2432 /* If we won't be storing directly into memory, protect the real destination
2433 from strange tricks we might play. */
2434 dst = orig_dst;
2435 if (GET_CODE (dst) == PARALLEL)
2436 {
2437 rtx temp;
2438
2439 /* We can get a PARALLEL dst if there is a conditional expression in
2440 a return statement. In that case, the dst and src are the same,
2441 so no action is necessary. */
2442 if (rtx_equal_p (dst, src))
2443 return;
2444
2445 /* It is unclear if we can ever reach here, but we may as well handle
2446 it. Allocate a temporary, and split this into a store/load to/from
2447 the temporary. */
2448
2449 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2450 emit_group_store (temp, src, type, ssize);
2451 emit_group_load (dst, temp, type, ssize);
2452 return;
2453 }
2454 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2455 {
2456 dst = gen_reg_rtx (GET_MODE (orig_dst));
2457 /* Make life a bit easier for combine. */
2458 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2459 }
2460
2461 /* Process the pieces. */
2462 for (i = start; i < XVECLEN (src, 0); i++)
2463 {
2464 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2465 enum machine_mode mode = GET_MODE (tmps[i]);
2466 unsigned int bytelen = GET_MODE_SIZE (mode);
2467 rtx dest = dst;
2468
2469 /* Handle trailing fragments that run over the size of the struct. */
2470 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2471 {
2472 /* store_bit_field always takes its value from the lsb.
2473 Move the fragment to the lsb if it's not already there. */
2474 if (
2475 #ifdef BLOCK_REG_PADDING
2476 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2477 == (BYTES_BIG_ENDIAN ? upward : downward)
2478 #else
2479 BYTES_BIG_ENDIAN
2480 #endif
2481 )
2482 {
2483 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2484 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2485 tmps[i], 0, OPTAB_WIDEN);
2486 }
2487 bytelen = ssize - bytepos;
2488 }
2489
2490 if (GET_CODE (dst) == CONCAT)
2491 {
2492 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2493 dest = XEXP (dst, 0);
2494 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2495 {
2496 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2497 dest = XEXP (dst, 1);
2498 }
2499 else if (bytepos == 0 && XVECLEN (src, 0))
2500 {
2501 dest = assign_stack_temp (GET_MODE (dest),
2502 GET_MODE_SIZE (GET_MODE (dest)), 0);
2503 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2504 tmps[i]);
2505 dst = dest;
2506 break;
2507 }
2508 else
2509 abort ();
2510 }
2511
2512 /* Optimize the access just a bit. */
2513 if (GET_CODE (dest) == MEM
2514 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2515 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2516 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2517 && bytelen == GET_MODE_SIZE (mode))
2518 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2519 else
2520 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2521 mode, tmps[i], ssize);
2522 }
2523
2524 emit_queue ();
2525
2526 /* Copy from the pseudo into the (probable) hard reg. */
2527 if (orig_dst != dst)
2528 emit_move_insn (orig_dst, dst);
2529 }
2530
2531 /* Generate code to copy a BLKmode object of TYPE out of a
2532 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2533 is null, a stack temporary is created. TGTBLK is returned.
2534
2535 The primary purpose of this routine is to handle functions
2536 that return BLKmode structures in registers. Some machines
2537 (the PA for example) want to return all small structures
2538 in registers regardless of the structure's alignment. */
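
/* A worked example with assumed parameters: for a 6-byte structure on
   a 32-bit big-endian target, BYTES == 6 and UNITS_PER_WORD == 4, so
   big_endian_correction is 32 - (6 % 4) * 8 == 16 bits.  The first
   extraction therefore starts 16 bits into the source register,
   skipping the empty high-order bytes, while the destination offset
   BITPOS starts at 0.  */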
2539
2540 rtx
2541 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2542 {
2543 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2544 rtx src = NULL, dst = NULL;
2545 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2546 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2547
2548 if (tgtblk == 0)
2549 {
2550 tgtblk = assign_temp (build_qualified_type (type,
2551 (TYPE_QUALS (type)
2552 | TYPE_QUAL_CONST)),
2553 0, 1, 1);
2554 preserve_temp_slots (tgtblk);
2555 }
2556
2557 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2558 into a new pseudo which is a full word. */
2559
2560 if (GET_MODE (srcreg) != BLKmode
2561 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2562 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2563
2564 /* Structures whose size is not a multiple of a word are aligned
2565 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2566 machine, this means we must skip the empty high order bytes when
2567 calculating the bit offset. */
2568 if (BYTES_BIG_ENDIAN
2569 && bytes % UNITS_PER_WORD)
2570 big_endian_correction
2571 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2572
2573 /* Copy the structure BITSIZE bits at a time.
2574
2575 We could probably emit more efficient code for machines which do not use
2576 strict alignment, but it doesn't seem worth the effort at the current
2577 time. */
2578 for (bitpos = 0, xbitpos = big_endian_correction;
2579 bitpos < bytes * BITS_PER_UNIT;
2580 bitpos += bitsize, xbitpos += bitsize)
2581 {
2582 /* We need a new source operand each time xbitpos is on a
2583 word boundary and when xbitpos == big_endian_correction
2584 (the first time through). */
2585 if (xbitpos % BITS_PER_WORD == 0
2586 || xbitpos == big_endian_correction)
2587 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2588 GET_MODE (srcreg));
2589
2590 /* We need a new destination operand each time bitpos is on
2591 a word boundary. */
2592 if (bitpos % BITS_PER_WORD == 0)
2593 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2594
2595 /* Use xbitpos for the source extraction (right justified) and
2596 bitpos for the destination store (left justified). */
2597 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2598 extract_bit_field (src, bitsize,
2599 xbitpos % BITS_PER_WORD, 1,
2600 NULL_RTX, word_mode, word_mode,
2601 BITS_PER_WORD),
2602 BITS_PER_WORD);
2603 }
2604
2605 return tgtblk;
2606 }
2607
2608 /* Add a USE expression for REG to the (possibly empty) list pointed
2609 to by CALL_FUSAGE. REG must denote a hard register. */
2610
2611 void
2612 use_reg (rtx *call_fusage, rtx reg)
2613 {
2614 if (GET_CODE (reg) != REG
2615 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2616 abort ();
2617
2618 *call_fusage
2619 = gen_rtx_EXPR_LIST (VOIDmode,
2620 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2621 }
2622
2623 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2624 starting at REGNO. All of these registers must be hard registers. */
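
/* Illustrative sketch, not part of the original sources: after copying
   an argument into hard registers 4 and 5 (hypothetical argument
   registers), a caller records the fact with

	use_regs (&call_fusage, 4, 2);

   so that the USE expressions end up in CALL_INSN_FUNCTION_USAGE and
   data-flow analysis keeps both registers live up to the call.  */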
2625
2626 void
2627 use_regs (rtx *call_fusage, int regno, int nregs)
2628 {
2629 int i;
2630
2631 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2632 abort ();
2633
2634 for (i = 0; i < nregs; i++)
2635 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2636 }
2637
2638 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2639 PARALLEL REGS. This is for calls that pass values in multiple
2640 non-contiguous locations. The Irix 6 ABI has examples of this. */
2641
2642 void
2643 use_group_regs (rtx *call_fusage, rtx regs)
2644 {
2645 int i;
2646
2647 for (i = 0; i < XVECLEN (regs, 0); i++)
2648 {
2649 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2650
2651 /* A NULL entry means the parameter goes both on the stack and in
2652 registers. This can also be a MEM for targets that pass values
2653 partially on the stack and partially in registers. */
2654 if (reg != 0 && GET_CODE (reg) == REG)
2655 use_reg (call_fusage, reg);
2656 }
2657 }
2658 \f
2659
2660 /* Determine whether the LEN bytes generated by CONSTFUN can be
2661 stored to memory using several move instructions. CONSTFUNDATA is
2662 a pointer which will be passed as argument in every CONSTFUN call.
2663 ALIGN is maximum alignment we can assume. Return nonzero if a
2664 call to store_by_pieces should succeed. */
2665
2666 int
2667 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2668 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2669 void *constfundata, unsigned int align)
2670 {
2671 unsigned HOST_WIDE_INT max_size, l;
2672 HOST_WIDE_INT offset = 0;
2673 enum machine_mode mode, tmode;
2674 enum insn_code icode;
2675 int reverse;
2676 rtx cst;
2677
2678 if (len == 0)
2679 return 1;
2680
2681 if (! STORE_BY_PIECES_P (len, align))
2682 return 0;
2683
2684 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2685 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2686 align = MOVE_MAX * BITS_PER_UNIT;
2687
2688 /* We would first store what we can in the largest integer mode, then go to
2689 successively smaller modes. */
2690
2691 for (reverse = 0;
2692 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2693 reverse++)
2694 {
2695 l = len;
2696 mode = VOIDmode;
2697 max_size = STORE_MAX_PIECES + 1;
2698 while (max_size > 1)
2699 {
2700 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2701 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2702 if (GET_MODE_SIZE (tmode) < max_size)
2703 mode = tmode;
2704
2705 if (mode == VOIDmode)
2706 break;
2707
2708 icode = mov_optab->handlers[(int) mode].insn_code;
2709 if (icode != CODE_FOR_nothing
2710 && align >= GET_MODE_ALIGNMENT (mode))
2711 {
2712 unsigned int size = GET_MODE_SIZE (mode);
2713
2714 while (l >= size)
2715 {
2716 if (reverse)
2717 offset -= size;
2718
2719 cst = (*constfun) (constfundata, offset, mode);
2720 if (!LEGITIMATE_CONSTANT_P (cst))
2721 return 0;
2722
2723 if (!reverse)
2724 offset += size;
2725
2726 l -= size;
2727 }
2728 }
2729
2730 max_size = GET_MODE_SIZE (mode);
2731 }
2732
2733 /* The code above should have handled everything. */
2734 if (l != 0)
2735 abort ();
2736 }
2737
2738 return 1;
2739 }
2740
2741 /* Generate several move instructions to store LEN bytes generated by
2742 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2743 pointer which will be passed as argument in every CONSTFUN call.
2744 ALIGN is maximum alignment we can assume.
2745 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2746 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2747 stpcpy. */
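
/* Illustrative sketch, not part of the original sources: a caller that
   wants to fill a block with an all-ones pattern supplies a callback
   of the required shape (the name is hypothetical)

	static rtx
	all_ones_constfun (void *data ATTRIBUTE_UNUSED,
			   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
	{
	  return gen_int_mode (-1, mode);
	}

   and pairs the query with the expansion:

	if (can_store_by_pieces (len, all_ones_constfun, NULL, align))
	  store_by_pieces (to, len, all_ones_constfun, NULL, align, 0);  */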
2748
2749 rtx
2750 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2751 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2752 void *constfundata, unsigned int align, int endp)
2753 {
2754 struct store_by_pieces data;
2755
2756 if (len == 0)
2757 {
2758 if (endp == 2)
2759 abort ();
2760 return to;
2761 }
2762
2763 if (! STORE_BY_PIECES_P (len, align))
2764 abort ();
2765 to = protect_from_queue (to, 1);
2766 data.constfun = constfun;
2767 data.constfundata = constfundata;
2768 data.len = len;
2769 data.to = to;
2770 store_by_pieces_1 (&data, align);
2771 if (endp)
2772 {
2773 rtx to1;
2774
2775 if (data.reverse)
2776 abort ();
2777 if (data.autinc_to)
2778 {
2779 if (endp == 2)
2780 {
2781 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2782 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2783 else
2784 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2785 -1));
2786 }
2787 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2788 data.offset);
2789 }
2790 else
2791 {
2792 if (endp == 2)
2793 --data.offset;
2794 to1 = adjust_address (data.to, QImode, data.offset);
2795 }
2796 return to1;
2797 }
2798 else
2799 return data.to;
2800 }
2801
2802 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2803 rtx with BLKmode). The caller must pass TO through protect_from_queue
2804 before calling. ALIGN is maximum alignment we can assume. */
2805
2806 static void
2807 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2808 {
2809 struct store_by_pieces data;
2810
2811 if (len == 0)
2812 return;
2813
2814 data.constfun = clear_by_pieces_1;
2815 data.constfundata = NULL;
2816 data.len = len;
2817 data.to = to;
2818 store_by_pieces_1 (&data, align);
2819 }
2820
2821 /* Callback routine for clear_by_pieces.
2822 Return const0_rtx unconditionally. */
2823
2824 static rtx
2825 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2826 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2827 enum machine_mode mode ATTRIBUTE_UNUSED)
2828 {
2829 return const0_rtx;
2830 }
2831
2832 /* Subroutine of clear_by_pieces and store_by_pieces.
2833 Generate several move instructions to store LEN bytes of block TO. (A MEM
2834 rtx with BLKmode). The caller must pass TO through protect_from_queue
2835 before calling. ALIGN is maximum alignment we can assume. */
2836
2837 static void
2838 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2839 unsigned int align ATTRIBUTE_UNUSED)
2840 {
2841 rtx to_addr = XEXP (data->to, 0);
2842 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2843 enum machine_mode mode = VOIDmode, tmode;
2844 enum insn_code icode;
2845
2846 data->offset = 0;
2847 data->to_addr = to_addr;
2848 data->autinc_to
2849 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2850 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2851
2852 data->explicit_inc_to = 0;
2853 data->reverse
2854 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2855 if (data->reverse)
2856 data->offset = data->len;
2857
2858 /* If storing requires more than two move insns,
2859 copy addresses to registers (to make displacements shorter)
2860 and use post-increment if available. */
2861 if (!data->autinc_to
2862 && move_by_pieces_ninsns (data->len, align) > 2)
2863 {
2864 /* Determine the main mode we'll be using. */
2865 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2866 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2867 if (GET_MODE_SIZE (tmode) < max_size)
2868 mode = tmode;
2869
2870 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2871 {
2872 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2873 data->autinc_to = 1;
2874 data->explicit_inc_to = -1;
2875 }
2876
2877 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2878 && ! data->autinc_to)
2879 {
2880 data->to_addr = copy_addr_to_reg (to_addr);
2881 data->autinc_to = 1;
2882 data->explicit_inc_to = 1;
2883 }
2884
2885 if ( !data->autinc_to && CONSTANT_P (to_addr))
2886 data->to_addr = copy_addr_to_reg (to_addr);
2887 }
2888
2889 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2890 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2891 align = MOVE_MAX * BITS_PER_UNIT;
2892
2893 /* First store what we can in the largest integer mode, then go to
2894 successively smaller modes. */
2895
2896 while (max_size > 1)
2897 {
2898 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2899 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2900 if (GET_MODE_SIZE (tmode) < max_size)
2901 mode = tmode;
2902
2903 if (mode == VOIDmode)
2904 break;
2905
2906 icode = mov_optab->handlers[(int) mode].insn_code;
2907 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2908 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2909
2910 max_size = GET_MODE_SIZE (mode);
2911 }
2912
2913 /* The code above should have handled everything. */
2914 if (data->len != 0)
2915 abort ();
2916 }
2917
2918 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2919 with move instructions for mode MODE. GENFUN is the gen_... function
2920 to make a move insn for that mode. DATA has all the other info. */
2921
2922 static void
2923 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2924 struct store_by_pieces *data)
2925 {
2926 unsigned int size = GET_MODE_SIZE (mode);
2927 rtx to1, cst;
2928
2929 while (data->len >= size)
2930 {
2931 if (data->reverse)
2932 data->offset -= size;
2933
2934 if (data->autinc_to)
2935 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2936 data->offset);
2937 else
2938 to1 = adjust_address (data->to, mode, data->offset);
2939
2940 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2941 emit_insn (gen_add2_insn (data->to_addr,
2942 GEN_INT (-(HOST_WIDE_INT) size)));
2943
2944 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2945 emit_insn ((*genfun) (to1, cst));
2946
2947 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2948 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2949
2950 if (! data->reverse)
2951 data->offset += size;
2952
2953 data->len -= size;
2954 }
2955 }
2956 \f
2957 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2958 its length in bytes. */
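
/* Illustrative sketch, not part of the original sources: to zero a
   BLKmode object OBJECT of SIZE bytes one writes

	clear_storage (object, GEN_INT (size));

   constant sizes accepted by CLEAR_BY_PIECES_P go through
   clear_by_pieces, otherwise a clrstr pattern is tried, and failing
   that a call to memset or bzero is emitted.  */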
2959
2960 rtx
2961 clear_storage (rtx object, rtx size)
2962 {
2963 rtx retval = 0;
2964 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2965 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2966
2967 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2968 just move a zero. Otherwise, do this a piece at a time. */
2969 if (GET_MODE (object) != BLKmode
2970 && GET_CODE (size) == CONST_INT
2971 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2972 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2973 else
2974 {
2975 object = protect_from_queue (object, 1);
2976 size = protect_from_queue (size, 0);
2977
2978 if (GET_CODE (size) == CONST_INT && INTVAL (size) == 0)
2979 ;
2980 else if (GET_CODE (size) == CONST_INT
2981 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2982 clear_by_pieces (object, INTVAL (size), align);
2983 else if (clear_storage_via_clrstr (object, size, align))
2984 ;
2985 else
2986 retval = clear_storage_via_libcall (object, size);
2987 }
2988
2989 return retval;
2990 }
2991
2992 /* A subroutine of clear_storage. Expand a clrstr pattern;
2993 return true if successful. */
2994
2995 static bool
2996 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2997 {
2998 /* Try the most limited insn first, because there's no point
2999 including more than one in the machine description unless
3000 the more limited one has some advantage. */
3001
3002 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3003 enum machine_mode mode;
3004
3005 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3006 mode = GET_MODE_WIDER_MODE (mode))
3007 {
3008 enum insn_code code = clrstr_optab[(int) mode];
3009 insn_operand_predicate_fn pred;
3010
3011 if (code != CODE_FOR_nothing
3012 /* We don't need MODE to be narrower than
3013 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3014 the mode mask, as it is returned by the macro, it will
3015 definitely be less than the actual mode mask. */
3016 && ((GET_CODE (size) == CONST_INT
3017 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3018 <= (GET_MODE_MASK (mode) >> 1)))
3019 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3020 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3021 || (*pred) (object, BLKmode))
3022 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3023 || (*pred) (opalign, VOIDmode)))
3024 {
3025 rtx op1;
3026 rtx last = get_last_insn ();
3027 rtx pat;
3028
3029 op1 = convert_to_mode (mode, size, 1);
3030 pred = insn_data[(int) code].operand[1].predicate;
3031 if (pred != 0 && ! (*pred) (op1, mode))
3032 op1 = copy_to_mode_reg (mode, op1);
3033
3034 pat = GEN_FCN ((int) code) (object, op1, opalign);
3035 if (pat)
3036 {
3037 emit_insn (pat);
3038 return true;
3039 }
3040 else
3041 delete_insns_since (last);
3042 }
3043 }
3044
3045 return false;
3046 }
3047
3048 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3049 Return the return value of memset, 0 otherwise. */
3050
3051 static rtx
3052 clear_storage_via_libcall (rtx object, rtx size)
3053 {
3054 tree call_expr, arg_list, fn, object_tree, size_tree;
3055 enum machine_mode size_mode;
3056 rtx retval;
3057
3058 /* OBJECT or SIZE may have been passed through protect_from_queue.
3059
3060 It is unsafe to save the value generated by protect_from_queue
3061 and reuse it later. Consider what happens if emit_queue is
3062 called before the return value from protect_from_queue is used.
3063
3064 Expansion of the CALL_EXPR below will call emit_queue before
3065 we are finished emitting RTL for argument setup. So if we are
3066 not careful we could get the wrong value for an argument.
3067
3068 To avoid this problem we go ahead and emit code to copy OBJECT
3069 and SIZE into new pseudos. We can then place those new pseudos
3070 into an RTL_EXPR and use them later, even after a call to
3071 emit_queue.
3072
3073 Note this is not strictly needed for library calls since they
3074 do not call emit_queue before loading their arguments. However,
3075 we may need to have library calls call emit_queue in the future
3076 since failing to do so could cause problems for targets which
3077 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3078
3079 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3080
3081 if (TARGET_MEM_FUNCTIONS)
3082 size_mode = TYPE_MODE (sizetype);
3083 else
3084 size_mode = TYPE_MODE (unsigned_type_node);
3085 size = convert_to_mode (size_mode, size, 1);
3086 size = copy_to_mode_reg (size_mode, size);
3087
3088 /* It is incorrect to use the libcall calling conventions to call
3089 memset in this context. This could be a user call to memset and
3090 the user may wish to examine the return value from memset. For
3091 targets where libcalls and normal calls have different conventions
3092 for returning pointers, we could end up generating incorrect code.
3093
3094 For convenience, we generate the call to bzero this way as well. */
3095
3096 object_tree = make_tree (ptr_type_node, object);
3097 if (TARGET_MEM_FUNCTIONS)
3098 size_tree = make_tree (sizetype, size);
3099 else
3100 size_tree = make_tree (unsigned_type_node, size);
3101
3102 fn = clear_storage_libcall_fn (true);
3103 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3104 if (TARGET_MEM_FUNCTIONS)
3105 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3106 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3107
3108 /* Now we have to build up the CALL_EXPR itself. */
3109 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3110 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3111 call_expr, arg_list, NULL_TREE);
3112 TREE_SIDE_EFFECTS (call_expr) = 1;
3113
3114 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3115
3116 /* If we are initializing a readonly value, show the above call
3117 clobbered it. Otherwise, a load from it may erroneously be
3118 hoisted from a loop. */
3119 if (RTX_UNCHANGING_P (object))
3120 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3121
3122 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3123 }
3124
3125 /* A subroutine of clear_storage_via_libcall. Create the tree node
3126 for the function we use for block clears. The first time FOR_CALL
3127 is true, we call assemble_external. */
3128
3129 static GTY(()) tree block_clear_fn;
3130
3131 void
3132 init_block_clear_fn (const char *asmspec)
3133 {
3134 if (!block_clear_fn)
3135 {
3136 tree fn, args;
3137
3138 if (TARGET_MEM_FUNCTIONS)
3139 {
3140 fn = get_identifier ("memset");
3141 args = build_function_type_list (ptr_type_node, ptr_type_node,
3142 integer_type_node, sizetype,
3143 NULL_TREE);
3144 }
3145 else
3146 {
3147 fn = get_identifier ("bzero");
3148 args = build_function_type_list (void_type_node, ptr_type_node,
3149 unsigned_type_node, NULL_TREE);
3150 }
3151
3152 fn = build_decl (FUNCTION_DECL, fn, args);
3153 DECL_EXTERNAL (fn) = 1;
3154 TREE_PUBLIC (fn) = 1;
3155 DECL_ARTIFICIAL (fn) = 1;
3156 TREE_NOTHROW (fn) = 1;
3157
3158 block_clear_fn = fn;
3159 }
3160
3161 if (asmspec)
3162 {
3163 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3164 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3165 }
3166 }
3167
3168 static tree
3169 clear_storage_libcall_fn (int for_call)
3170 {
3171 static bool emitted_extern;
3172
3173 if (!block_clear_fn)
3174 init_block_clear_fn (NULL);
3175
3176 if (for_call && !emitted_extern)
3177 {
3178 emitted_extern = true;
3179 make_decl_rtl (block_clear_fn, NULL);
3180 assemble_external (block_clear_fn);
3181 }
3182
3183 return block_clear_fn;
3184 }
3185 \f
3186 /* Generate code to copy Y into X.
3187 Both Y and X must have the same mode, except that
3188 Y can be a constant with VOIDmode.
3189 This mode cannot be BLKmode; use emit_block_move for that.
3190
3191 Return the last instruction emitted. */
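
/* Illustrative sketch, not part of the original sources: the common
   uses are moves between a pseudo and a constant or memory, e.g.

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, GEN_INT (42));
	emit_move_insn (mem, reg);

   where MEM is an assumed SImode memory reference; GEN_INT produces a
   VOIDmode CONST_INT, which is why Y may have VOIDmode above.  */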
3192
3193 rtx
3194 emit_move_insn (rtx x, rtx y)
3195 {
3196 enum machine_mode mode = GET_MODE (x);
3197 rtx y_cst = NULL_RTX;
3198 rtx last_insn, set;
3199
3200 x = protect_from_queue (x, 1);
3201 y = protect_from_queue (y, 0);
3202
3203 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3204 abort ();
3205
3206 /* Never force constant_p_rtx to memory. */
3207 if (GET_CODE (y) == CONSTANT_P_RTX)
3208 ;
3209 else if (CONSTANT_P (y))
3210 {
3211 if (optimize
3212 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3213 && (last_insn = compress_float_constant (x, y)))
3214 return last_insn;
3215
3216 y_cst = y;
3217
3218 if (!LEGITIMATE_CONSTANT_P (y))
3219 {
3220 y = force_const_mem (mode, y);
3221
3222 /* If the target's cannot_force_const_mem prevented the spill,
3223 assume that the target's move expanders will also take care
3224 of the non-legitimate constant. */
3225 if (!y)
3226 y = y_cst;
3227 }
3228 }
3229
3230 /* If X or Y are memory references, verify that their addresses are valid
3231 for the machine. */
3232 if (GET_CODE (x) == MEM
3233 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3234 && ! push_operand (x, GET_MODE (x)))
3235 || (flag_force_addr
3236 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3237 x = validize_mem (x);
3238
3239 if (GET_CODE (y) == MEM
3240 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3241 || (flag_force_addr
3242 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3243 y = validize_mem (y);
3244
3245 if (mode == BLKmode)
3246 abort ();
3247
3248 last_insn = emit_move_insn_1 (x, y);
3249
3250 if (y_cst && GET_CODE (x) == REG
3251 && (set = single_set (last_insn)) != NULL_RTX
3252 && SET_DEST (set) == x
3253 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3254 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3255
3256 return last_insn;
3257 }
3258
3259 /* Low level part of emit_move_insn.
3260 Called just like emit_move_insn, but assumes X and Y
3261 are basically valid. */
3262
3263 rtx
3264 emit_move_insn_1 (rtx x, rtx y)
3265 {
3266 enum machine_mode mode = GET_MODE (x);
3267 enum machine_mode submode;
3268 enum mode_class class = GET_MODE_CLASS (mode);
3269
3270 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3271 abort ();
3272
3273 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3274 return
3275 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3276
3277 /* Expand complex moves by moving real part and imag part, if possible. */
3278 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3279 && BLKmode != (submode = GET_MODE_INNER (mode))
3280 && (mov_optab->handlers[(int) submode].insn_code
3281 != CODE_FOR_nothing))
3282 {
3283 /* Don't split destination if it is a stack push. */
3284 int stack = push_operand (x, GET_MODE (x));
3285
3286 #ifdef PUSH_ROUNDING
3287 /* In case we output to the stack, but the size is smaller than the
3288 machine can push exactly, we need to use move instructions. */
3289 if (stack
3290 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3291 != GET_MODE_SIZE (submode)))
3292 {
3293 rtx temp;
3294 HOST_WIDE_INT offset1, offset2;
3295
3296 /* Do not use anti_adjust_stack, since we don't want to update
3297 stack_pointer_delta. */
3298 temp = expand_binop (Pmode,
3299 #ifdef STACK_GROWS_DOWNWARD
3300 sub_optab,
3301 #else
3302 add_optab,
3303 #endif
3304 stack_pointer_rtx,
3305 GEN_INT
3306 (PUSH_ROUNDING
3307 (GET_MODE_SIZE (GET_MODE (x)))),
3308 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3309
3310 if (temp != stack_pointer_rtx)
3311 emit_move_insn (stack_pointer_rtx, temp);
3312
3313 #ifdef STACK_GROWS_DOWNWARD
3314 offset1 = 0;
3315 offset2 = GET_MODE_SIZE (submode);
3316 #else
3317 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3318 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3319 + GET_MODE_SIZE (submode));
3320 #endif
3321
3322 emit_move_insn (change_address (x, submode,
3323 gen_rtx_PLUS (Pmode,
3324 stack_pointer_rtx,
3325 GEN_INT (offset1))),
3326 gen_realpart (submode, y));
3327 emit_move_insn (change_address (x, submode,
3328 gen_rtx_PLUS (Pmode,
3329 stack_pointer_rtx,
3330 GEN_INT (offset2))),
3331 gen_imagpart (submode, y));
3332 }
3333 else
3334 #endif
3335 /* If this is a stack, push the highpart first, so it
3336 will be in the argument order.
3337
3338 In that case, change_address is used only to convert
3339 the mode, not to change the address. */
3340 if (stack)
3341 {
3342 /* Note that the real part always precedes the imag part in memory
3343 regardless of machine's endianness. */
3344 #ifdef STACK_GROWS_DOWNWARD
3345 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3346 gen_imagpart (submode, y));
3347 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3348 gen_realpart (submode, y));
3349 #else
3350 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3351 gen_realpart (submode, y));
3352 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3353 gen_imagpart (submode, y));
3354 #endif
3355 }
3356 else
3357 {
3358 rtx realpart_x, realpart_y;
3359 rtx imagpart_x, imagpart_y;
3360
3361 /* If this is a complex value with each part being smaller than a
3362 word, the usual calling sequence will likely pack the pieces into
3363 a single register. Unfortunately, SUBREG of hard registers only
3364 deals in terms of words, so we have a problem converting input
3365 arguments to the CONCAT of two registers that is used elsewhere
3366 for complex values. If this is before reload, we can copy it into
3367 memory and reload. FIXME, we should see about using extract and
3368 insert on integer registers, but complex short and complex char
3369 variables should be rarely used. */
3370 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3371 && (reload_in_progress | reload_completed) == 0)
3372 {
3373 int packed_dest_p
3374 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3375 int packed_src_p
3376 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3377
3378 if (packed_dest_p || packed_src_p)
3379 {
3380 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3381 ? MODE_FLOAT : MODE_INT);
3382
3383 enum machine_mode reg_mode
3384 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3385
3386 if (reg_mode != BLKmode)
3387 {
3388 rtx mem = assign_stack_temp (reg_mode,
3389 GET_MODE_SIZE (mode), 0);
3390 rtx cmem = adjust_address (mem, mode, 0);
3391
3392 cfun->cannot_inline
3393 = N_("function using short complex types cannot be inline");
3394
3395 if (packed_dest_p)
3396 {
3397 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3398
3399 emit_move_insn_1 (cmem, y);
3400 return emit_move_insn_1 (sreg, mem);
3401 }
3402 else
3403 {
3404 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3405
3406 emit_move_insn_1 (mem, sreg);
3407 return emit_move_insn_1 (x, cmem);
3408 }
3409 }
3410 }
3411 }
3412
3413 realpart_x = gen_realpart (submode, x);
3414 realpart_y = gen_realpart (submode, y);
3415 imagpart_x = gen_imagpart (submode, x);
3416 imagpart_y = gen_imagpart (submode, y);
3417
3418 /* Show the output dies here. This is necessary for SUBREGs
3419 of pseudos since we cannot track their lifetimes correctly;
3420 hard regs shouldn't appear here except as return values.
3421 We never want to emit such a clobber after reload. */
3422 if (x != y
3423 && ! (reload_in_progress || reload_completed)
3424 && (GET_CODE (realpart_x) == SUBREG
3425 || GET_CODE (imagpart_x) == SUBREG))
3426 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3427
3428 emit_move_insn (realpart_x, realpart_y);
3429 emit_move_insn (imagpart_x, imagpart_y);
3430 }
3431
3432 return get_last_insn ();
3433 }
3434
3435 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3436 find a mode to do it in. If we have a movcc, use it. Otherwise,
3437 find the MODE_INT mode of the same width. */
3438 else if (GET_MODE_CLASS (mode) == MODE_CC
3439 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3440 {
3441 enum insn_code insn_code;
3442 enum machine_mode tmode = VOIDmode;
3443 rtx x1 = x, y1 = y;
3444
3445 if (mode != CCmode
3446 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3447 tmode = CCmode;
3448 else
3449 for (tmode = QImode; tmode != VOIDmode;
3450 tmode = GET_MODE_WIDER_MODE (tmode))
3451 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3452 break;
3453
3454 if (tmode == VOIDmode)
3455 abort ();
3456
3457 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3458 may call change_address which is not appropriate if we were
3459 called when a reload was in progress. We don't have to worry
3460 about changing the address since the size in bytes is supposed to
3461 be the same. Copy the MEM to change the mode and move any
3462 substitutions from the old MEM to the new one. */
3463
3464 if (reload_in_progress)
3465 {
3466 x = gen_lowpart_common (tmode, x1);
3467 if (x == 0 && GET_CODE (x1) == MEM)
3468 {
3469 x = adjust_address_nv (x1, tmode, 0);
3470 copy_replacements (x1, x);
3471 }
3472
3473 y = gen_lowpart_common (tmode, y1);
3474 if (y == 0 && GET_CODE (y1) == MEM)
3475 {
3476 y = adjust_address_nv (y1, tmode, 0);
3477 copy_replacements (y1, y);
3478 }
3479 }
3480 else
3481 {
3482 x = gen_lowpart (tmode, x);
3483 y = gen_lowpart (tmode, y);
3484 }
3485
3486 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3487 return emit_insn (GEN_FCN (insn_code) (x, y));
3488 }
3489
3490 /* This will handle any multi-word or full-word mode that lacks a move_insn
3491 pattern. However, you will get better code if you define such patterns,
3492 even if they must turn into multiple assembler instructions. */
3493 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3494 {
3495 rtx last_insn = 0;
3496 rtx seq, inner;
3497 int need_clobber;
3498 int i;
3499
3500 #ifdef PUSH_ROUNDING
3501
3502 /* If X is a push on the stack, do the push now and replace
3503 X with a reference to the stack pointer. */
3504 if (push_operand (x, GET_MODE (x)))
3505 {
3506 rtx temp;
3507 enum rtx_code code;
3508
3509 /* Do not use anti_adjust_stack, since we don't want to update
3510 stack_pointer_delta. */
3511 temp = expand_binop (Pmode,
3512 #ifdef STACK_GROWS_DOWNWARD
3513 sub_optab,
3514 #else
3515 add_optab,
3516 #endif
3517 stack_pointer_rtx,
3518 GEN_INT
3519 (PUSH_ROUNDING
3520 (GET_MODE_SIZE (GET_MODE (x)))),
3521 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3522
3523 if (temp != stack_pointer_rtx)
3524 emit_move_insn (stack_pointer_rtx, temp);
3525
3526 code = GET_CODE (XEXP (x, 0));
3527
3528 /* Just hope that small offsets off SP are OK. */
3529 if (code == POST_INC)
3530 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3531 GEN_INT (-((HOST_WIDE_INT)
3532 GET_MODE_SIZE (GET_MODE (x)))));
3533 else if (code == POST_DEC)
3534 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3535 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3536 else
3537 temp = stack_pointer_rtx;
3538
3539 x = change_address (x, VOIDmode, temp);
3540 }
3541 #endif
3542
3543 /* If we are in reload, see if either operand is a MEM whose address
3544 is scheduled for replacement. */
3545 if (reload_in_progress && GET_CODE (x) == MEM
3546 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3547 x = replace_equiv_address_nv (x, inner);
3548 if (reload_in_progress && GET_CODE (y) == MEM
3549 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3550 y = replace_equiv_address_nv (y, inner);
3551
3552 start_sequence ();
3553
3554 need_clobber = 0;
3555 for (i = 0;
3556 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3557 i++)
3558 {
3559 rtx xpart = operand_subword (x, i, 1, mode);
3560 rtx ypart = operand_subword (y, i, 1, mode);
3561
3562 /* If we can't get a part of Y, put Y into memory if it is a
3563 constant. Otherwise, force it into a register. If we still
3564 can't get a part of Y, abort. */
3565 if (ypart == 0 && CONSTANT_P (y))
3566 {
3567 y = force_const_mem (mode, y);
3568 ypart = operand_subword (y, i, 1, mode);
3569 }
3570 else if (ypart == 0)
3571 ypart = operand_subword_force (y, i, mode);
3572
3573 if (xpart == 0 || ypart == 0)
3574 abort ();
3575
3576 need_clobber |= (GET_CODE (xpart) == SUBREG);
3577
3578 last_insn = emit_move_insn (xpart, ypart);
3579 }
3580
3581 seq = get_insns ();
3582 end_sequence ();
3583
3584 /* Show the output dies here. This is necessary for SUBREGs
3585 of pseudos since we cannot track their lifetimes correctly;
3586 hard regs shouldn't appear here except as return values.
3587 We never want to emit such a clobber after reload. */
3588 if (x != y
3589 && ! (reload_in_progress || reload_completed)
3590 && need_clobber != 0)
3591 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3592
3593 emit_insn (seq);
3594
3595 return last_insn;
3596 }
3597 else
3598 abort ();
3599 }
3600
3601 /* If Y is representable exactly in a narrower mode, and the target can
3602 perform the extension directly from constant or memory, then emit the
3603 move as an extension. */
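
/* A worked example under assumed target capabilities: moving the
   DFmode constant 1.5 into a register can become a single extendsfdf2
   from an SFmode constant (or from an SFmode memory slot), because 1.5
   truncates to SFmode exactly; a constant such as 0.1, which does not
   truncate exactly, is left to the ordinary move path.  */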
3604
3605 static rtx
3606 compress_float_constant (rtx x, rtx y)
3607 {
3608 enum machine_mode dstmode = GET_MODE (x);
3609 enum machine_mode orig_srcmode = GET_MODE (y);
3610 enum machine_mode srcmode;
3611 REAL_VALUE_TYPE r;
3612
3613 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3614
3615 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3616 srcmode != orig_srcmode;
3617 srcmode = GET_MODE_WIDER_MODE (srcmode))
3618 {
3619 enum insn_code ic;
3620 rtx trunc_y, last_insn;
3621
3622 /* Skip if the target can't extend this way. */
3623 ic = can_extend_p (dstmode, srcmode, 0);
3624 if (ic == CODE_FOR_nothing)
3625 continue;
3626
3627 /* Skip if the narrowed value isn't exact. */
3628 if (! exact_real_truncate (srcmode, &r))
3629 continue;
3630
3631 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3632
3633 if (LEGITIMATE_CONSTANT_P (trunc_y))
3634 {
3635 /* Skip if the target needs extra instructions to perform
3636 the extension. */
3637 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3638 continue;
3639 }
3640 else if (float_extend_from_mem[dstmode][srcmode])
3641 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3642 else
3643 continue;
3644
3645 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3646 last_insn = get_last_insn ();
3647
3648 if (GET_CODE (x) == REG)
3649 set_unique_reg_note (last_insn, REG_EQUAL, y);
3650
3651 return last_insn;
3652 }
3653
3654 return NULL_RTX;
3655 }
3656 \f
3657 /* Pushing data onto the stack. */
3658
3659 /* Push a block of length SIZE (perhaps variable)
3660 and return an rtx to address the beginning of the block.
3661 Note that it is not possible for the value returned to be a QUEUED.
3662 The value may be virtual_outgoing_args_rtx.
3663
3664 EXTRA is the number of bytes of padding to push in addition to SIZE.
3665 BELOW nonzero means this padding comes at low addresses;
3666 otherwise, the padding comes at high addresses. */
3667
3668 rtx
3669 push_block (rtx size, int extra, int below)
3670 {
3671 rtx temp;
3672
3673 size = convert_modes (Pmode, ptr_mode, size, 1);
3674 if (CONSTANT_P (size))
3675 anti_adjust_stack (plus_constant (size, extra));
3676 else if (GET_CODE (size) == REG && extra == 0)
3677 anti_adjust_stack (size);
3678 else
3679 {
3680 temp = copy_to_mode_reg (Pmode, size);
3681 if (extra != 0)
3682 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3683 temp, 0, OPTAB_LIB_WIDEN);
3684 anti_adjust_stack (temp);
3685 }
3686
3687 #ifndef STACK_GROWS_DOWNWARD
3688 if (0)
3689 #else
3690 if (1)
3691 #endif
3692 {
3693 temp = virtual_outgoing_args_rtx;
3694 if (extra != 0 && below)
3695 temp = plus_constant (temp, extra);
3696 }
3697 else
3698 {
3699 if (GET_CODE (size) == CONST_INT)
3700 temp = plus_constant (virtual_outgoing_args_rtx,
3701 -INTVAL (size) - (below ? 0 : extra));
3702 else if (extra != 0 && !below)
3703 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3704 negate_rtx (Pmode, plus_constant (size, extra)));
3705 else
3706 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3707 negate_rtx (Pmode, size));
3708 }
3709
3710 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3711 }
3712
3713 #ifdef PUSH_ROUNDING
3714
3715 /* Emit single push insn. */
3716
3717 static void
3718 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3719 {
3720 rtx dest_addr;
3721 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3722 rtx dest;
3723 enum insn_code icode;
3724 insn_operand_predicate_fn pred;
3725
3726 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3727 /* If there is a push pattern, use it. Otherwise try the old way of
3728 throwing a MEM representing the push operation to the move expander. */
3729 icode = push_optab->handlers[(int) mode].insn_code;
3730 if (icode != CODE_FOR_nothing)
3731 {
3732 if (((pred = insn_data[(int) icode].operand[0].predicate)
3733 && !((*pred) (x, mode))))
3734 x = force_reg (mode, x);
3735 emit_insn (GEN_FCN (icode) (x));
3736 return;
3737 }
3738 if (GET_MODE_SIZE (mode) == rounded_size)
3739 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3740 /* If we are to pad downward, adjust the stack pointer first and
3741 then store X into the stack location using an offset. This is
3742 because emit_move_insn does not know how to pad; it does not have
3743 access to type. */
3744 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3745 {
3746 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3747 HOST_WIDE_INT offset;
3748
3749 emit_move_insn (stack_pointer_rtx,
3750 expand_binop (Pmode,
3751 #ifdef STACK_GROWS_DOWNWARD
3752 sub_optab,
3753 #else
3754 add_optab,
3755 #endif
3756 stack_pointer_rtx,
3757 GEN_INT (rounded_size),
3758 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3759
3760 offset = (HOST_WIDE_INT) padding_size;
3761 #ifdef STACK_GROWS_DOWNWARD
3762 if (STACK_PUSH_CODE == POST_DEC)
3763 /* We have already decremented the stack pointer, so get the
3764 previous value. */
3765 offset += (HOST_WIDE_INT) rounded_size;
3766 #else
3767 if (STACK_PUSH_CODE == POST_INC)
3768 /* We have already incremented the stack pointer, so get the
3769 previous value. */
3770 offset -= (HOST_WIDE_INT) rounded_size;
3771 #endif
3772 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3773 }
3774 else
3775 {
3776 #ifdef STACK_GROWS_DOWNWARD
3777 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3778 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3779 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3780 #else
3781 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3782 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3783 GEN_INT (rounded_size));
3784 #endif
3785 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3786 }
3787
3788 dest = gen_rtx_MEM (mode, dest_addr);
3789
3790 if (type != 0)
3791 {
3792 set_mem_attributes (dest, type, 1);
3793
3794 if (flag_optimize_sibling_calls)
3795 /* Function incoming arguments may overlap with sibling call
3796 outgoing arguments and we cannot allow reordering of reads
3797 from function arguments with stores to outgoing arguments
3798 of sibling calls. */
3799 set_mem_alias_set (dest, 0);
3800 }
3801 emit_move_insn (dest, x);
3802 }
3803 #endif
3804
3805 /* Generate code to push X onto the stack, assuming it has mode MODE and
3806 type TYPE.
3807 MODE is redundant except when X is a CONST_INT (since they don't
3808 carry mode info).
3809 SIZE is an rtx for the size of data to be copied (in bytes),
3810 needed only if X is BLKmode.
3811
3812 ALIGN (in bits) is maximum alignment we can assume.
3813
3814 If PARTIAL and REG are both nonzero, then copy that many of the first
3815 words of X into registers starting with REG, and push the rest of X.
3816 The amount of space pushed is decreased by PARTIAL words,
3817 rounded *down* to a multiple of PARM_BOUNDARY.
3818 REG must be a hard register in this case.
3819 If REG is zero but PARTIAL is not, take all other actions for an
3820 argument partially in registers, but do not actually load any
3821 registers.
3822
3823 EXTRA is the amount in bytes of extra space to leave next to this arg.
3824 This is ignored if an argument block has already been allocated.
3825
3826 On a machine that lacks real push insns, ARGS_ADDR is the address of
3827 the bottom of the argument block for this call. We use indexing off there
3828 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3829 argument block has not been preallocated.
3830
3831 ARGS_SO_FAR is the size of args previously pushed for this call.
3832
3833 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3834 for arguments passed in registers. If nonzero, it will be the number
3835 of bytes required. */
3836
3837 void
3838 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3839 unsigned int align, int partial, rtx reg, int extra,
3840 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3841 rtx alignment_pad)
3842 {
3843 rtx xinner;
3844 enum direction stack_direction
3845 #ifdef STACK_GROWS_DOWNWARD
3846 = downward;
3847 #else
3848 = upward;
3849 #endif
3850
3851 /* Decide where to pad the argument: `downward' for below,
3852 `upward' for above, or `none' for don't pad it.
3853 Default is below for small data on big-endian machines; else above. */
3854 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3855
3856 /* Invert direction if stack is post-decrement.
3857 FIXME: why? */
3858 if (STACK_PUSH_CODE == POST_DEC)
3859 if (where_pad != none)
3860 where_pad = (where_pad == downward ? upward : downward);
3861
3862 xinner = x = protect_from_queue (x, 0);
3863
3864 if (mode == BLKmode)
3865 {
3866 /* Copy a block into the stack, entirely or partially. */
3867
3868 rtx temp;
3869 int used = partial * UNITS_PER_WORD;
3870 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3871 int skip;
3872
3873 if (size == 0)
3874 abort ();
3875
3876 used -= offset;
3877
3878 /* USED is now the # of bytes we need not copy to the stack
3879 because registers will take care of them. */
3880
3881 if (partial != 0)
3882 xinner = adjust_address (xinner, BLKmode, used);
3883
3884 /* If the partial register-part of the arg counts in its stack size,
3885 skip the part of stack space corresponding to the registers.
3886 Otherwise, start copying to the beginning of the stack space,
3887 by setting SKIP to 0. */
3888 skip = (reg_parm_stack_space == 0) ? 0 : used;
3889
3890 #ifdef PUSH_ROUNDING
3891 /* Do it with several push insns if that doesn't take lots of insns
3892 and if there is no difficulty with push insns that skip bytes
3893 on the stack for alignment purposes. */
3894 if (args_addr == 0
3895 && PUSH_ARGS
3896 && GET_CODE (size) == CONST_INT
3897 && skip == 0
3898 && MEM_ALIGN (xinner) >= align
3899 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3900 /* Here we avoid the case of a structure whose weak alignment
3901 forces many pushes of a small amount of data,
3902 where such small pushes do rounding that causes trouble. */
3903 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3904 || align >= BIGGEST_ALIGNMENT
3905 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3906 == (align / BITS_PER_UNIT)))
3907 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3908 {
3909 /* Push padding now if padding above and stack grows down,
3910 or if padding below and stack grows up.
3911 But if space already allocated, this has already been done. */
3912 if (extra && args_addr == 0
3913 && where_pad != none && where_pad != stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3915
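/* A null destination tells move_by_pieces to emit push insns rather
than ordinary stores. */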
3916 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3917 }
3918 else
3919 #endif /* PUSH_ROUNDING */
3920 {
3921 rtx target;
3922
3923 /* Otherwise make space on the stack and copy the data
3924 to the address of that space. */
3925
3926 /* Deduct words put into registers from the size we must copy. */
3927 if (partial != 0)
3928 {
3929 if (GET_CODE (size) == CONST_INT)
3930 size = GEN_INT (INTVAL (size) - used);
3931 else
3932 size = expand_binop (GET_MODE (size), sub_optab, size,
3933 GEN_INT (used), NULL_RTX, 0,
3934 OPTAB_LIB_WIDEN);
3935 }
3936
3937 /* Get the address of the stack space.
3938 In this case, we do not deal with EXTRA separately.
3939 A single stack adjust will do. */
3940 if (! args_addr)
3941 {
3942 temp = push_block (size, extra, where_pad == downward);
3943 extra = 0;
3944 }
3945 else if (GET_CODE (args_so_far) == CONST_INT)
3946 temp = memory_address (BLKmode,
3947 plus_constant (args_addr,
3948 skip + INTVAL (args_so_far)));
3949 else
3950 temp = memory_address (BLKmode,
3951 plus_constant (gen_rtx_PLUS (Pmode,
3952 args_addr,
3953 args_so_far),
3954 skip));
3955
3956 if (!ACCUMULATE_OUTGOING_ARGS)
3957 {
3958 /* If the source is referenced relative to the stack pointer,
3959 copy it to another register to stabilize it. We do not need
3960 to do this if we know that we won't be changing sp. */
3961
3962 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3963 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3964 temp = copy_to_reg (temp);
3965 }
3966
3967 target = gen_rtx_MEM (BLKmode, temp);
3968
3969 if (type != 0)
3970 {
3971 set_mem_attributes (target, type, 1);
3972 /* Function incoming arguments may overlap with sibling call
3973 outgoing arguments and we cannot allow reordering of reads
3974 from function arguments with stores to outgoing arguments
3975 of sibling calls. */
3976 set_mem_alias_set (target, 0);
3977 }
3978
3979 /* ALIGN may well be better aligned than TYPE, e.g. due to
3980 PARM_BOUNDARY. Assume the caller isn't lying. */
3981 set_mem_align (target, align);
3982
3983 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3984 }
3985 }
3986 else if (partial > 0)
3987 {
3988 /* Scalar partly in registers. */
3989
3990 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3991 int i;
3992 int not_stack;
3993 /* # words of start of argument
3994 that we must make space for but need not store. */
3995 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3996 int args_offset = INTVAL (args_so_far);
3997 int skip;
3998
3999 /* Push padding now if padding above and stack grows down,
4000 or if padding below and stack grows up.
4001 But if space already allocated, this has already been done. */
4002 if (extra && args_addr == 0
4003 && where_pad != none && where_pad != stack_direction)
4004 anti_adjust_stack (GEN_INT (extra));
4005
4006 /* If we make space by pushing it, we might as well push
4007 the real data. Otherwise, we can leave OFFSET nonzero
4008 and leave the space uninitialized. */
4009 if (args_addr == 0)
4010 offset = 0;
4011
4012 /* Now NOT_STACK gets the number of words that we don't need to
4013 allocate on the stack. */
4014 not_stack = partial - offset;
4015
4016 /* If the partial register-part of the arg counts in its stack size,
4017 skip the part of stack space corresponding to the registers.
4018 Otherwise, start copying to the beginning of the stack space,
4019 by setting SKIP to 0. */
4020 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4021
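/* If X is a constant the target cannot use directly, put it in the
constant pool so that operand_subword_force below can take subwords
of it. */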
4022 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4023 x = validize_mem (force_const_mem (mode, x));
4024
4025 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4026 SUBREGs of such registers are not allowed. */
4027 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4028 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4029 x = copy_to_reg (x);
4030
4031 /* Loop over all the words allocated on the stack for this arg. */
4032 /* We can do it by words, because any scalar bigger than a word
4033 has a size that is a multiple of a word. */
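/* The loop direction follows the order in which this target pushes
arguments. */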
4034 #ifndef PUSH_ARGS_REVERSED
4035 for (i = not_stack; i < size; i++)
4036 #else
4037 for (i = size - 1; i >= not_stack; i--)
4038 #endif
4039 if (i >= not_stack + offset)
4040 emit_push_insn (operand_subword_force (x, i, mode),
4041 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4042 0, args_addr,
4043 GEN_INT (args_offset + ((i - not_stack + skip)
4044 * UNITS_PER_WORD)),
4045 reg_parm_stack_space, alignment_pad);
4046 }
4047 else
4048 {
4049 rtx addr;
4050 rtx dest;
4051
4052 /* Push padding now if padding above and stack grows down,
4053 or if padding below and stack grows up.
4054 But if space already allocated, this has already been done. */
4055 if (extra && args_addr == 0
4056 && where_pad != none && where_pad != stack_direction)
4057 anti_adjust_stack (GEN_INT (extra));
4058
4059 #ifdef PUSH_ROUNDING
4060 if (args_addr == 0 && PUSH_ARGS)
4061 emit_single_push_insn (mode, x, type);
4062 else
4063 #endif
4064 {
4065 if (GET_CODE (args_so_far) == CONST_INT)
4066 addr
4067 = memory_address (mode,
4068 plus_constant (args_addr,
4069 INTVAL (args_so_far)));
4070 else
4071 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4072 args_so_far));
4073 dest = gen_rtx_MEM (mode, addr);
4074 if (type != 0)
4075 {
4076 set_mem_attributes (dest, type, 1);
4077 /* Function incoming arguments may overlap with sibling call
4078 outgoing arguments and we cannot allow reordering of reads
4079 from function arguments with stores to outgoing arguments
4080 of sibling calls. */
4081 set_mem_alias_set (dest, 0);
4082 }
4083
4084 emit_move_insn (dest, x);
4085 }
4086 }
4087
4088 /* If part should go in registers, copy that part
4089 into the appropriate registers. Do this now, at the end,
4090 since mem-to-mem copies above may do function calls. */
4091 if (partial > 0 && reg != 0)
4092 {
4093 /* Handle calls that pass values in multiple non-contiguous locations.
4094 The Irix 6 ABI has examples of this. */
4095 if (GET_CODE (reg) == PARALLEL)
4096 emit_group_load (reg, x, type, -1);
4097 else
4098 move_block_to_reg (REGNO (reg), x, partial, mode);
4099 }
4100
4101 if (extra && args_addr == 0 && where_pad == stack_direction)
4102 anti_adjust_stack (GEN_INT (extra));
4103
4104 if (alignment_pad && args_addr == 0)
4105 anti_adjust_stack (alignment_pad);
4106 }
4107 \f
4108 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4109 operations. */
4110
4111 static rtx
4112 get_subtarget (rtx x)
4113 {
4114 return ((x == 0
4115 /* Only registers can be subtargets. */
4116 || GET_CODE (x) != REG
4117 /* If the register is readonly, it can't be set more than once. */
4118 || RTX_UNCHANGING_P (x)
4119 /* Don't use hard regs to avoid extending their life. */
4120 || REGNO (x) < FIRST_PSEUDO_REGISTER
4121 /* Avoid subtargets inside loops,
4122 since they hide some invariant expressions. */
4123 || preserve_subexpressions_p ())
4124 ? 0 : x);
4125 }
4126
4127 /* Expand an assignment that stores the value of FROM into TO.
4128 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4129 (This may contain a QUEUED rtx;
4130 if the value is constant, this rtx is a constant.)
4131 Otherwise, the returned value is NULL_RTX.
4132
4133 SUGGEST_REG is no longer actually used.
4134 It used to mean, copy the value through a register
4135 and return that register, if that is possible.
4136 We now use WANT_VALUE to decide whether to do this. */
4137
4138 rtx
4139 expand_assignment (tree to, tree from, int want_value,
4140 int suggest_reg ATTRIBUTE_UNUSED)
4141 {
4142 rtx to_rtx = 0;
4143 rtx result;
4144
4145 /* Don't crash if the lhs of the assignment was erroneous. */
4146
4147 if (TREE_CODE (to) == ERROR_MARK)
4148 {
4149 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4150 return want_value ? result : NULL_RTX;
4151 }
4152
4153 /* Assignment of a structure component needs special treatment
4154 if the structure component's rtx is not simply a MEM.
4155 Assignment of an array element at a constant index, and assignment of
4156 an array element in an unaligned packed structure field, has the same
4157 problem. */
4158
4159 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4160 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4161 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4162 {
4163 enum machine_mode mode1;
4164 HOST_WIDE_INT bitsize, bitpos;
4165 rtx orig_to_rtx;
4166 tree offset;
4167 int unsignedp;
4168 int volatilep = 0;
4169 tree tem;
4170
4171 push_temp_slots ();
4172 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4173 &unsignedp, &volatilep);
4174
4175 /* If we are going to use store_bit_field and extract_bit_field,
4176 make sure to_rtx will be safe for multiple use. */
4177
4178 if (mode1 == VOIDmode && want_value)
4179 tem = stabilize_reference (tem);
4180
4181 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4182
4183 if (offset != 0)
4184 {
4185 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4186
4187 if (GET_CODE (to_rtx) != MEM)
4188 abort ();
4189
4190 #ifdef POINTERS_EXTEND_UNSIGNED
4191 if (GET_MODE (offset_rtx) != Pmode)
4192 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4193 #else
4194 if (GET_MODE (offset_rtx) != ptr_mode)
4195 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4196 #endif
4197
4198 /* A constant address in TO_RTX can have VOIDmode; we must not try
4199 to call force_reg in that case, so avoid it. */
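/* If the constant part of the position is suitably aligned for MODE1,
fold it into TO_RTX now so that only the variable OFFSET remains to be
applied below. */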
4200 if (GET_CODE (to_rtx) == MEM
4201 && GET_MODE (to_rtx) == BLKmode
4202 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4203 && bitsize > 0
4204 && (bitpos % bitsize) == 0
4205 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4206 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4207 {
4208 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4209 bitpos = 0;
4210 }
4211
4212 to_rtx = offset_address (to_rtx, offset_rtx,
4213 highest_pow2_factor_for_type (TREE_TYPE (to),
4214 offset));
4215 }
4216
4217 if (GET_CODE (to_rtx) == MEM)
4218 {
4219 /* If the field is at offset zero, we could have been given the
4220 DECL_RTX of the parent struct. Don't munge it. */
4221 to_rtx = shallow_copy_rtx (to_rtx);
4222
4223 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4224 }
4225
4226 /* Deal with volatile and readonly fields. The former is only done
4227 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4228 if (volatilep && GET_CODE (to_rtx) == MEM)
4229 {
4230 if (to_rtx == orig_to_rtx)
4231 to_rtx = copy_rtx (to_rtx);
4232 MEM_VOLATILE_P (to_rtx) = 1;
4233 }
4234
4235 if (TREE_CODE (to) == COMPONENT_REF
4236 && TREE_READONLY (TREE_OPERAND (to, 1)))
4237 {
4238 if (to_rtx == orig_to_rtx)
4239 to_rtx = copy_rtx (to_rtx);
4240 RTX_UNCHANGING_P (to_rtx) = 1;
4241 }
4242
4243 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4244 {
4245 if (to_rtx == orig_to_rtx)
4246 to_rtx = copy_rtx (to_rtx);
4247 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4248 }
4249
4250 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4251 (want_value
4252 /* Spurious cast for HPUX compiler. */
4253 ? ((enum machine_mode)
4254 TYPE_MODE (TREE_TYPE (to)))
4255 : VOIDmode),
4256 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4257
4258 preserve_temp_slots (result);
4259 free_temp_slots ();
4260 pop_temp_slots ();
4261
4262 /* If the value is meaningful, convert RESULT to the proper mode.
4263 Otherwise, return nothing. */
4264 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4265 TYPE_MODE (TREE_TYPE (from)),
4266 result,
4267 TREE_UNSIGNED (TREE_TYPE (to)))
4268 : NULL_RTX);
4269 }
4270
4271 /* If the rhs is a function call and its value is not an aggregate,
4272 call the function before we start to compute the lhs.
4273 This is needed for correct code for cases such as
4274 val = setjmp (buf) on machines where reference to val
4275 requires loading up part of an address in a separate insn.
4276
4277 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4278 since it might be a promoted variable where the zero- or sign-extension
4279 needs to be done. Handling this in the normal way is safe because no
4280 computation is done before the call. */
4281 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4282 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4283 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4284 && GET_CODE (DECL_RTL (to)) == REG))
4285 {
4286 rtx value;
4287
4288 push_temp_slots ();
4289 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4290 if (to_rtx == 0)
4291 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4292
4293 /* Handle calls that return values in multiple non-contiguous locations.
4294 The Irix 6 ABI has examples of this. */
4295 if (GET_CODE (to_rtx) == PARALLEL)
4296 emit_group_load (to_rtx, value, TREE_TYPE (from),
4297 int_size_in_bytes (TREE_TYPE (from)));
4298 else if (GET_MODE (to_rtx) == BLKmode)
4299 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4300 else
4301 {
4302 #ifdef POINTERS_EXTEND_UNSIGNED
4303 if (POINTER_TYPE_P (TREE_TYPE (to))
4304 && GET_MODE (to_rtx) != GET_MODE (value))
4305 value = convert_memory_address (GET_MODE (to_rtx), value);
4306 #endif
4307 emit_move_insn (to_rtx, value);
4308 }
4309 preserve_temp_slots (to_rtx);
4310 free_temp_slots ();
4311 pop_temp_slots ();
4312 return want_value ? to_rtx : NULL_RTX;
4313 }
4314
4315 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4316 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4317
4318 if (to_rtx == 0)
4319 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4320
4321 /* Don't move directly into a return register. */
4322 if (TREE_CODE (to) == RESULT_DECL
4323 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4324 {
4325 rtx temp;
4326
4327 push_temp_slots ();
4328 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4329
4330 if (GET_CODE (to_rtx) == PARALLEL)
4331 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4332 int_size_in_bytes (TREE_TYPE (from)));
4333 else
4334 emit_move_insn (to_rtx, temp);
4335
4336 preserve_temp_slots (to_rtx);
4337 free_temp_slots ();
4338 pop_temp_slots ();
4339 return want_value ? to_rtx : NULL_RTX;
4340 }
4341
4342 /* In case we are returning the contents of an object which overlaps
4343 the place the value is being stored, use a safe function when copying
4344 a value through a pointer into a structure value return block. */
4345 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4346 && current_function_returns_struct
4347 && !current_function_returns_pcc_struct)
4348 {
4349 rtx from_rtx, size;
4350
4351 push_temp_slots ();
4352 size = expr_size (from);
4353 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4354
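/* Note the argument order: memmove takes (dest, src, size) while
bcopy takes (src, dest, size). */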
4355 if (TARGET_MEM_FUNCTIONS)
4356 emit_library_call (memmove_libfunc, LCT_NORMAL,
4357 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4358 XEXP (from_rtx, 0), Pmode,
4359 convert_to_mode (TYPE_MODE (sizetype),
4360 size, TREE_UNSIGNED (sizetype)),
4361 TYPE_MODE (sizetype));
4362 else
4363 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4364 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4365 XEXP (to_rtx, 0), Pmode,
4366 convert_to_mode (TYPE_MODE (integer_type_node),
4367 size,
4368 TREE_UNSIGNED (integer_type_node)),
4369 TYPE_MODE (integer_type_node));
4370
4371 preserve_temp_slots (to_rtx);
4372 free_temp_slots ();
4373 pop_temp_slots ();
4374 return want_value ? to_rtx : NULL_RTX;
4375 }
4376
4377 /* Compute FROM and store the value in the rtx we got. */
4378
4379 push_temp_slots ();
4380 result = store_expr (from, to_rtx, want_value);
4381 preserve_temp_slots (result);
4382 free_temp_slots ();
4383 pop_temp_slots ();
4384 return want_value ? result : NULL_RTX;
4385 }
4386
4387 /* Generate code for computing expression EXP,
4388 and storing the value into TARGET.
4389 TARGET may contain a QUEUED rtx.
4390
4391 If WANT_VALUE & 1 is nonzero, return a copy of the value
4392 not in TARGET, so that we can be sure to use the proper
4393 value in a containing expression even if TARGET has something
4394 else stored in it. If possible, we copy the value through a pseudo
4395 and return that pseudo. Or, if the value is constant, we try to
4396 return the constant. In some cases, we return a pseudo
4397 copied *from* TARGET.
4398
4399 If the mode is BLKmode then we may return TARGET itself.
4400 It turns out that in BLKmode it doesn't cause a problem,
4401 because C has no operators that could combine two different
4402 assignments into the same BLKmode object with different values
4403 with no sequence point. Will other languages need this to
4404 be more thorough?
4405
4406 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4407 to catch quickly any cases where the caller uses the value
4408 and fails to set WANT_VALUE.
4409
4410 If WANT_VALUE & 2 is set, this is a store into a call param on the
4411 stack, and block moves may need to be treated specially. */
4412
4413 rtx
4414 store_expr (tree exp, rtx target, int want_value)
4415 {
4416 rtx temp;
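/* DONT_RETURN_TARGET means TARGET must not be handed back as the value;
DONT_STORE_TARGET means the value already lives in TARGET and need not
be stored again. */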
4417 int dont_return_target = 0;
4418 int dont_store_target = 0;
4419
4420 if (VOID_TYPE_P (TREE_TYPE (exp)))
4421 {
4422 /* C++ can generate ?: expressions with a throw expression in one
4423 branch and an rvalue in the other. Here, we resolve attempts to
4424 store the throw expression's nonexistent result. */
4425 if (want_value)
4426 abort ();
4427 expand_expr (exp, const0_rtx, VOIDmode, 0);
4428 return NULL_RTX;
4429 }
4430 if (TREE_CODE (exp) == COMPOUND_EXPR)
4431 {
4432 /* Perform first part of compound expression, then assign from second
4433 part. */
4434 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4435 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4436 emit_queue ();
4437 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4438 }
4439 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4440 {
4441 /* For conditional expression, get safe form of the target. Then
4442 test the condition, doing the appropriate assignment on either
4443 side. This avoids the creation of unnecessary temporaries.
4444 For non-BLKmode, it is more efficient not to do this. */
4445
4446 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4447
4448 emit_queue ();
4449 target = protect_from_queue (target, 1);
4450
4451 do_pending_stack_adjust ();
4452 NO_DEFER_POP;
4453 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4454 start_cleanup_deferral ();
4455 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4456 end_cleanup_deferral ();
4457 emit_queue ();
4458 emit_jump_insn (gen_jump (lab2));
4459 emit_barrier ();
4460 emit_label (lab1);
4461 start_cleanup_deferral ();
4462 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4463 end_cleanup_deferral ();
4464 emit_queue ();
4465 emit_label (lab2);
4466 OK_DEFER_POP;
4467
4468 return want_value & 1 ? target : NULL_RTX;
4469 }
4470 else if (queued_subexp_p (target))
4471 /* If target contains a postincrement, let's not risk
4472 using it as the place to generate the rhs. */
4473 {
4474 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4475 {
4476 /* Expand EXP into a new pseudo. */
4477 temp = gen_reg_rtx (GET_MODE (target));
4478 temp = expand_expr (exp, temp, GET_MODE (target),
4479 (want_value & 2
4480 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4481 }
4482 else
4483 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4484 (want_value & 2
4485 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4486
4487 /* If target is volatile, ANSI requires accessing the value
4488 *from* the target, if it is accessed. So make that happen.
4489 In no case return the target itself. */
4490 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4491 dont_return_target = 1;
4492 }
4493 else if ((want_value & 1) != 0
4494 && GET_CODE (target) == MEM
4495 && ! MEM_VOLATILE_P (target)
4496 && GET_MODE (target) != BLKmode)
4497 /* If target is in memory and caller wants value in a register instead,
4498 arrange that. Pass TARGET as target for expand_expr so that,
4499 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4500 We know expand_expr will not use the target in that case.
4501 Don't do this if TARGET is volatile because we are supposed
4502 to write it and then read it. */
4503 {
4504 temp = expand_expr (exp, target, GET_MODE (target),
4505 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4506 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4507 {
4508 /* If TEMP is already in the desired TARGET, only copy it from
4509 memory and don't store it there again. */
4510 if (temp == target
4511 || (rtx_equal_p (temp, target)
4512 && ! side_effects_p (temp) && ! side_effects_p (target)))
4513 dont_store_target = 1;
4514 temp = copy_to_reg (temp);
4515 }
4516 dont_return_target = 1;
4517 }
4518 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4519 /* If this is a scalar in a register that is stored in a wider mode
4520 than the declared mode, compute the result into its declared mode
4521 and then convert to the wider mode. Our value is the computed
4522 expression. */
4523 {
4524 rtx inner_target = 0;
4525
4526 /* If we don't want a value, we can do the conversion inside EXP,
4527 which will often result in some optimizations. Do the conversion
4528 in two steps: first change the signedness, if needed, then
4529 the extend. But don't do this if the type of EXP is a subtype
4530 of something else since then the conversion might involve
4531 more than just converting modes. */
4532 if ((want_value & 1) == 0
4533 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4534 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4535 {
4536 if (TREE_UNSIGNED (TREE_TYPE (exp))
4537 != SUBREG_PROMOTED_UNSIGNED_P (target))
4538 exp = convert
4539 ((*lang_hooks.types.signed_or_unsigned_type)
4540 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4541
4542 exp = convert ((*lang_hooks.types.type_for_mode)
4543 (GET_MODE (SUBREG_REG (target)),
4544 SUBREG_PROMOTED_UNSIGNED_P (target)),
4545 exp);
4546
4547 inner_target = SUBREG_REG (target);
4548 }
4549
4550 temp = expand_expr (exp, inner_target, VOIDmode,
4551 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4552
4553 /* If TEMP is a MEM and we want a result value, make the access
4554 now so it gets done only once. Strictly speaking, this is
4555 only necessary if the MEM is volatile, or if the address
4556 overlaps TARGET. But not performing the load twice also
4557 reduces the amount of rtl we generate and then have to CSE. */
4558 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4559 temp = copy_to_reg (temp);
4560
4561 /* If TEMP is a VOIDmode constant, use convert_modes to make
4562 sure that we properly convert it. */
4563 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4564 {
4565 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4566 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4567 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4568 GET_MODE (target), temp,
4569 SUBREG_PROMOTED_UNSIGNED_P (target));
4570 }
4571
4572 convert_move (SUBREG_REG (target), temp,
4573 SUBREG_PROMOTED_UNSIGNED_P (target));
4574
4575 /* If we promoted a constant, change the mode back down to match
4576 target. Otherwise, the caller might get confused by a result whose
4577 mode is larger than expected. */
4578
4579 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4580 {
4581 if (GET_MODE (temp) != VOIDmode)
4582 {
4583 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4584 SUBREG_PROMOTED_VAR_P (temp) = 1;
4585 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4586 SUBREG_PROMOTED_UNSIGNED_P (target));
4587 }
4588 else
4589 temp = convert_modes (GET_MODE (target),
4590 GET_MODE (SUBREG_REG (target)),
4591 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4592 }
4593
4594 return want_value & 1 ? temp : NULL_RTX;
4595 }
4596 else
4597 {
4598 temp = expand_expr (exp, target, GET_MODE (target),
4599 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4600 /* Return TARGET if it's a specified hardware register.
4601 If TARGET is a volatile mem ref, either return TARGET
4602 or return a reg copied *from* TARGET; ANSI requires this.
4603
4604 Otherwise, if TEMP is not TARGET, return TEMP
4605 if it is constant (for efficiency),
4606 or if we really want the correct value. */
4607 if (!(target && GET_CODE (target) == REG
4608 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4609 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4610 && ! rtx_equal_p (temp, target)
4611 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4612 dont_return_target = 1;
4613 }
4614
4615 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4616 the same as that of TARGET, adjust the constant. This is needed, for
4617 example, in case it is a CONST_DOUBLE and we want only a word-sized
4618 value. */
4619 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4620 && TREE_CODE (exp) != ERROR_MARK
4621 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4622 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4623 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4624
4625 /* If value was not generated in the target, store it there.
4626 Convert the value to TARGET's type first if necessary.
4627 If TEMP and TARGET compare equal according to rtx_equal_p, but
4628 one or both of them are volatile memory refs, we have to distinguish
4629 two cases:
4630 - expand_expr has used TARGET. In this case, we must not generate
4631 another copy. This can be detected by TARGET being equal according
4632 to == .
4633 - expand_expr has not used TARGET - that means that the source just
4634 happens to have the same RTX form. Since temp will have been created
4635 by expand_expr, it will compare unequal according to == .
4636 We must generate a copy in this case, to reach the correct number
4637 of volatile memory references. */
4638
4639 if ((! rtx_equal_p (temp, target)
4640 || (temp != target && (side_effects_p (temp)
4641 || side_effects_p (target))))
4642 && TREE_CODE (exp) != ERROR_MARK
4643 && ! dont_store_target
4644 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4645 but TARGET is not a valid memory reference, TEMP will differ
4646 from TARGET although it is really the same location. */
4647 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4648 || target != DECL_RTL_IF_SET (exp))
4649 /* If there's nothing to copy, don't bother. Don't call expr_size
4650 unless necessary, because some front ends' (e.g. C++) expr_size hook
4651 aborts on objects that are not supposed to be bit-copied or
4652 bit-initialized. */
4653 && expr_size (exp) != const0_rtx)
4654 {
4655 target = protect_from_queue (target, 1);
4656 if (GET_MODE (temp) != GET_MODE (target)
4657 && GET_MODE (temp) != VOIDmode)
4658 {
4659 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4660 if (dont_return_target)
4661 {
4662 /* In this case, we will return TEMP,
4663 so make sure it has the proper mode.
4664 But don't forget to store the value into TARGET. */
4665 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4666 emit_move_insn (target, temp);
4667 }
4668 else
4669 convert_move (target, temp, unsignedp);
4670 }
4671
4672 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4673 {
4674 /* Handle copying a string constant into an array. The string
4675 constant may be shorter than the array. So copy just the string's
4676 actual length, and clear the rest. First get the size of the data
4677 type of the string, which is actually the size of the target. */
4678 rtx size = expr_size (exp);
4679
4680 if (GET_CODE (size) == CONST_INT
4681 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4682 emit_block_move (target, temp, size,
4683 (want_value & 2
4684 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4685 else
4686 {
4687 /* Compute the size of the data to copy from the string. */
4688 tree copy_size
4689 = size_binop (MIN_EXPR,
4690 make_tree (sizetype, size),
4691 size_int (TREE_STRING_LENGTH (exp)));
4692 rtx copy_size_rtx
4693 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4694 (want_value & 2
4695 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4696 rtx label = 0;
4697
4698 /* Copy that much. */
4699 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4700 TREE_UNSIGNED (sizetype));
4701 emit_block_move (target, temp, copy_size_rtx,
4702 (want_value & 2
4703 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4704
4705 /* Figure out how much is left in TARGET that we have to clear.
4706 Do all calculations in ptr_mode. */
4707 if (GET_CODE (copy_size_rtx) == CONST_INT)
4708 {
4709 size = plus_constant (size, -INTVAL (copy_size_rtx));
4710 target = adjust_address (target, BLKmode,
4711 INTVAL (copy_size_rtx));
4712 }
4713 else
4714 {
4715 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4716 copy_size_rtx, NULL_RTX, 0,
4717 OPTAB_LIB_WIDEN);
4718
4719 #ifdef POINTERS_EXTEND_UNSIGNED
4720 if (GET_MODE (copy_size_rtx) != Pmode)
4721 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4722 TREE_UNSIGNED (sizetype));
4723 #endif
4724
4725 target = offset_address (target, copy_size_rtx,
4726 highest_pow2_factor (copy_size));
4727 label = gen_label_rtx ();
4728 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4729 GET_MODE (size), 0, label);
4730 }
4731
4732 if (size != const0_rtx)
4733 clear_storage (target, size);
4734
4735 if (label)
4736 emit_label (label);
4737 }
4738 }
4739 /* Handle calls that return values in multiple non-contiguous locations.
4740 The Irix 6 ABI has examples of this. */
4741 else if (GET_CODE (target) == PARALLEL)
4742 emit_group_load (target, temp, TREE_TYPE (exp),
4743 int_size_in_bytes (TREE_TYPE (exp)));
4744 else if (GET_MODE (temp) == BLKmode)
4745 emit_block_move (target, temp, expr_size (exp),
4746 (want_value & 2
4747 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4748 else
4749 emit_move_insn (target, temp);
4750 }
4751
4752 /* If we don't want a value, return NULL_RTX. */
4753 if ((want_value & 1) == 0)
4754 return NULL_RTX;
4755
4756 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4757 ??? The latter test doesn't seem to make sense. */
4758 else if (dont_return_target && GET_CODE (temp) != MEM)
4759 return temp;
4760
4761 /* Return TARGET itself if it is a hard register. */
4762 else if ((want_value & 1) != 0
4763 && GET_MODE (target) != BLKmode
4764 && ! (GET_CODE (target) == REG
4765 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4766 return copy_to_reg (target);
4767
4768 else
4769 return target;
4770 }
4771 \f
4772 /* Return 1 if EXP just contains zeros. */
4773
4774 static int
4775 is_zeros_p (tree exp)
4776 {
4777 tree elt;
4778
4779 switch (TREE_CODE (exp))
4780 {
4781 case CONVERT_EXPR:
4782 case NOP_EXPR:
4783 case NON_LVALUE_EXPR:
4784 case VIEW_CONVERT_EXPR:
4785 return is_zeros_p (TREE_OPERAND (exp, 0));
4786
4787 case INTEGER_CST:
4788 return integer_zerop (exp);
4789
4790 case COMPLEX_CST:
4791 return
4792 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4793
4794 case REAL_CST:
4795 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4796
4797 case VECTOR_CST:
4798 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4799 elt = TREE_CHAIN (elt))
4800 if (!is_zeros_p (TREE_VALUE (elt)))
4801 return 0;
4802
4803 return 1;
4804
4805 case CONSTRUCTOR:
4806 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4807 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4808 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4809 if (! is_zeros_p (TREE_VALUE (elt)))
4810 return 0;
4811
4812 return 1;
4813
4814 default:
4815 return 0;
4816 }
4817 }
4818
4819 /* Return 1 if EXP contains mostly (3/4) zeros. */
4820
4821 static int
4822 mostly_zeros_p (tree exp)
4823 {
4824 if (TREE_CODE (exp) == CONSTRUCTOR)
4825 {
4826 int elts = 0, zeros = 0;
4827 tree elt = CONSTRUCTOR_ELTS (exp);
4828 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4829 {
4830 /* If there are no ranges of true bits, it is all zero. */
4831 return elt == NULL_TREE;
4832 }
4833 for (; elt; elt = TREE_CHAIN (elt))
4834 {
4835 /* We do not handle the case where the index is a RANGE_EXPR,
4836 so the statistic will be somewhat inaccurate.
4837 We do make a more accurate count in store_constructor itself,
4838 so since this function is only used for nested array elements,
4839 this should be close enough. */
4840 if (mostly_zeros_p (TREE_VALUE (elt)))
4841 zeros++;
4842 elts++;
4843 }
4844
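/* At least 3/4 of the elements are themselves mostly zero. */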
4845 return 4 * zeros >= 3 * elts;
4846 }
4847
4848 return is_zeros_p (exp);
4849 }
4850 \f
4851 /* Helper function for store_constructor.
4852 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4853 TYPE is the type of the CONSTRUCTOR, not the element type.
4854 CLEARED is as for store_constructor.
4855 ALIAS_SET is the alias set to use for any stores.
4856
4857 This provides a recursive shortcut back to store_constructor when it isn't
4858 necessary to go through store_field. This is so that we can pass through
4859 the cleared field to let store_constructor know that we may not have to
4860 clear a substructure if the outer structure has already been cleared. */
4861
4862 static void
4863 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4864 HOST_WIDE_INT bitpos, enum machine_mode mode,
4865 tree exp, tree type, int cleared, int alias_set)
4866 {
4867 if (TREE_CODE (exp) == CONSTRUCTOR
4868 && bitpos % BITS_PER_UNIT == 0
4869 /* If we have a nonzero bitpos for a register target, then we just
4870 let store_field do the bitfield handling. This is unlikely to
4871 generate unnecessary clear instructions anyway. */
4872 && (bitpos == 0 || GET_CODE (target) == MEM))
4873 {
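/* Rebase TARGET at the byte position of the field, switching to BLKmode
when TARGET is BLKmode or the position is not aligned for TARGET's
mode. */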
4874 if (GET_CODE (target) == MEM)
4875 target
4876 = adjust_address (target,
4877 GET_MODE (target) == BLKmode
4878 || 0 != (bitpos
4879 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4880 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4881
4882
4883 /* Update the alias set, if required. */
4884 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4885 && MEM_ALIAS_SET (target) != 0)
4886 {
4887 target = copy_rtx (target);
4888 set_mem_alias_set (target, alias_set);
4889 }
4890
4891 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4892 }
4893 else
4894 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4895 alias_set);
4896 }
4897
4898 /* Store the value of constructor EXP into the rtx TARGET.
4899 TARGET is either a REG or a MEM; we know it cannot conflict, since
4900 safe_from_p has been called.
4901 CLEARED is true if TARGET is known to have been zero'd.
4902 SIZE is the number of bytes of TARGET we are allowed to modify: this
4903 may not be the same as the size of EXP if we are assigning to a field
4904 which has been packed to exclude padding bits. */
4905
4906 static void
4907 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4908 {
4909 tree type = TREE_TYPE (exp);
4910 #ifdef WORD_REGISTER_OPERATIONS
4911 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4912 #endif
4913
4914 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4915 || TREE_CODE (type) == QUAL_UNION_TYPE)
4916 {
4917 tree elt;
4918
4919 /* If size is zero or the target is already cleared, do nothing. */
4920 if (size == 0 || cleared)
4921 cleared = 1;
4922 /* We either clear the aggregate or indicate the value is dead. */
4923 else if ((TREE_CODE (type) == UNION_TYPE
4924 || TREE_CODE (type) == QUAL_UNION_TYPE)
4925 && ! CONSTRUCTOR_ELTS (exp))
4926 /* If the constructor is empty, clear the union. */
4927 {
4928 clear_storage (target, expr_size (exp));
4929 cleared = 1;
4930 }
4931
4932 /* If we are building a static constructor into a register,
4933 set the initial value as zero so we can fold the value into
4934 a constant. But if more than one register is involved,
4935 this probably loses. */
4936 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4937 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4938 {
4939 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4940 cleared = 1;
4941 }
4942
4943 /* If the constructor has fewer fields than the structure
4944 or if we are initializing the structure to mostly zeros,
4945 clear the whole structure first. Don't do this if TARGET is a
4946 register whose mode size isn't equal to SIZE since clear_storage
4947 can't handle this case. */
4948 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4949 || mostly_zeros_p (exp))
4950 && (GET_CODE (target) != REG
4951 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4952 == size)))
4953 {
4954 rtx xtarget = target;
4955
4956 if (readonly_fields_p (type))
4957 {
4958 xtarget = copy_rtx (xtarget);
4959 RTX_UNCHANGING_P (xtarget) = 1;
4960 }
4961
4962 clear_storage (xtarget, GEN_INT (size));
4963 cleared = 1;
4964 }
4965
4966 if (! cleared)
4967 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4968
4969 /* Store each element of the constructor into
4970 the corresponding field of TARGET. */
4971
4972 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4973 {
4974 tree field = TREE_PURPOSE (elt);
4975 tree value = TREE_VALUE (elt);
4976 enum machine_mode mode;
4977 HOST_WIDE_INT bitsize;
4978 HOST_WIDE_INT bitpos = 0;
4979 tree offset;
4980 rtx to_rtx = target;
4981
4982 /* Just ignore missing fields.
4983 We cleared the whole structure, above,
4984 if any fields are missing. */
4985 if (field == 0)
4986 continue;
4987
4988 if (cleared && is_zeros_p (value))
4989 continue;
4990
4991 if (host_integerp (DECL_SIZE (field), 1))
4992 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4993 else
4994 bitsize = -1;
4995
4996 mode = DECL_MODE (field);
4997 if (DECL_BIT_FIELD (field))
4998 mode = VOIDmode;
4999
5000 offset = DECL_FIELD_OFFSET (field);
5001 if (host_integerp (offset, 0)
5002 && host_integerp (bit_position (field), 0))
5003 {
5004 bitpos = int_bit_position (field);
5005 offset = 0;
5006 }
5007 else
5008 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5009
5010 if (offset)
5011 {
5012 rtx offset_rtx;
5013
5014 if (CONTAINS_PLACEHOLDER_P (offset))
5015 offset = build (WITH_RECORD_EXPR, sizetype,
5016 offset, make_tree (TREE_TYPE (exp), target));
5017
5018 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5019 if (GET_CODE (to_rtx) != MEM)
5020 abort ();
5021
5022 #ifdef POINTERS_EXTEND_UNSIGNED
5023 if (GET_MODE (offset_rtx) != Pmode)
5024 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5025 #else
5026 if (GET_MODE (offset_rtx) != ptr_mode)
5027 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5028 #endif
5029
5030 to_rtx = offset_address (to_rtx, offset_rtx,
5031 highest_pow2_factor (offset));
5032 }
5033
5034 if (TREE_READONLY (field))
5035 {
5036 if (GET_CODE (to_rtx) == MEM)
5037 to_rtx = copy_rtx (to_rtx);
5038
5039 RTX_UNCHANGING_P (to_rtx) = 1;
5040 }
5041
5042 #ifdef WORD_REGISTER_OPERATIONS
5043 /* If this initializes a field that is smaller than a word, at the
5044 start of a word, try to widen it to a full word.
5045 This special case allows us to output C++ member function
5046 initializations in a form that the optimizers can understand. */
5047 if (GET_CODE (target) == REG
5048 && bitsize < BITS_PER_WORD
5049 && bitpos % BITS_PER_WORD == 0
5050 && GET_MODE_CLASS (mode) == MODE_INT
5051 && TREE_CODE (value) == INTEGER_CST
5052 && exp_size >= 0
5053 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5054 {
5055 tree type = TREE_TYPE (value);
5056
5057 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5058 {
5059 type = (*lang_hooks.types.type_for_size)
5060 (BITS_PER_WORD, TREE_UNSIGNED (type));
5061 value = convert (type, value);
5062 }
5063
5064 if (BYTES_BIG_ENDIAN)
5065 value
5066 = fold (build (LSHIFT_EXPR, type, value,
5067 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5068 bitsize = BITS_PER_WORD;
5069 mode = word_mode;
5070 }
5071 #endif
5072
5073 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5074 && DECL_NONADDRESSABLE_P (field))
5075 {
5076 to_rtx = copy_rtx (to_rtx);
5077 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5078 }
5079
5080 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5081 value, type, cleared,
5082 get_alias_set (TREE_TYPE (field)));
5083 }
5084 }
5085 else if (TREE_CODE (type) == ARRAY_TYPE
5086 || TREE_CODE (type) == VECTOR_TYPE)
5087 {
5088 tree elt;
5089 int i;
5090 int need_to_clear;
5091 tree domain = TYPE_DOMAIN (type);
5092 tree elttype = TREE_TYPE (type);
5093 int const_bounds_p;
5094 HOST_WIDE_INT minelt = 0;
5095 HOST_WIDE_INT maxelt = 0;
5096
5097 /* Vectors are like arrays, but the domain is stored via an array
5098 type indirectly. */
5099 if (TREE_CODE (type) == VECTOR_TYPE)
5100 {
5101 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5102 the same field as TYPE_DOMAIN, we are not guaranteed that
5103 it always will. */
5104 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5105 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5106 }
5107
5108 const_bounds_p = (TYPE_MIN_VALUE (domain)
5109 && TYPE_MAX_VALUE (domain)
5110 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5111 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5112
5113 /* If we have constant bounds for the range of the type, get them. */
5114 if (const_bounds_p)
5115 {
5116 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5117 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5118 }
5119
5120 /* If the constructor has fewer elements than the array,
5121 clear the whole array first. Similarly if this is
5122 a static constructor of a non-BLKmode object. */
5123 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5124 need_to_clear = 1;
5125 else
5126 {
5127 HOST_WIDE_INT count = 0, zero_count = 0;
5128 need_to_clear = ! const_bounds_p;
5129
5130 /* This loop is a more accurate version of the loop in
5131 mostly_zeros_p (it handles RANGE_EXPR in an index).
5132 It is also needed to check for missing elements. */
5133 for (elt = CONSTRUCTOR_ELTS (exp);
5134 elt != NULL_TREE && ! need_to_clear;
5135 elt = TREE_CHAIN (elt))
5136 {
5137 tree index = TREE_PURPOSE (elt);
5138 HOST_WIDE_INT this_node_count;
5139
5140 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5141 {
5142 tree lo_index = TREE_OPERAND (index, 0);
5143 tree hi_index = TREE_OPERAND (index, 1);
5144
5145 if (! host_integerp (lo_index, 1)
5146 || ! host_integerp (hi_index, 1))
5147 {
5148 need_to_clear = 1;
5149 break;
5150 }
5151
5152 this_node_count = (tree_low_cst (hi_index, 1)
5153 - tree_low_cst (lo_index, 1) + 1);
5154 }
5155 else
5156 this_node_count = 1;
5157
5158 count += this_node_count;
5159 if (mostly_zeros_p (TREE_VALUE (elt)))
5160 zero_count += this_node_count;
5161 }
5162
5163 /* Clear the entire array first if there are any missing elements,
5164 or if the incidence of zero elements is >= 75%. */
5165 if (! need_to_clear
5166 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5167 need_to_clear = 1;
5168 }
5169
5170 if (need_to_clear && size > 0)
5171 {
5172 if (! cleared)
5173 {
5174 if (REG_P (target))
5175 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5176 else
5177 clear_storage (target, GEN_INT (size));
5178 }
5179 cleared = 1;
5180 }
5181 else if (REG_P (target))
5182 /* Inform later passes that the old value is dead. */
5183 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5184
5185 /* Store each element of the constructor into
5186 the corresponding element of TARGET, determined
5187 by counting the elements. */
5188 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5189 elt;
5190 elt = TREE_CHAIN (elt), i++)
5191 {
5192 enum machine_mode mode;
5193 HOST_WIDE_INT bitsize;
5194 HOST_WIDE_INT bitpos;
5195 int unsignedp;
5196 tree value = TREE_VALUE (elt);
5197 tree index = TREE_PURPOSE (elt);
5198 rtx xtarget = target;
5199
5200 if (cleared && is_zeros_p (value))
5201 continue;
5202
5203 unsignedp = TREE_UNSIGNED (elttype);
5204 mode = TYPE_MODE (elttype);
5205 if (mode == BLKmode)
5206 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5207 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5208 : -1);
5209 else
5210 bitsize = GET_MODE_BITSIZE (mode);
5211
5212 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5213 {
5214 tree lo_index = TREE_OPERAND (index, 0);
5215 tree hi_index = TREE_OPERAND (index, 1);
5216 rtx index_r, pos_rtx, loop_end;
5217 struct nesting *loop;
5218 HOST_WIDE_INT lo, hi, count;
5219 tree position;
5220
5221 /* If the range is constant and "small", unroll the loop. */
5222 if (const_bounds_p
5223 && host_integerp (lo_index, 0)
5224 && host_integerp (hi_index, 0)
5225 && (lo = tree_low_cst (lo_index, 0),
5226 hi = tree_low_cst (hi_index, 0),
5227 count = hi - lo + 1,
5228 (GET_CODE (target) != MEM
5229 || count <= 2
5230 || (host_integerp (TYPE_SIZE (elttype), 1)
5231 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5232 <= 40 * 8)))))
5233 {
5234 lo -= minelt; hi -= minelt;
5235 for (; lo <= hi; lo++)
5236 {
5237 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5238
5239 if (GET_CODE (target) == MEM
5240 && !MEM_KEEP_ALIAS_SET_P (target)
5241 && TREE_CODE (type) == ARRAY_TYPE
5242 && TYPE_NONALIASED_COMPONENT (type))
5243 {
5244 target = copy_rtx (target);
5245 MEM_KEEP_ALIAS_SET_P (target) = 1;
5246 }
5247
5248 store_constructor_field
5249 (target, bitsize, bitpos, mode, value, type, cleared,
5250 get_alias_set (elttype));
5251 }
5252 }
5253 else
5254 {
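/* Expand HI_INDEX up front, presumably so that any side effects
in it happen before the loop is entered. */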
5255 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5256 loop_end = gen_label_rtx ();
5257
5258 unsignedp = TREE_UNSIGNED (domain);
5259
5260 index = build_decl (VAR_DECL, NULL_TREE, domain);
5261
5262 index_r
5263 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5264 &unsignedp, 0));
5265 SET_DECL_RTL (index, index_r);
5266 if (TREE_CODE (value) == SAVE_EXPR
5267 && SAVE_EXPR_RTL (value) == 0)
5268 {
5269 /* Make sure value gets expanded once before the
5270 loop. */
5271 expand_expr (value, const0_rtx, VOIDmode, 0);
5272 emit_queue ();
5273 }
5274 store_expr (lo_index, index_r, 0);
5275 loop = expand_start_loop (0);
5276
5277 /* Assign value to element index. */
5278 position
5279 = convert (ssizetype,
5280 fold (build (MINUS_EXPR, TREE_TYPE (index),
5281 index, TYPE_MIN_VALUE (domain))));
5282 position = size_binop (MULT_EXPR, position,
5283 convert (ssizetype,
5284 TYPE_SIZE_UNIT (elttype)));
5285
5286 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5287 xtarget = offset_address (target, pos_rtx,
5288 highest_pow2_factor (position));
5289 xtarget = adjust_address (xtarget, mode, 0);
5290 if (TREE_CODE (value) == CONSTRUCTOR)
5291 store_constructor (value, xtarget, cleared,
5292 bitsize / BITS_PER_UNIT);
5293 else
5294 store_expr (value, xtarget, 0);
5295
5296 expand_exit_loop_if_false (loop,
5297 build (LT_EXPR, integer_type_node,
5298 index, hi_index));
5299
5300 expand_increment (build (PREINCREMENT_EXPR,
5301 TREE_TYPE (index),
5302 index, integer_one_node), 0, 0);
5303 expand_end_loop ();
5304 emit_label (loop_end);
5305 }
5306 }
5307 else if ((index != 0 && ! host_integerp (index, 0))
5308 || ! host_integerp (TYPE_SIZE (elttype), 1))
5309 {
5310 tree position;
5311
5312 if (index == 0)
5313 index = ssize_int (1);
5314
5315 if (minelt)
5316 index = convert (ssizetype,
5317 fold (build (MINUS_EXPR, index,
5318 TYPE_MIN_VALUE (domain))));
5319
5320 position = size_binop (MULT_EXPR, index,
5321 convert (ssizetype,
5322 TYPE_SIZE_UNIT (elttype)));
5323 xtarget = offset_address (target,
5324 expand_expr (position, 0, VOIDmode, 0),
5325 highest_pow2_factor (position));
5326 xtarget = adjust_address (xtarget, mode, 0);
5327 store_expr (value, xtarget, 0);
5328 }
5329 else
5330 {
5331 if (index != 0)
5332 bitpos = ((tree_low_cst (index, 0) - minelt)
5333 * tree_low_cst (TYPE_SIZE (elttype), 1));
5334 else
5335 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5336
5337 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5338 && TREE_CODE (type) == ARRAY_TYPE
5339 && TYPE_NONALIASED_COMPONENT (type))
5340 {
5341 target = copy_rtx (target);
5342 MEM_KEEP_ALIAS_SET_P (target) = 1;
5343 }
5344
5345 store_constructor_field (target, bitsize, bitpos, mode, value,
5346 type, cleared, get_alias_set (elttype));
5347
5348 }
5349 }
5350 }
5351
5352 /* Set constructor assignments. */
5353 else if (TREE_CODE (type) == SET_TYPE)
5354 {
5355 tree elt = CONSTRUCTOR_ELTS (exp);
5356 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5357 tree domain = TYPE_DOMAIN (type);
5358 tree domain_min, domain_max, bitlength;
5359
5360 /* The default implementation strategy is to extract the constant
5361 parts of the constructor, use that to initialize the target,
5362 and then "or" in whatever non-constant ranges we need in addition.
5363
5364 If a large set is all zero or all ones, it is
5365 probably better to set it using memset (if available) or bzero.
5366 Also, if a large set has just a single range, it may be
5367 better to first clear the set (using bzero/memset), and
5368 then set the bits we want. */
5369
5370 /* Check for all zeros. */
5371 if (elt == NULL_TREE && size > 0)
5372 {
5373 if (!cleared)
5374 clear_storage (target, GEN_INT (size));
5375 return;
5376 }
5377
5378 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5379 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5380 bitlength = size_binop (PLUS_EXPR,
5381 size_diffop (domain_max, domain_min),
5382 ssize_int (1));
5383
5384 nbits = tree_low_cst (bitlength, 1);
5385
5386 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5387 are "complicated" (more than one range), initialize (the
5388 constant parts) by copying from a constant. */
5389 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5390 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5391 {
5392 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5393 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5394 char *bit_buffer = (char *) alloca (nbits);
5395 HOST_WIDE_INT word = 0;
5396 unsigned int bit_pos = 0;
5397 unsigned int ibit = 0;
5398 unsigned int offset = 0; /* In bytes from beginning of set. */
5399
5400 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5401 for (;;)
5402 {
5403 if (bit_buffer[ibit])
5404 {
5405 if (BYTES_BIG_ENDIAN)
5406 word |= (1 << (set_word_size - 1 - bit_pos));
5407 else
5408 word |= 1 << bit_pos;
5409 }
5410
5411 bit_pos++; ibit++;
5412 if (bit_pos >= set_word_size || ibit == nbits)
5413 {
5414 if (word != 0 || ! cleared)
5415 {
5416 rtx datum = GEN_INT (word);
5417 rtx to_rtx;
5418
5419 /* The assumption here is that it is safe to use
5420 XEXP if the set is multi-word, but not if
5421 it's single-word. */
5422 if (GET_CODE (target) == MEM)
5423 to_rtx = adjust_address (target, mode, offset);
5424 else if (offset == 0)
5425 to_rtx = target;
5426 else
5427 abort ();
5428 emit_move_insn (to_rtx, datum);
5429 }
5430
5431 if (ibit == nbits)
5432 break;
5433 word = 0;
5434 bit_pos = 0;
5435 offset += set_word_size / BITS_PER_UNIT;
5436 }
5437 }
5438 }
5439 else if (!cleared)
5440 /* Don't bother clearing storage if the set is all ones. */
5441 if (TREE_CHAIN (elt) != NULL_TREE
5442 || (TREE_PURPOSE (elt) == NULL_TREE
5443 ? nbits != 1
5444 : ( ! host_integerp (TREE_VALUE (elt), 0)
5445 || ! host_integerp (TREE_PURPOSE (elt), 0)
5446 || (tree_low_cst (TREE_VALUE (elt), 0)
5447 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5448 != (HOST_WIDE_INT) nbits))))
5449 clear_storage (target, expr_size (exp));
5450
5451 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5452 {
5453 /* Start of range of element or NULL. */
5454 tree startbit = TREE_PURPOSE (elt);
5455 /* End of range of element, or element value. */
5456 tree endbit = TREE_VALUE (elt);
5457 HOST_WIDE_INT startb, endb;
5458 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5459
5460 bitlength_rtx = expand_expr (bitlength,
5461 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5462
5463 /* Handle non-range tuple element like [ expr ]. */
5464 if (startbit == NULL_TREE)
5465 {
5466 startbit = save_expr (endbit);
5467 endbit = startbit;
5468 }
5469
5470 startbit = convert (sizetype, startbit);
5471 endbit = convert (sizetype, endbit);
5472 if (! integer_zerop (domain_min))
5473 {
5474 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5475 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5476 }
5477 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5478 EXPAND_CONST_ADDRESS);
5479 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5480 EXPAND_CONST_ADDRESS);
5481
5482 if (REG_P (target))
5483 {
5484 targetx
5485 = assign_temp
5486 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5487 (GET_MODE (target), 0),
5488 TYPE_QUAL_CONST)),
5489 0, 1, 1);
5490 emit_move_insn (targetx, target);
5491 }
5492
5493 else if (GET_CODE (target) == MEM)
5494 targetx = target;
5495 else
5496 abort ();
5497
5498 /* Optimization: If startbit and endbit are constants divisible
5499 by BITS_PER_UNIT, call memset instead. */
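 /* A hedged source-level sketch (plain C, not GCC output) of what the
    memset call emitted below amounts to: setting every byte that covers
    bits [STARTB, ENDB) of the in-memory set to all-ones.  The helper
    name and the hard-coded 8-bit byte are assumptions made only for
    this illustration.  */
 #if 0
 #include <string.h>

 static void
 set_bit_range_bytewise (unsigned char *set, unsigned long startb,
                         unsigned long endb)
 {
   /* Only valid when both endpoints are byte-aligned, which is exactly
      the condition tested below.  */
   memset (set + startb / 8, -1, (endb - startb) / 8);
 }
 #endif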
5500 if (TARGET_MEM_FUNCTIONS
5501 && TREE_CODE (startbit) == INTEGER_CST
5502 && TREE_CODE (endbit) == INTEGER_CST
5503 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5504 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5505 {
5506 emit_library_call (memset_libfunc, LCT_NORMAL,
5507 VOIDmode, 3,
5508 plus_constant (XEXP (targetx, 0),
5509 startb / BITS_PER_UNIT),
5510 Pmode,
5511 constm1_rtx, TYPE_MODE (integer_type_node),
5512 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5513 TYPE_MODE (sizetype));
5514 }
5515 else
5516 emit_library_call (setbits_libfunc, LCT_NORMAL,
5517 VOIDmode, 4, XEXP (targetx, 0),
5518 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5519 startbit_rtx, TYPE_MODE (sizetype),
5520 endbit_rtx, TYPE_MODE (sizetype));
5521
5522 if (REG_P (target))
5523 emit_move_insn (target, targetx);
5524 }
5525 }
5526
5527 else
5528 abort ();
5529 }
5530
5531 /* Store the value of EXP (an expression tree)
5532 into a subfield of TARGET which has mode MODE and occupies
5533 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5534 If MODE is VOIDmode, it means that we are storing into a bit-field.
5535
5536 If VALUE_MODE is VOIDmode, return nothing in particular.
5537 UNSIGNEDP is not used in this case.
5538
5539 Otherwise, return an rtx for the value stored. This rtx
5540 has mode VALUE_MODE if that is convenient to do.
5541 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5542
5543 TYPE is the type of the underlying object,
5544
5545 ALIAS_SET is the alias set for the destination. This value will
5546 (in general) be different from that for TARGET, since TARGET is a
5547 reference to the containing structure. */
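 /* A hedged illustration (plain C, not GCC internals) of the kind of
    source-level store that reaches this routine: assigning to a narrow
    bit-field member.  The struct, field widths, and helper name are made
    up for the example; for `x->f = v' below, BITSIZE would be 5 and
    BITPOS the field's bit offset within the containing object.  */
 #if 0
 struct bits { unsigned int pad : 3; unsigned int f : 5; };

 static void
 set_f (struct bits *x, unsigned int v)
 {
   x->f = v;   /* expanded via a bit-field store with a 5-bit BITSIZE */
 }
 #endif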
5548
5549 static rtx
5550 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5551 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5552 int unsignedp, tree type, int alias_set)
5553 {
5554 HOST_WIDE_INT width_mask = 0;
5555
5556 if (TREE_CODE (exp) == ERROR_MARK)
5557 return const0_rtx;
5558
5559 /* If we have nothing to store, do nothing unless the expression has
5560 side-effects. */
5561 if (bitsize == 0)
5562 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5563 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5564 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5565
5566 /* If we are storing into an unaligned field of an aligned union that is
5567 in a register, we may have the mode of TARGET being an integer mode but
5568 MODE == BLKmode. In that case, get an aligned object whose size and
5569 alignment are the same as TARGET and store TARGET into it (we can avoid
5570 the store if the field being stored is the entire width of TARGET). Then
5571 call ourselves recursively to store the field into a BLKmode version of
5572 that object. Finally, load from the object into TARGET. This is not
5573 very efficient in general, but should only be slightly more expensive
5574 than the otherwise-required unaligned accesses. Perhaps this can be
5575 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5576 twice, once with emit_move_insn and once via store_field. */
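   /* A rough source-level analogue (an assumption for illustration, not
      the code GCC emits) of the fallback described above: copy the
      register-held aggregate to a temporary in memory, store the field
      there, then reload the whole aggregate.  */
 #if 0
   union u { int whole; struct { short lo, hi; } parts; };

   static void
   store_hi (union u *reg_like, short v)
   {
     union u object = *reg_like;   /* emit_move_insn (object, target)   */
     object.parts.hi = v;          /* recursive store_field into OBJECT */
     *reg_like = object;           /* emit_move_insn (target, object)   */
   }
 #endif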
5577
5578 if (mode == BLKmode
5579 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5580 {
5581 rtx object = assign_temp (type, 0, 1, 1);
5582 rtx blk_object = adjust_address (object, BLKmode, 0);
5583
5584 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5585 emit_move_insn (object, target);
5586
5587 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5588 alias_set);
5589
5590 emit_move_insn (target, object);
5591
5592 /* We want to return the BLKmode version of the data. */
5593 return blk_object;
5594 }
5595
5596 if (GET_CODE (target) == CONCAT)
5597 {
5598 /* We're storing into a struct containing a single __complex. */
5599
5600 if (bitpos != 0)
5601 abort ();
5602 return store_expr (exp, target, 0);
5603 }
5604
5605 /* If the structure is in a register or if the component
5606 is a bit field, we cannot use addressing to access it.
5607 Use bit-field techniques or SUBREG to store in it. */
5608
5609 if (mode == VOIDmode
5610 || (mode != BLKmode && ! direct_store[(int) mode]
5611 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5612 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5613 || GET_CODE (target) == REG
5614 || GET_CODE (target) == SUBREG
5615 /* If the field isn't aligned enough to store as an ordinary memref,
5616 store it as a bit field. */
5617 || (mode != BLKmode
5618 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5619 || bitpos % GET_MODE_ALIGNMENT (mode))
5620 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5621 || (bitpos % BITS_PER_UNIT != 0)))
5622 /* If the RHS and field are a constant size and the size of the
5623 RHS isn't the same size as the bitfield, we must use bitfield
5624 operations. */
5625 || (bitsize >= 0
5626 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5627 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5628 {
5629 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5630
5631 /* If BITSIZE is narrower than the size of the type of EXP
5632 we will be narrowing TEMP. Normally, what's wanted are the
5633 low-order bits. However, if EXP's type is a record and this is
5634 a big-endian machine, we want the upper BITSIZE bits. */
5635 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5636 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5637 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5638 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5639 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5640 - bitsize),
5641 NULL_RTX, 1);
5642
5643 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5644 MODE. */
5645 if (mode != VOIDmode && mode != BLKmode
5646 && mode != TYPE_MODE (TREE_TYPE (exp)))
5647 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5648
5649 /* If the modes of TARGET and TEMP are both BLKmode, both
5650 must be in memory and BITPOS must be aligned on a byte
5651 boundary. If so, we simply do a block copy. */
5652 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5653 {
5654 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5655 || bitpos % BITS_PER_UNIT != 0)
5656 abort ();
5657
5658 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5659 emit_block_move (target, temp,
5660 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5661 / BITS_PER_UNIT),
5662 BLOCK_OP_NORMAL);
5663
5664 return value_mode == VOIDmode ? const0_rtx : target;
5665 }
5666
5667 /* Store the value in the bitfield. */
5668 store_bit_field (target, bitsize, bitpos, mode, temp,
5669 int_size_in_bytes (type));
5670
5671 if (value_mode != VOIDmode)
5672 {
5673 /* The caller wants an rtx for the value.
5674 If possible, avoid refetching from the bitfield itself. */
5675 if (width_mask != 0
5676 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5677 {
5678 tree count;
5679 enum machine_mode tmode;
5680
5681 tmode = GET_MODE (temp);
5682 if (tmode == VOIDmode)
5683 tmode = value_mode;
5684
5685 if (unsignedp)
5686 return expand_and (tmode, temp,
5687 gen_int_mode (width_mask, tmode),
5688 NULL_RTX);
5689
5690 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5691 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5692 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5693 }
5694
5695 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5696 NULL_RTX, value_mode, VOIDmode,
5697 int_size_in_bytes (type));
5698 }
5699 return const0_rtx;
5700 }
5701 else
5702 {
5703 rtx addr = XEXP (target, 0);
5704 rtx to_rtx = target;
5705
5706 /* If a value is wanted, it must be the lhs;
5707 so make the address stable for multiple use. */
5708
5709 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5710 && ! CONSTANT_ADDRESS_P (addr)
5711 /* A frame-pointer reference is already stable. */
5712 && ! (GET_CODE (addr) == PLUS
5713 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5714 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5715 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5716 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5717
5718 /* Now build a reference to just the desired component. */
5719
5720 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5721
5722 if (to_rtx == target)
5723 to_rtx = copy_rtx (to_rtx);
5724
5725 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5726 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5727 set_mem_alias_set (to_rtx, alias_set);
5728
5729 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5730 }
5731 }
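 /* A minimal sketch (plain C, not GCC code) of the mask/shift trick used
    above to hand back the value just stored without re-reading the
    bit-field: unsigned fields are masked with WIDTH_MASK, signed fields
    are shifted up and arithmetically back down.  The helper names are
    hypothetical and BITSIZE is assumed smaller than the width of int.  */
 #if 0
 static unsigned int
 refetch_unsigned (unsigned int temp, int bitsize)
 {
   return temp & ((1u << bitsize) - 1u);   /* expand_and with WIDTH_MASK   */
 }

 static int
 refetch_signed (int temp, int bitsize)
 {
   int count = (int) sizeof (int) * 8 - bitsize;
   /* Mirrors the LSHIFT_EXPR/RSHIFT_EXPR pair; relies on the usual
      arithmetic right shift of signed values.  */
   return (temp << count) >> count;
 }
 #endif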
5732 \f
5733 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5734 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5735 codes and find the ultimate containing object, which we return.
5736
5737 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5738 bit position, and *PUNSIGNEDP to the signedness of the field.
5739 If the position of the field is variable, we store a tree
5740 giving the variable offset (in units) in *POFFSET.
5741 This offset is in addition to the bit position.
5742 If the position is not variable, we store 0 in *POFFSET.
5743
5744 If any of the extraction expressions is volatile,
5745 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5746
5747 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5748 is a mode that can be used to access the field. In that case, *PBITSIZE
5749 is redundant.
5750
5751 If the field describes a variable-sized object, *PMODE is set to
5752 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5753 this case, but the address of the object can be found. */
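 /* A hedged standalone example (plain C, not GCC internals) of the kind
    of decomposition this routine performs for a fully constant
    reference.  The structs, the use of offsetof, and the assumption of
    8-bit bytes are made up purely for the illustration.  */
 #if 0
 #include <stddef.h>
 #include <stdio.h>

 struct inner { char c[8]; };
 struct outer { int pad; struct inner in; };

 int
 main (void)
 {
   /* For a reference such as o.in.c[3], the containing object is `o',
      *PBITSIZE is 8, *POFFSET is 0 (everything is constant), and
      *PBITPOS is the constant bit position computed below.  */
   size_t bitpos = (offsetof (struct outer, in)
                    + offsetof (struct inner, c) + 3) * 8;
   printf ("bitsize = 8, bitpos = %lu, offset = 0\n", (unsigned long) bitpos);
   return 0;
 }
 #endif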
5754
5755 tree
5756 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5757 HOST_WIDE_INT *pbitpos, tree *poffset,
5758 enum machine_mode *pmode, int *punsignedp,
5759 int *pvolatilep)
5760 {
5761 tree size_tree = 0;
5762 enum machine_mode mode = VOIDmode;
5763 tree offset = size_zero_node;
5764 tree bit_offset = bitsize_zero_node;
5765 tree placeholder_ptr = 0;
5766 tree tem;
5767
5768 /* First get the mode, signedness, and size. We do this from just the
5769 outermost expression. */
5770 if (TREE_CODE (exp) == COMPONENT_REF)
5771 {
5772 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5773 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5774 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5775
5776 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5777 }
5778 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5779 {
5780 size_tree = TREE_OPERAND (exp, 1);
5781 *punsignedp = TREE_UNSIGNED (exp);
5782 }
5783 else
5784 {
5785 mode = TYPE_MODE (TREE_TYPE (exp));
5786 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5787
5788 if (mode == BLKmode)
5789 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5790 else
5791 *pbitsize = GET_MODE_BITSIZE (mode);
5792 }
5793
5794 if (size_tree != 0)
5795 {
5796 if (! host_integerp (size_tree, 1))
5797 mode = BLKmode, *pbitsize = -1;
5798 else
5799 *pbitsize = tree_low_cst (size_tree, 1);
5800 }
5801
5802 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5803 and find the ultimate containing object. */
5804 while (1)
5805 {
5806 if (TREE_CODE (exp) == BIT_FIELD_REF)
5807 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5808 else if (TREE_CODE (exp) == COMPONENT_REF)
5809 {
5810 tree field = TREE_OPERAND (exp, 1);
5811 tree this_offset = DECL_FIELD_OFFSET (field);
5812
5813 /* If this field hasn't been filled in yet, don't go
5814 past it. This should only happen when folding expressions
5815 made during type construction. */
5816 if (this_offset == 0)
5817 break;
5818 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5819 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5820
5821 offset = size_binop (PLUS_EXPR, offset, this_offset);
5822 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5823 DECL_FIELD_BIT_OFFSET (field));
5824
5825 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5826 }
5827
5828 else if (TREE_CODE (exp) == ARRAY_REF
5829 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5830 {
5831 tree index = TREE_OPERAND (exp, 1);
5832 tree array = TREE_OPERAND (exp, 0);
5833 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5834 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5835 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5836
5837 /* We assume all arrays have sizes that are a multiple of a byte.
5838 First subtract the lower bound, if any, in the type of the
5839 index, then convert to sizetype and multiply by the size of the
5840 array element. */
5841 if (low_bound != 0 && ! integer_zerop (low_bound))
5842 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5843 index, low_bound));
5844
5845 /* If the index has a self-referential type, pass it to a
5846 WITH_RECORD_EXPR; if the component size is self-referential,
5847 pass our component to one. */
5848 if (CONTAINS_PLACEHOLDER_P (index))
5849 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5850 if (CONTAINS_PLACEHOLDER_P (unit_size))
5851 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5852
5853 offset = size_binop (PLUS_EXPR, offset,
5854 size_binop (MULT_EXPR,
5855 convert (sizetype, index),
5856 unit_size));
5857 }
5858
5859 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5860 {
5861 tree new = find_placeholder (exp, &placeholder_ptr);
5862
5863 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5864 We might have been called from tree optimization where we
5865 haven't set up an object yet. */
5866 if (new == 0)
5867 break;
5868 else
5869 exp = new;
5870
5871 continue;
5872 }
5873
5874 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5875 conversions that don't change the mode, and all view conversions
5876 except those that need to "step up" the alignment. */
5877 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5878 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5879 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5880 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5881 && STRICT_ALIGNMENT
5882 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5883 < BIGGEST_ALIGNMENT)
5884 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5885 || TYPE_ALIGN_OK (TREE_TYPE
5886 (TREE_OPERAND (exp, 0))))))
5887 && ! ((TREE_CODE (exp) == NOP_EXPR
5888 || TREE_CODE (exp) == CONVERT_EXPR)
5889 && (TYPE_MODE (TREE_TYPE (exp))
5890 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5891 break;
5892
5893 /* If any reference in the chain is volatile, the effect is volatile. */
5894 if (TREE_THIS_VOLATILE (exp))
5895 *pvolatilep = 1;
5896
5897 exp = TREE_OPERAND (exp, 0);
5898 }
5899
5900 /* If OFFSET is constant, see if we can return the whole thing as a
5901 constant bit position. Otherwise, split it up. */
5902 if (host_integerp (offset, 0)
5903 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5904 bitsize_unit_node))
5905 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5906 && host_integerp (tem, 0))
5907 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5908 else
5909 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5910
5911 *pmode = mode;
5912 return exp;
5913 }
5914
5915 /* Return 1 if T is an expression that get_inner_reference handles. */
5916
5917 int
5918 handled_component_p (tree t)
5919 {
5920 switch (TREE_CODE (t))
5921 {
5922 case BIT_FIELD_REF:
5923 case COMPONENT_REF:
5924 case ARRAY_REF:
5925 case ARRAY_RANGE_REF:
5926 case NON_LVALUE_EXPR:
5927 case VIEW_CONVERT_EXPR:
5928 return 1;
5929
5930 /* ??? Sure they are handled, but get_inner_reference may return
5931 a different PBITSIZE, depending upon whether the expression is
5932 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5933 case NOP_EXPR:
5934 case CONVERT_EXPR:
5935 return (TYPE_MODE (TREE_TYPE (t))
5936 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5937
5938 default:
5939 return 0;
5940 }
5941 }
5942 \f
5943 /* Given an rtx VALUE that may contain additions and multiplications, return
5944 an equivalent value that just refers to a register, memory, or constant.
5945 This is done by generating instructions to perform the arithmetic and
5946 returning a pseudo-register containing the value.
5947
5948 The returned value may be a REG, SUBREG, MEM or constant. */
5949
5950 rtx
5951 force_operand (rtx value, rtx target)
5952 {
5953 rtx op1, op2;
5954 /* Use subtarget as the target for operand 0 of a binary operation. */
5955 rtx subtarget = get_subtarget (target);
5956 enum rtx_code code = GET_CODE (value);
5957
5958 /* Check for a PIC address load. */
5959 if ((code == PLUS || code == MINUS)
5960 && XEXP (value, 0) == pic_offset_table_rtx
5961 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5962 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5963 || GET_CODE (XEXP (value, 1)) == CONST))
5964 {
5965 if (!subtarget)
5966 subtarget = gen_reg_rtx (GET_MODE (value));
5967 emit_move_insn (subtarget, value);
5968 return subtarget;
5969 }
5970
5971 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5972 {
5973 if (!target)
5974 target = gen_reg_rtx (GET_MODE (value));
5975 convert_move (target, force_operand (XEXP (value, 0), NULL),
5976 code == ZERO_EXTEND);
5977 return target;
5978 }
5979
5980 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5981 {
5982 op2 = XEXP (value, 1);
5983 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5984 subtarget = 0;
5985 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5986 {
5987 code = PLUS;
5988 op2 = negate_rtx (GET_MODE (value), op2);
5989 }
5990
5991 /* Check for an addition with OP2 a constant integer and our first
5992 operand a PLUS of a virtual register and something else. In that
5993 case, we want to emit the sum of the virtual register and the
5994 constant first and then add the other value. This allows virtual
5995 register instantiation to simply modify the constant rather than
5996 creating another one around this addition. */
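       /* In plain arithmetic the transformation is just reassociation:
          (V + X) + C is computed as (V + C) + X so the constant can later
          be folded into V's replacement.  A hedged sketch with a made-up
          helper name:  */
 #if 0
       static long
       reassociate (long v, long x, long c)
       {
         long temp = v + c;   /* first expand_simple_binop call  */
         return temp + x;     /* second expand_simple_binop call */
       }
 #endif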
5997 if (code == PLUS && GET_CODE (op2) == CONST_INT
5998 && GET_CODE (XEXP (value, 0)) == PLUS
5999 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6000 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6001 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6002 {
6003 rtx temp = expand_simple_binop (GET_MODE (value), code,
6004 XEXP (XEXP (value, 0), 0), op2,
6005 subtarget, 0, OPTAB_LIB_WIDEN);
6006 return expand_simple_binop (GET_MODE (value), code, temp,
6007 force_operand (XEXP (XEXP (value,
6008 0), 1), 0),
6009 target, 0, OPTAB_LIB_WIDEN);
6010 }
6011
6012 op1 = force_operand (XEXP (value, 0), subtarget);
6013 op2 = force_operand (op2, NULL_RTX);
6014 switch (code)
6015 {
6016 case MULT:
6017 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6018 case DIV:
6019 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6020 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6021 target, 1, OPTAB_LIB_WIDEN);
6022 else
6023 return expand_divmod (0,
6024 FLOAT_MODE_P (GET_MODE (value))
6025 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6026 GET_MODE (value), op1, op2, target, 0);
6027 break;
6028 case MOD:
6029 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6030 target, 0);
6031 break;
6032 case UDIV:
6033 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6034 target, 1);
6035 break;
6036 case UMOD:
6037 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6038 target, 1);
6039 break;
6040 case ASHIFTRT:
6041 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6042 target, 0, OPTAB_LIB_WIDEN);
6043 break;
6044 default:
6045 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6046 target, 1, OPTAB_LIB_WIDEN);
6047 }
6048 }
6049 if (GET_RTX_CLASS (code) == '1')
6050 {
6051 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6052 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6053 }
6054
6055 #ifdef INSN_SCHEDULING
6056 /* On machines that have insn scheduling, we want all memory references to be
6057 explicit, so we need to deal with such paradoxical SUBREGs. */
6058 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6059 && (GET_MODE_SIZE (GET_MODE (value))
6060 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6061 value
6062 = simplify_gen_subreg (GET_MODE (value),
6063 force_reg (GET_MODE (SUBREG_REG (value)),
6064 force_operand (SUBREG_REG (value),
6065 NULL_RTX)),
6066 GET_MODE (SUBREG_REG (value)),
6067 SUBREG_BYTE (value));
6068 #endif
6069
6070 return value;
6071 }
6072 \f
6073 /* Subroutine of expand_expr: return nonzero iff there is no way that
6074 EXP can reference X, which is being modified. TOP_P is nonzero if this
6075 call is going to be used to determine whether we need a temporary
6076 for EXP, as opposed to a recursive call to this function.
6077
6078 It is always safe for this routine to return zero since it merely
6079 searches for optimization opportunities. */
6080
6081 int
6082 safe_from_p (rtx x, tree exp, int top_p)
6083 {
6084 rtx exp_rtl = 0;
6085 int i, nops;
6086 static tree save_expr_list;
6087
6088 if (x == 0
6089 /* If EXP has varying size, we MUST use a target since we currently
6090 have no way of allocating temporaries of variable size
6091 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6092 So we assume here that something at a higher level has prevented a
6093 clash. This is somewhat bogus, but the best we can do. Only
6094 do this when X is BLKmode and when we are at the top level. */
6095 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6096 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6097 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6098 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6099 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6100 != INTEGER_CST)
6101 && GET_MODE (x) == BLKmode)
6102 /* If X is in the outgoing argument area, it is always safe. */
6103 || (GET_CODE (x) == MEM
6104 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6105 || (GET_CODE (XEXP (x, 0)) == PLUS
6106 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6107 return 1;
6108
6109 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6110 find the underlying pseudo. */
6111 if (GET_CODE (x) == SUBREG)
6112 {
6113 x = SUBREG_REG (x);
6114 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6115 return 0;
6116 }
6117
6118 /* A SAVE_EXPR might appear many times in the expression passed to the
6119 top-level safe_from_p call, and if it has a complex subexpression,
6120 examining it multiple times could result in a combinatorial explosion.
6121 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6122 with optimization took about 28 minutes to compile -- even though it was
6123 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6124 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6125 we have processed. Note that the only test of top_p was above. */
6126
6127 if (top_p)
6128 {
6129 int rtn;
6130 tree t;
6131
6132 save_expr_list = 0;
6133
6134 rtn = safe_from_p (x, exp, 0);
6135
6136 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6137 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6138
6139 return rtn;
6140 }
6141
6142 /* Now look at our tree code and possibly recurse. */
6143 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6144 {
6145 case 'd':
6146 exp_rtl = DECL_RTL_IF_SET (exp);
6147 break;
6148
6149 case 'c':
6150 return 1;
6151
6152 case 'x':
6153 if (TREE_CODE (exp) == TREE_LIST)
6154 {
6155 while (1)
6156 {
6157 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6158 return 0;
6159 exp = TREE_CHAIN (exp);
6160 if (!exp)
6161 return 1;
6162 if (TREE_CODE (exp) != TREE_LIST)
6163 return safe_from_p (x, exp, 0);
6164 }
6165 }
6166 else if (TREE_CODE (exp) == ERROR_MARK)
6167 return 1; /* An already-visited SAVE_EXPR? */
6168 else
6169 return 0;
6170
6171 case '2':
6172 case '<':
6173 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6174 return 0;
6175 /* FALLTHRU */
6176
6177 case '1':
6178 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6179
6180 case 'e':
6181 case 'r':
6182 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6183 the expression. If it is set, we conflict iff we are that rtx or
6184 both are in memory. Otherwise, we check all operands of the
6185 expression recursively. */
6186
6187 switch (TREE_CODE (exp))
6188 {
6189 case ADDR_EXPR:
6190 /* If the operand is static or we are static, we can't conflict.
6191 Likewise if we don't conflict with the operand at all. */
6192 if (staticp (TREE_OPERAND (exp, 0))
6193 || TREE_STATIC (exp)
6194 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6195 return 1;
6196
6197 /* Otherwise, the only way this can conflict is if we are taking
6198 the address of a DECL whose address is part of X, which is
6199 very rare. */
6200 exp = TREE_OPERAND (exp, 0);
6201 if (DECL_P (exp))
6202 {
6203 if (!DECL_RTL_SET_P (exp)
6204 || GET_CODE (DECL_RTL (exp)) != MEM)
6205 return 0;
6206 else
6207 exp_rtl = XEXP (DECL_RTL (exp), 0);
6208 }
6209 break;
6210
6211 case INDIRECT_REF:
6212 if (GET_CODE (x) == MEM
6213 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6214 get_alias_set (exp)))
6215 return 0;
6216 break;
6217
6218 case CALL_EXPR:
6219 /* Assume that the call will clobber all hard registers and
6220 all of memory. */
6221 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6222 || GET_CODE (x) == MEM)
6223 return 0;
6224 break;
6225
6226 case RTL_EXPR:
6227 /* If a sequence exists, we would have to scan every instruction
6228 in the sequence to see if it was safe. This is probably not
6229 worthwhile. */
6230 if (RTL_EXPR_SEQUENCE (exp))
6231 return 0;
6232
6233 exp_rtl = RTL_EXPR_RTL (exp);
6234 break;
6235
6236 case WITH_CLEANUP_EXPR:
6237 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6238 break;
6239
6240 case CLEANUP_POINT_EXPR:
6241 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6242
6243 case SAVE_EXPR:
6244 exp_rtl = SAVE_EXPR_RTL (exp);
6245 if (exp_rtl)
6246 break;
6247
6248 /* If we've already scanned this, don't do it again. Otherwise,
6249 show we've scanned it and record it so that we can clear the
6250 flag when we're done. */
6251 if (TREE_PRIVATE (exp))
6252 return 1;
6253
6254 TREE_PRIVATE (exp) = 1;
6255 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6256 {
6257 TREE_PRIVATE (exp) = 0;
6258 return 0;
6259 }
6260
6261 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6262 return 1;
6263
6264 case BIND_EXPR:
6265 /* The only operand we look at is operand 1. The rest aren't
6266 part of the expression. */
6267 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6268
6269 case METHOD_CALL_EXPR:
6270 /* This takes an rtx argument, but shouldn't appear here. */
6271 abort ();
6272
6273 default:
6274 break;
6275 }
6276
6277 /* If we have an rtx, we do not need to scan our operands. */
6278 if (exp_rtl)
6279 break;
6280
6281 nops = first_rtl_op (TREE_CODE (exp));
6282 for (i = 0; i < nops; i++)
6283 if (TREE_OPERAND (exp, i) != 0
6284 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6285 return 0;
6286
6287 /* If this is a language-specific tree code, it may require
6288 special handling. */
6289 if ((unsigned int) TREE_CODE (exp)
6290 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6291 && !(*lang_hooks.safe_from_p) (x, exp))
6292 return 0;
6293 }
6294
6295 /* If we have an rtl, find any enclosed object. Then see if we conflict
6296 with it. */
6297 if (exp_rtl)
6298 {
6299 if (GET_CODE (exp_rtl) == SUBREG)
6300 {
6301 exp_rtl = SUBREG_REG (exp_rtl);
6302 if (GET_CODE (exp_rtl) == REG
6303 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6304 return 0;
6305 }
6306
6307 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6308 are memory and they conflict. */
6309 return ! (rtx_equal_p (x, exp_rtl)
6310 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6311 && true_dependence (exp_rtl, VOIDmode, x,
6312 rtx_addr_varies_p)));
6313 }
6314
6315 /* If we reach here, it is safe. */
6316 return 1;
6317 }
6318
6319 /* Subroutine of expand_expr: return rtx if EXP is a
6320 variable or parameter; else return 0. */
6321
6322 static rtx
6323 var_rtx (tree exp)
6324 {
6325 STRIP_NOPS (exp);
6326 switch (TREE_CODE (exp))
6327 {
6328 case PARM_DECL:
6329 case VAR_DECL:
6330 return DECL_RTL (exp);
6331 default:
6332 return 0;
6333 }
6334 }
6335
6336 #ifdef MAX_INTEGER_COMPUTATION_MODE
6337
6338 void
6339 check_max_integer_computation_mode (tree exp)
6340 {
6341 enum tree_code code;
6342 enum machine_mode mode;
6343
6344 /* Strip any NOPs that don't change the mode. */
6345 STRIP_NOPS (exp);
6346 code = TREE_CODE (exp);
6347
6348 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6349 if (code == NOP_EXPR
6350 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6351 return;
6352
6353 /* First check the type of the overall operation. We need only look at
6354 unary, binary and relational operations. */
6355 if (TREE_CODE_CLASS (code) == '1'
6356 || TREE_CODE_CLASS (code) == '2'
6357 || TREE_CODE_CLASS (code) == '<')
6358 {
6359 mode = TYPE_MODE (TREE_TYPE (exp));
6360 if (GET_MODE_CLASS (mode) == MODE_INT
6361 && mode > MAX_INTEGER_COMPUTATION_MODE)
6362 internal_error ("unsupported wide integer operation");
6363 }
6364
6365 /* Check operand of a unary op. */
6366 if (TREE_CODE_CLASS (code) == '1')
6367 {
6368 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6369 if (GET_MODE_CLASS (mode) == MODE_INT
6370 && mode > MAX_INTEGER_COMPUTATION_MODE)
6371 internal_error ("unsupported wide integer operation");
6372 }
6373
6374 /* Check operands of a binary/comparison op. */
6375 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6376 {
6377 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6378 if (GET_MODE_CLASS (mode) == MODE_INT
6379 && mode > MAX_INTEGER_COMPUTATION_MODE)
6380 internal_error ("unsupported wide integer operation");
6381
6382 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6383 if (GET_MODE_CLASS (mode) == MODE_INT
6384 && mode > MAX_INTEGER_COMPUTATION_MODE)
6385 internal_error ("unsupported wide integer operation");
6386 }
6387 }
6388 #endif
6389 \f
6390 /* Return the highest power of two that EXP is known to be a multiple of.
6391 This is used in updating alignment of MEMs in array references. */
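 /* A minimal standalone sketch (plain C, not GCC code) of the identity
    the INTEGER_CST case below relies on: N & -N isolates the lowest set
    bit of N, which is the largest power of two dividing N.  The helper
    name is made up for the example.  */
 #if 0
 #include <stdio.h>

 static unsigned long
 lowest_set_bit (unsigned long n)
 {
   /* Yields 0 when N is 0, the case the code below maps to
      BIGGEST_ALIGNMENT.  */
   return n & -n;
 }

 int
 main (void)
 {
   printf ("%lu %lu %lu\n",
           lowest_set_bit (24),   /* 8 */
           lowest_set_bit (40),   /* 8 */
           lowest_set_bit (7));   /* 1 */
   return 0;
 }
 #endif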
6392
6393 static unsigned HOST_WIDE_INT
6394 highest_pow2_factor (tree exp)
6395 {
6396 unsigned HOST_WIDE_INT c0, c1;
6397
6398 switch (TREE_CODE (exp))
6399 {
6400 case INTEGER_CST:
6401 /* We can find the lowest bit that's a one. If the low
6402 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6403 We need to handle this case since we can find it in a COND_EXPR,
6404 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6405 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6406 later ICE. */
6407 if (TREE_CONSTANT_OVERFLOW (exp))
6408 return BIGGEST_ALIGNMENT;
6409 else
6410 {
6411 /* Note: tree_low_cst is intentionally not used here,
6412 we don't care about the upper bits. */
6413 c0 = TREE_INT_CST_LOW (exp);
6414 c0 &= -c0;
6415 return c0 ? c0 : BIGGEST_ALIGNMENT;
6416 }
6417 break;
6418
6419 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6420 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6421 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6422 return MIN (c0, c1);
6423
6424 case MULT_EXPR:
6425 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6426 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6427 return c0 * c1;
6428
6429 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6430 case CEIL_DIV_EXPR:
6431 if (integer_pow2p (TREE_OPERAND (exp, 1))
6432 && host_integerp (TREE_OPERAND (exp, 1), 1))
6433 {
6434 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6435 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6436 return MAX (1, c0 / c1);
6437 }
6438 break;
6439
6440 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6441 case SAVE_EXPR: case WITH_RECORD_EXPR:
6442 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6443
6444 case COMPOUND_EXPR:
6445 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6446
6447 case COND_EXPR:
6448 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6449 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6450 return MIN (c0, c1);
6451
6452 default:
6453 break;
6454 }
6455
6456 return 1;
6457 }
6458
6459 /* Similar, except that it is known that the expression must be a multiple
6460 of the alignment of TYPE. */
6461
6462 static unsigned HOST_WIDE_INT
6463 highest_pow2_factor_for_type (tree type, tree exp)
6464 {
6465 unsigned HOST_WIDE_INT type_align, factor;
6466
6467 factor = highest_pow2_factor (exp);
6468 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6469 return MAX (factor, type_align);
6470 }
6471 \f
6472 /* Return an object on the placeholder list that matches EXP, a
6473 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6474 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6475 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6476 is a location which initially points to a starting location in the
6477 placeholder list (zero means start of the list) and where a pointer into
6478 the placeholder list at which the object is found is placed. */
6479
6480 tree
6481 find_placeholder (tree exp, tree *plist)
6482 {
6483 tree type = TREE_TYPE (exp);
6484 tree placeholder_expr;
6485
6486 for (placeholder_expr
6487 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6488 placeholder_expr != 0;
6489 placeholder_expr = TREE_CHAIN (placeholder_expr))
6490 {
6491 tree need_type = TYPE_MAIN_VARIANT (type);
6492 tree elt;
6493
6494 /* Find the outermost reference that is of the type we want. If none,
6495 see if any object has a type that is a pointer to the type we
6496 want. */
6497 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6498 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6499 || TREE_CODE (elt) == COND_EXPR)
6500 ? TREE_OPERAND (elt, 1)
6501 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6502 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6503 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6504 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6505 ? TREE_OPERAND (elt, 0) : 0))
6506 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6507 {
6508 if (plist)
6509 *plist = placeholder_expr;
6510 return elt;
6511 }
6512
6513 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6514 elt
6515 = ((TREE_CODE (elt) == COMPOUND_EXPR
6516 || TREE_CODE (elt) == COND_EXPR)
6517 ? TREE_OPERAND (elt, 1)
6518 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6519 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6520 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6521 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6522 ? TREE_OPERAND (elt, 0) : 0))
6523 if (POINTER_TYPE_P (TREE_TYPE (elt))
6524 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6525 == need_type))
6526 {
6527 if (plist)
6528 *plist = placeholder_expr;
6529 return build1 (INDIRECT_REF, need_type, elt);
6530 }
6531 }
6532
6533 return 0;
6534 }
6535 \f
6536 /* expand_expr: generate code for computing expression EXP.
6537 An rtx for the computed value is returned. The value is never null.
6538 In the case of a void EXP, const0_rtx is returned.
6539
6540 The value may be stored in TARGET if TARGET is nonzero.
6541 TARGET is just a suggestion; callers must assume that
6542 the rtx returned may not be the same as TARGET.
6543
6544 If TARGET is CONST0_RTX, it means that the value will be ignored.
6545
6546 If TMODE is not VOIDmode, it suggests generating the
6547 result in mode TMODE. But this is done only when convenient.
6548 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6549 TMODE is just a suggestion; callers must assume that
6550 the rtx returned may not have mode TMODE.
6551
6552 Note that TARGET may have neither TMODE nor MODE. In that case, it
6553 probably will not be used.
6554
6555 If MODIFIER is EXPAND_SUM then when EXP is an addition
6556 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6557 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6558 products as above, or REG or MEM, or constant.
6559 Ordinarily in such cases we would output mul or add instructions
6560 and then return a pseudo reg containing the sum.
6561
6562 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6563 it also marks a label as absolutely required (it can't be dead).
6564 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6565 This is used for outputting expressions used in initializers.
6566
6567 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6568 with a constant address even if that address is not normally legitimate.
6569 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6570
6571 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6572 a call parameter. Such targets require special care as we haven't yet
6573 marked TARGET so that it's safe from being trashed by libcalls. We
6574 don't want to use TARGET for anything but the final result;
6575 intermediate values must go elsewhere. Additionally, calls to
6576 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6577
6578 rtx
6579 expand_expr (tree exp, rtx target, enum machine_mode tmode, enum expand_modifier modifier)
6580 {
6581 rtx op0, op1, temp;
6582 tree type = TREE_TYPE (exp);
6583 int unsignedp = TREE_UNSIGNED (type);
6584 enum machine_mode mode;
6585 enum tree_code code = TREE_CODE (exp);
6586 optab this_optab;
6587 rtx subtarget, original_target;
6588 int ignore;
6589 tree context;
6590
6591 /* Handle ERROR_MARK before anybody tries to access its type. */
6592 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6593 {
6594 op0 = CONST0_RTX (tmode);
6595 if (op0 != 0)
6596 return op0;
6597 return const0_rtx;
6598 }
6599
6600 mode = TYPE_MODE (type);
6601 /* Use subtarget as the target for operand 0 of a binary operation. */
6602 subtarget = get_subtarget (target);
6603 original_target = target;
6604 ignore = (target == const0_rtx
6605 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6606 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6607 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6608 && TREE_CODE (type) == VOID_TYPE));
6609
6610 /* If we are going to ignore this result, we need only do something
6611 if there is a side-effect somewhere in the expression. If there
6612 is, short-circuit the most common cases here. Note that we must
6613 not call expand_expr with anything but const0_rtx in case this
6614 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6615
6616 if (ignore)
6617 {
6618 if (! TREE_SIDE_EFFECTS (exp))
6619 return const0_rtx;
6620
6621 /* Ensure we reference a volatile object even if value is ignored, but
6622 don't do this if all we are doing is taking its address. */
6623 if (TREE_THIS_VOLATILE (exp)
6624 && TREE_CODE (exp) != FUNCTION_DECL
6625 && mode != VOIDmode && mode != BLKmode
6626 && modifier != EXPAND_CONST_ADDRESS)
6627 {
6628 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6629 if (GET_CODE (temp) == MEM)
6630 temp = copy_to_reg (temp);
6631 return const0_rtx;
6632 }
6633
6634 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6635 || code == INDIRECT_REF || code == BUFFER_REF)
6636 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6637 modifier);
6638
6639 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6640 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6641 {
6642 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6643 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6644 return const0_rtx;
6645 }
6646 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6647 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6648 /* If the second operand has no side effects, just evaluate
6649 the first. */
6650 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6651 modifier);
6652 else if (code == BIT_FIELD_REF)
6653 {
6654 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6655 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6656 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6657 return const0_rtx;
6658 }
6659
6660 target = 0;
6661 }
6662
6663 #ifdef MAX_INTEGER_COMPUTATION_MODE
6664 /* Only check stuff here if the mode we want is different from the mode
6665 of the expression; if it's the same, check_max_integer_computation_mode
6666 will handle it. Do we really need to check this stuff at all? */
6667
6668 if (target
6669 && GET_MODE (target) != mode
6670 && TREE_CODE (exp) != INTEGER_CST
6671 && TREE_CODE (exp) != PARM_DECL
6672 && TREE_CODE (exp) != ARRAY_REF
6673 && TREE_CODE (exp) != ARRAY_RANGE_REF
6674 && TREE_CODE (exp) != COMPONENT_REF
6675 && TREE_CODE (exp) != BIT_FIELD_REF
6676 && TREE_CODE (exp) != INDIRECT_REF
6677 && TREE_CODE (exp) != CALL_EXPR
6678 && TREE_CODE (exp) != VAR_DECL
6679 && TREE_CODE (exp) != RTL_EXPR)
6680 {
6681 enum machine_mode mode = GET_MODE (target);
6682
6683 if (GET_MODE_CLASS (mode) == MODE_INT
6684 && mode > MAX_INTEGER_COMPUTATION_MODE)
6685 internal_error ("unsupported wide integer operation");
6686 }
6687
6688 if (tmode != mode
6689 && TREE_CODE (exp) != INTEGER_CST
6690 && TREE_CODE (exp) != PARM_DECL
6691 && TREE_CODE (exp) != ARRAY_REF
6692 && TREE_CODE (exp) != ARRAY_RANGE_REF
6693 && TREE_CODE (exp) != COMPONENT_REF
6694 && TREE_CODE (exp) != BIT_FIELD_REF
6695 && TREE_CODE (exp) != INDIRECT_REF
6696 && TREE_CODE (exp) != VAR_DECL
6697 && TREE_CODE (exp) != CALL_EXPR
6698 && TREE_CODE (exp) != RTL_EXPR
6699 && GET_MODE_CLASS (tmode) == MODE_INT
6700 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6701 internal_error ("unsupported wide integer operation");
6702
6703 check_max_integer_computation_mode (exp);
6704 #endif
6705
6706 /* If we are going to do cse, generate all results into pseudo registers
6707 since 1) that allows cse to find more things
6708 and 2) otherwise cse could produce an insn the machine
6709 cannot support. An exception is a CONSTRUCTOR into a multi-word
6710 MEM: that's much more likely to be most efficient into the MEM.
6711 Another is a CALL_EXPR which must return in memory. */
6712
6713 if (! cse_not_expected && mode != BLKmode && target
6714 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6715 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6716 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6717 target = 0;
6718
6719 switch (code)
6720 {
6721 case LABEL_DECL:
6722 {
6723 tree function = decl_function_context (exp);
6724 /* Labels in containing functions, or labels used from initializers,
6725 must be forced. */
6726 if (modifier == EXPAND_INITIALIZER
6727 || (function != current_function_decl
6728 && function != inline_function_decl
6729 && function != 0))
6730 temp = force_label_rtx (exp);
6731 else
6732 temp = label_rtx (exp);
6733
6734 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6735 if (function != current_function_decl
6736 && function != inline_function_decl && function != 0)
6737 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6738 return temp;
6739 }
6740
6741 case PARM_DECL:
6742 if (!DECL_RTL_SET_P (exp))
6743 {
6744 error_with_decl (exp, "prior parameter's size depends on `%s'");
6745 return CONST0_RTX (mode);
6746 }
6747
6748 /* ... fall through ... */
6749
6750 case VAR_DECL:
6751 /* If a static var's type was incomplete when the decl was written,
6752 but the type is complete now, lay out the decl now. */
6753 if (DECL_SIZE (exp) == 0
6754 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6755 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6756 layout_decl (exp, 0);
6757
6758 /* ... fall through ... */
6759
6760 case FUNCTION_DECL:
6761 case RESULT_DECL:
6762 if (DECL_RTL (exp) == 0)
6763 abort ();
6764
6765 /* Ensure the variable is marked as used even if it doesn't go through
6766 a parser. If it hasn't been used yet, write out an external
6767 definition. */
6768 if (! TREE_USED (exp))
6769 {
6770 assemble_external (exp);
6771 TREE_USED (exp) = 1;
6772 }
6773
6774 /* Show we haven't gotten RTL for this yet. */
6775 temp = 0;
6776
6777 /* Handle variables inherited from containing functions. */
6778 context = decl_function_context (exp);
6779
6780 /* We treat inline_function_decl as an alias for the current function
6781 because that is the inline function whose vars, types, etc.
6782 are being merged into the current function.
6783 See expand_inline_function. */
6784
6785 if (context != 0 && context != current_function_decl
6786 && context != inline_function_decl
6787 /* If var is static, we don't need a static chain to access it. */
6788 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6789 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6790 {
6791 rtx addr;
6792
6793 /* Mark as non-local and addressable. */
6794 DECL_NONLOCAL (exp) = 1;
6795 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6796 abort ();
6797 (*lang_hooks.mark_addressable) (exp);
6798 if (GET_CODE (DECL_RTL (exp)) != MEM)
6799 abort ();
6800 addr = XEXP (DECL_RTL (exp), 0);
6801 if (GET_CODE (addr) == MEM)
6802 addr
6803 = replace_equiv_address (addr,
6804 fix_lexical_addr (XEXP (addr, 0), exp));
6805 else
6806 addr = fix_lexical_addr (addr, exp);
6807
6808 temp = replace_equiv_address (DECL_RTL (exp), addr);
6809 }
6810
6811 /* This is the case of an array whose size is to be determined
6812 from its initializer, while the initializer is still being parsed.
6813 See expand_decl. */
6814
6815 else if (GET_CODE (DECL_RTL (exp)) == MEM
6816 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6817 temp = validize_mem (DECL_RTL (exp));
6818
6819 /* If DECL_RTL is memory, we are in the normal case and either
6820 the address is not valid or it is not a register and -fforce-addr
6821 is specified, get the address into a register. */
6822
6823 else if (GET_CODE (DECL_RTL (exp)) == MEM
6824 && modifier != EXPAND_CONST_ADDRESS
6825 && modifier != EXPAND_SUM
6826 && modifier != EXPAND_INITIALIZER
6827 && (! memory_address_p (DECL_MODE (exp),
6828 XEXP (DECL_RTL (exp), 0))
6829 || (flag_force_addr
6830 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6831 temp = replace_equiv_address (DECL_RTL (exp),
6832 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6833
6834 /* If we got something, return it. But first, set the alignment
6835 if the address is a register. */
6836 if (temp != 0)
6837 {
6838 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6839 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6840
6841 return temp;
6842 }
6843
6844 /* If the mode of DECL_RTL does not match that of the decl, it
6845 must be a promoted value. We return a SUBREG of the wanted mode,
6846 but mark it so that we know that it was already extended. */
6847
6848 if (GET_CODE (DECL_RTL (exp)) == REG
6849 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6850 {
6851 /* Get the signedness used for this variable. Ensure we get the
6852 same mode we got when the variable was declared. */
6853 if (GET_MODE (DECL_RTL (exp))
6854 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6855 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6856 abort ();
6857
6858 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6859 SUBREG_PROMOTED_VAR_P (temp) = 1;
6860 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6861 return temp;
6862 }
6863
6864 return DECL_RTL (exp);
6865
6866 case INTEGER_CST:
6867 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6868 TREE_INT_CST_HIGH (exp), mode);
6869
6870 /* ??? If overflow is set, fold will have done an incomplete job,
6871 which can result in (plus xx (const_int 0)), which can get
6872 simplified by validate_replace_rtx during virtual register
6873 instantiation, which can result in unrecognizable insns.
6874 Avoid this by forcing all overflows into registers. */
6875 if (TREE_CONSTANT_OVERFLOW (exp)
6876 && modifier != EXPAND_INITIALIZER)
6877 temp = force_reg (mode, temp);
6878
6879 return temp;
6880
6881 case VECTOR_CST:
6882 return const_vector_from_tree (exp);
6883
6884 case CONST_DECL:
6885 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6886
6887 case REAL_CST:
6888 /* If optimized, generate immediate CONST_DOUBLE
6889 which will be turned into memory by reload if necessary.
6890
6891 We used to force a register so that loop.c could see it. But
6892 this does not allow gen_* patterns to perform optimizations with
6893 the constants. It also produces two insns in cases like "x = 1.0;".
6894 On most machines, floating-point constants are not permitted in
6895 many insns, so we'd end up copying it to a register in any case.
6896
6897 Now, we do the copying in expand_binop, if appropriate. */
6898 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6899 TYPE_MODE (TREE_TYPE (exp)));
6900
6901 case COMPLEX_CST:
6902 /* Handle evaluating a complex constant in a CONCAT target. */
6903 if (original_target && GET_CODE (original_target) == CONCAT)
6904 {
6905 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6906 rtx rtarg, itarg;
6907
6908 rtarg = XEXP (original_target, 0);
6909 itarg = XEXP (original_target, 1);
6910
6911 /* Move the real and imaginary parts separately. */
6912 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6913 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6914
6915 if (op0 != rtarg)
6916 emit_move_insn (rtarg, op0);
6917 if (op1 != itarg)
6918 emit_move_insn (itarg, op1);
6919
6920 return original_target;
6921 }
6922
6923 /* ... fall through ... */
6924
6925 case STRING_CST:
6926 temp = output_constant_def (exp, 1);
6927
6928 /* temp contains a constant address.
6929 On RISC machines where a constant address isn't valid,
6930 make some insns to get that address into a register. */
6931 if (modifier != EXPAND_CONST_ADDRESS
6932 && modifier != EXPAND_INITIALIZER
6933 && modifier != EXPAND_SUM
6934 && (! memory_address_p (mode, XEXP (temp, 0))
6935 || flag_force_addr))
6936 return replace_equiv_address (temp,
6937 copy_rtx (XEXP (temp, 0)));
6938 return temp;
6939
6940 case EXPR_WITH_FILE_LOCATION:
6941 {
6942 rtx to_return;
6943 location_t saved_loc = input_location;
6944 input_filename = EXPR_WFL_FILENAME (exp);
6945 input_line = EXPR_WFL_LINENO (exp);
6946 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6947 emit_line_note (input_location);
6948 /* Possibly avoid switching back and forth here. */
6949 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6950 input_location = saved_loc;
6951 return to_return;
6952 }
6953
6954 case SAVE_EXPR:
6955 context = decl_function_context (exp);
6956
6957 /* If this SAVE_EXPR was at global context, assume we are an
6958 initialization function and move it into our context. */
6959 if (context == 0)
6960 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6961
6962 /* We treat inline_function_decl as an alias for the current function
6963 because that is the inline function whose vars, types, etc.
6964 are being merged into the current function.
6965 See expand_inline_function. */
6966 if (context == current_function_decl || context == inline_function_decl)
6967 context = 0;
6968
6969 /* If this is non-local, handle it. */
6970 if (context)
6971 {
6972 /* The following call just exists to abort if the context is
6973 not of a containing function. */
6974 find_function_data (context);
6975
6976 temp = SAVE_EXPR_RTL (exp);
6977 if (temp && GET_CODE (temp) == REG)
6978 {
6979 put_var_into_stack (exp, /*rescan=*/true);
6980 temp = SAVE_EXPR_RTL (exp);
6981 }
6982 if (temp == 0 || GET_CODE (temp) != MEM)
6983 abort ();
6984 return
6985 replace_equiv_address (temp,
6986 fix_lexical_addr (XEXP (temp, 0), exp));
6987 }
6988 if (SAVE_EXPR_RTL (exp) == 0)
6989 {
6990 if (mode == VOIDmode)
6991 temp = const0_rtx;
6992 else
6993 temp = assign_temp (build_qualified_type (type,
6994 (TYPE_QUALS (type)
6995 | TYPE_QUAL_CONST)),
6996 3, 0, 0);
6997
6998 SAVE_EXPR_RTL (exp) = temp;
6999 if (!optimize && GET_CODE (temp) == REG)
7000 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7001 save_expr_regs);
7002
7003 /* If the mode of TEMP does not match that of the expression, it
7004 must be a promoted value. We pass store_expr a SUBREG of the
7005 wanted mode but mark it so that we know that it was already
7006 extended. */
7007
7008 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7009 {
7010 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7011 promote_mode (type, mode, &unsignedp, 0);
7012 SUBREG_PROMOTED_VAR_P (temp) = 1;
7013 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7014 }
7015
7016 if (temp == const0_rtx)
7017 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7018 else
7019 store_expr (TREE_OPERAND (exp, 0), temp,
7020 modifier == EXPAND_STACK_PARM ? 2 : 0);
7021
7022 TREE_USED (exp) = 1;
7023 }
7024
7025 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7026 must be a promoted value. We return a SUBREG of the wanted mode,
7027 but mark it so that we know that it was already extended. */
7028
7029 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7030 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7031 {
7032 /* Compute the signedness and make the proper SUBREG. */
7033 promote_mode (type, mode, &unsignedp, 0);
7034 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7035 SUBREG_PROMOTED_VAR_P (temp) = 1;
7036 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7037 return temp;
7038 }
7039
7040 return SAVE_EXPR_RTL (exp);
7041
7042 case UNSAVE_EXPR:
7043 {
7044 rtx temp;
7045 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7046 TREE_OPERAND (exp, 0)
7047 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7048 return temp;
7049 }
7050
7051 case PLACEHOLDER_EXPR:
7052 {
7053 tree old_list = placeholder_list;
7054 tree placeholder_expr = 0;
7055
7056 exp = find_placeholder (exp, &placeholder_expr);
7057 if (exp == 0)
7058 abort ();
7059
7060 placeholder_list = TREE_CHAIN (placeholder_expr);
7061 temp = expand_expr (exp, original_target, tmode, modifier);
7062 placeholder_list = old_list;
7063 return temp;
7064 }
7065
7066 case WITH_RECORD_EXPR:
7067 /* Put the object on the placeholder list, expand our first operand,
7068 and pop the list. */
7069 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7070 placeholder_list);
7071 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7072 modifier);
7073 placeholder_list = TREE_CHAIN (placeholder_list);
7074 return target;
7075
7076 case GOTO_EXPR:
7077 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7078 expand_goto (TREE_OPERAND (exp, 0));
7079 else
7080 expand_computed_goto (TREE_OPERAND (exp, 0));
7081 return const0_rtx;
7082
7083 case EXIT_EXPR:
7084 expand_exit_loop_if_false (NULL,
7085 invert_truthvalue (TREE_OPERAND (exp, 0)));
7086 return const0_rtx;
7087
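    /* LABELED_BLOCK_EXPR and EXIT_BLOCK_EXPR implement blocks with
       structured exits (used, for example, by the Java front end): expand
       the body for its side effects, then emit the block's label so that an
       EXIT_BLOCK_EXPR can branch to it.  */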
7088 case LABELED_BLOCK_EXPR:
7089 if (LABELED_BLOCK_BODY (exp))
7090 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7091 /* Should perhaps use expand_label, but this is simpler and safer. */
7092 do_pending_stack_adjust ();
7093 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7094 return const0_rtx;
7095
7096 case EXIT_BLOCK_EXPR:
7097 if (EXIT_BLOCK_RETURN (exp))
7098 sorry ("returned value in block_exit_expr");
7099 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7100 return const0_rtx;
7101
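    /* A LOOP_EXPR is an unconditional loop around its body; an EXIT_EXPR
       within the body supplies the exit test.  The push/pop_temp_slots pair
       keeps temporaries made for the body local to it.  */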
7102 case LOOP_EXPR:
7103 push_temp_slots ();
7104 expand_start_loop (1);
7105 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7106 expand_end_loop ();
7107 pop_temp_slots ();
7108
7109 return const0_rtx;
7110
7111 case BIND_EXPR:
7112 {
7113 tree vars = TREE_OPERAND (exp, 0);
7114
7115 /* Need to open a binding contour here because
7116 if there are any cleanups they must be contained here. */
7117 expand_start_bindings (2);
7118
7119 /* Mark the corresponding BLOCK for output in its proper place. */
7120 if (TREE_OPERAND (exp, 2) != 0
7121 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7122 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7123
7124 /* If VARS have not yet been expanded, expand them now. */
7125 while (vars)
7126 {
7127 if (!DECL_RTL_SET_P (vars))
7128 expand_decl (vars);
7129 expand_decl_init (vars);
7130 vars = TREE_CHAIN (vars);
7131 }
7132
7133 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7134
7135 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7136
7137 return temp;
7138 }
7139
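    /* An RTL_EXPR carries an already-generated insn sequence together with
       the rtx holding its value.  Emit the sequence at most once (it is
       replaced by const0_rtx afterwards, and a second emission aborts) and
       return the saved rtx.  */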
7140 case RTL_EXPR:
7141 if (RTL_EXPR_SEQUENCE (exp))
7142 {
7143 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7144 abort ();
7145 emit_insn (RTL_EXPR_SEQUENCE (exp));
7146 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7147 }
7148 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7149 free_temps_for_rtl_expr (exp);
7150 return RTL_EXPR_RTL (exp);
7151
7152 case CONSTRUCTOR:
7153 /* If we don't need the result, just ensure we evaluate any
7154 subexpressions. */
7155 if (ignore)
7156 {
7157 tree elt;
7158
7159 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7160 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7161
7162 return const0_rtx;
7163 }
7164
7165 /* All elts simple constants => refer to a constant in memory. But
7166 if this is a non-BLKmode mode, let it store a field at a time
7167 since that should make a CONST_INT or CONST_DOUBLE when we
7168 fold. Likewise, if we have a target we can use, it is best to
7169 store directly into the target unless the type is large enough
7170 that memcpy will be used. If we are making an initializer and
7171 all operands are constant, put it in memory as well.
7172
7173 FIXME: Avoid trying to fill vector constructors piecemeal.
7174 Output them with output_constant_def below unless we're sure
7175 they're zeros. This should go away when vector initializers
7176 are treated like VECTOR_CST instead of arrays.
7177 */
7178 else if ((TREE_STATIC (exp)
7179 && ((mode == BLKmode
7180 && ! (target != 0 && safe_from_p (target, exp, 1)))
7181 || TREE_ADDRESSABLE (exp)
7182 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7183 && (! MOVE_BY_PIECES_P
7184 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7185 TYPE_ALIGN (type)))
7186 && ((TREE_CODE (type) == VECTOR_TYPE
7187 && !is_zeros_p (exp))
7188 || ! mostly_zeros_p (exp)))))
7189 || ((modifier == EXPAND_INITIALIZER
7190 || modifier == EXPAND_CONST_ADDRESS)
7191 && TREE_CONSTANT (exp)))
7192 {
7193 rtx constructor = output_constant_def (exp, 1);
7194
7195 if (modifier != EXPAND_CONST_ADDRESS
7196 && modifier != EXPAND_INITIALIZER
7197 && modifier != EXPAND_SUM)
7198 constructor = validize_mem (constructor);
7199
7200 return constructor;
7201 }
7202 else
7203 {
7204 /* Handle calls that pass values in multiple non-contiguous
7205 locations. The Irix 6 ABI has examples of this. */
7206 if (target == 0 || ! safe_from_p (target, exp, 1)
7207 || GET_CODE (target) == PARALLEL
7208 || modifier == EXPAND_STACK_PARM)
7209 target
7210 = assign_temp (build_qualified_type (type,
7211 (TYPE_QUALS (type)
7212 | (TREE_READONLY (exp)
7213 * TYPE_QUAL_CONST))),
7214 0, TREE_ADDRESSABLE (exp), 1);
7215
7216 store_constructor (exp, target, 0, int_expr_size (exp));
7217 return target;
7218 }
7219
7220 case INDIRECT_REF:
7221 {
7222 tree exp1 = TREE_OPERAND (exp, 0);
7223 tree index;
7224 tree string = string_constant (exp1, &index);
7225
7226 /* Try to optimize reads from const strings. */
7227 if (string
7228 && TREE_CODE (string) == STRING_CST
7229 && TREE_CODE (index) == INTEGER_CST
7230 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7231 && GET_MODE_CLASS (mode) == MODE_INT
7232 && GET_MODE_SIZE (mode) == 1
7233 && modifier != EXPAND_WRITE)
7234 return gen_int_mode (TREE_STRING_POINTER (string)
7235 [TREE_INT_CST_LOW (index)], mode);
7236
7237 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7238 op0 = memory_address (mode, op0);
7239 temp = gen_rtx_MEM (mode, op0);
7240 set_mem_attributes (temp, exp, 0);
7241
7242 /* If we are writing to this object and its type is a record with
7243 readonly fields, we must mark it as readonly so it will
7244 conflict with readonly references to those fields. */
7245 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7246 RTX_UNCHANGING_P (temp) = 1;
7247
7248 return temp;
7249 }
7250
7251 case ARRAY_REF:
7252 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7253 abort ();
7254
7255 {
7256 tree array = TREE_OPERAND (exp, 0);
7257 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7258 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7259 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7260 HOST_WIDE_INT i;
7261
7262 /* Optimize the special-case of a zero lower bound.
7263
7264 We convert the low_bound to sizetype to avoid some problems
7265 with constant folding. (E.g. suppose the lower bound is 1,
7266 and its mode is QI. Without the conversion, (ARRAY
7267 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7268 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7269
7270 if (! integer_zerop (low_bound))
7271 index = size_diffop (index, convert (sizetype, low_bound));
7272
7273 /* Fold an expression like: "foo"[2].
7274 This is not done in fold so it won't happen inside &.
7275 Don't fold if this is for wide characters since it's too
7276 difficult to do correctly and this is a very rare case. */
7277
7278 if (modifier != EXPAND_CONST_ADDRESS
7279 && modifier != EXPAND_INITIALIZER
7280 && modifier != EXPAND_MEMORY
7281 && TREE_CODE (array) == STRING_CST
7282 && TREE_CODE (index) == INTEGER_CST
7283 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7284 && GET_MODE_CLASS (mode) == MODE_INT
7285 && GET_MODE_SIZE (mode) == 1)
7286 return gen_int_mode (TREE_STRING_POINTER (array)
7287 [TREE_INT_CST_LOW (index)], mode);
7288
7289 /* If this is a constant index into a constant array,
7290 just get the value from the array. Handle both the cases when
7291 we have an explicit constructor and when our operand is a variable
7292 that was declared const. */
7293
7294 if (modifier != EXPAND_CONST_ADDRESS
7295 && modifier != EXPAND_INITIALIZER
7296 && modifier != EXPAND_MEMORY
7297 && TREE_CODE (array) == CONSTRUCTOR
7298 && ! TREE_SIDE_EFFECTS (array)
7299 && TREE_CODE (index) == INTEGER_CST
7300 && 0 > compare_tree_int (index,
7301 list_length (CONSTRUCTOR_ELTS
7302 (TREE_OPERAND (exp, 0)))))
7303 {
7304 tree elem;
7305
7306 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7307 i = TREE_INT_CST_LOW (index);
7308 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7309 ;
7310
7311 if (elem)
7312 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7313 modifier);
7314 }
7315
7316 else if (optimize >= 1
7317 && modifier != EXPAND_CONST_ADDRESS
7318 && modifier != EXPAND_INITIALIZER
7319 && modifier != EXPAND_MEMORY
7320 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7321 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7322 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7323 {
7324 if (TREE_CODE (index) == INTEGER_CST)
7325 {
7326 tree init = DECL_INITIAL (array);
7327
7328 if (TREE_CODE (init) == CONSTRUCTOR)
7329 {
7330 tree elem;
7331
7332 for (elem = CONSTRUCTOR_ELTS (init);
7333 (elem
7334 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7335 elem = TREE_CHAIN (elem))
7336 ;
7337
7338 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7339 return expand_expr (fold (TREE_VALUE (elem)), target,
7340 tmode, modifier);
7341 }
7342 else if (TREE_CODE (init) == STRING_CST
7343 && 0 > compare_tree_int (index,
7344 TREE_STRING_LENGTH (init)))
7345 {
7346 tree type = TREE_TYPE (TREE_TYPE (init));
7347 enum machine_mode mode = TYPE_MODE (type);
7348
7349 if (GET_MODE_CLASS (mode) == MODE_INT
7350 && GET_MODE_SIZE (mode) == 1)
7351 return gen_int_mode (TREE_STRING_POINTER (init)
7352 [TREE_INT_CST_LOW (index)], mode);
7353 }
7354 }
7355 }
7356 }
7357 goto normal_inner_ref;
7358
7359 case COMPONENT_REF:
7360 /* If the operand is a CONSTRUCTOR, we can just extract the
7361 appropriate field if it is present. */
7362 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7363 {
7364 tree elt;
7365
7366 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7367 elt = TREE_CHAIN (elt))
7368 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7369 /* We can normally use the value of the field in the
7370 CONSTRUCTOR. However, if this is a bitfield in
7371 an integral mode that we can fit in a HOST_WIDE_INT,
7372 we must mask only the number of bits in the bitfield,
7373 since this is done implicitly by the constructor. If
7374 the bitfield does not meet either of those conditions,
7375 we can't do this optimization. */
7376 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7377 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7378 == MODE_INT)
7379 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7380 <= HOST_BITS_PER_WIDE_INT))))
7381 {
7382 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7383 && modifier == EXPAND_STACK_PARM)
7384 target = 0;
7385 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7386 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7387 {
7388 HOST_WIDE_INT bitsize
7389 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7390 enum machine_mode imode
7391 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7392
7393 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7394 {
7395 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7396 op0 = expand_and (imode, op0, op1, target);
7397 }
7398 else
7399 {
7400 tree count
7401 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7402 0);
7403
7404 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7405 target, 0);
7406 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7407 target, 0);
7408 }
7409 }
7410
7411 return op0;
7412 }
7413 }
7414 goto normal_inner_ref;
7415
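    /* All remaining component references are handled here: use
       get_inner_reference to decompose the reference into the containing
       object, the bit position and size, and any variable offset, then
       access the piece either directly as a MEM or via extract_bit_field.  */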
7416 case BIT_FIELD_REF:
7417 case ARRAY_RANGE_REF:
7418 normal_inner_ref:
7419 {
7420 enum machine_mode mode1;
7421 HOST_WIDE_INT bitsize, bitpos;
7422 tree offset;
7423 int volatilep = 0;
7424 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7425 &mode1, &unsignedp, &volatilep);
7426 rtx orig_op0;
7427
7428 /* If we got back the original object, something is wrong. Perhaps
7429 we are evaluating an expression too early. In any event, don't
7430 infinitely recurse. */
7431 if (tem == exp)
7432 abort ();
7433
7434 /* If TEM's type is a union of variable size, pass TARGET to the inner
7435 computation, since it will need a temporary and TARGET is known
7436 to be able to serve as one. This occurs in unchecked conversion in Ada. */
7437
7438 orig_op0 = op0
7439 = expand_expr (tem,
7440 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7441 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7442 != INTEGER_CST)
7443 && modifier != EXPAND_STACK_PARM
7444 ? target : NULL_RTX),
7445 VOIDmode,
7446 (modifier == EXPAND_INITIALIZER
7447 || modifier == EXPAND_CONST_ADDRESS
7448 || modifier == EXPAND_STACK_PARM)
7449 ? modifier : EXPAND_NORMAL);
7450
7451 /* If this is a constant, put it into a register if it is a
7452 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7453 if (CONSTANT_P (op0))
7454 {
7455 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7456 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7457 && offset == 0)
7458 op0 = force_reg (mode, op0);
7459 else
7460 op0 = validize_mem (force_const_mem (mode, op0));
7461 }
7462
7463 /* Otherwise, if this object is not in memory and we either have an
7464 offset or a BLKmode result, put it there. This case can't occur in
7465 C, but can in Ada if we have unchecked conversion of an expression
7466 from a scalar type to an array or record type or for an
7467 ARRAY_RANGE_REF whose type is BLKmode. */
7468 else if (GET_CODE (op0) != MEM
7469 && (offset != 0
7470 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7471 {
7472 /* If the operand is a SAVE_EXPR, we can deal with this by
7473 forcing the SAVE_EXPR into memory. */
7474 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7475 {
7476 put_var_into_stack (TREE_OPERAND (exp, 0),
7477 /*rescan=*/true);
7478 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7479 }
7480 else
7481 {
7482 tree nt
7483 = build_qualified_type (TREE_TYPE (tem),
7484 (TYPE_QUALS (TREE_TYPE (tem))
7485 | TYPE_QUAL_CONST));
7486 rtx memloc = assign_temp (nt, 1, 1, 1);
7487
7488 emit_move_insn (memloc, op0);
7489 op0 = memloc;
7490 }
7491 }
7492
7493 if (offset != 0)
7494 {
7495 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7496 EXPAND_SUM);
7497
7498 if (GET_CODE (op0) != MEM)
7499 abort ();
7500
7501 #ifdef POINTERS_EXTEND_UNSIGNED
7502 if (GET_MODE (offset_rtx) != Pmode)
7503 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7504 #else
7505 if (GET_MODE (offset_rtx) != ptr_mode)
7506 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7507 #endif
7508
7509 /* A constant address in OP0 can have VOIDmode; we must not try
7510 to call force_reg for that case, so avoid it. */
7511 if (GET_CODE (op0) == MEM
7512 && GET_MODE (op0) == BLKmode
7513 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7514 && bitsize != 0
7515 && (bitpos % bitsize) == 0
7516 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7517 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7518 {
7519 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7520 bitpos = 0;
7521 }
7522
7523 op0 = offset_address (op0, offset_rtx,
7524 highest_pow2_factor (offset));
7525 }
7526
7527 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7528 record its alignment as BIGGEST_ALIGNMENT. */
7529 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7530 && is_aligning_offset (offset, tem))
7531 set_mem_align (op0, BIGGEST_ALIGNMENT);
7532
7533 /* Don't forget about volatility even if this is a bitfield. */
7534 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7535 {
7536 if (op0 == orig_op0)
7537 op0 = copy_rtx (op0);
7538
7539 MEM_VOLATILE_P (op0) = 1;
7540 }
7541
7542 /* The following code doesn't handle CONCAT.
7543 Assume only bitpos == 0 can be used for CONCAT, due to
7544 one-element arrays having the same mode as their element. */
7545 if (GET_CODE (op0) == CONCAT)
7546 {
7547 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7548 abort ();
7549 return op0;
7550 }
7551
7552 /* In cases where an aligned union has an unaligned object
7553 as a field, we might be extracting a BLKmode value from
7554 an integer-mode (e.g., SImode) object. Handle this case
7555 by doing the extract into an object as wide as the field
7556 (which we know to be the width of a basic mode), then
7557 storing into memory, and changing the mode to BLKmode. */
7558 if (mode1 == VOIDmode
7559 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7560 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7562 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7563 && modifier != EXPAND_CONST_ADDRESS
7564 && modifier != EXPAND_INITIALIZER)
7565 /* If the field isn't aligned enough to fetch as a memref,
7566 fetch it as a bit field. */
7567 || (mode1 != BLKmode
7568 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7569 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7570 && ((modifier == EXPAND_CONST_ADDRESS
7571 || modifier == EXPAND_INITIALIZER)
7572 ? STRICT_ALIGNMENT
7573 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7574 || (bitpos % BITS_PER_UNIT != 0)))
7575 /* If the type and the field are a constant size and the
7576 size of the type isn't the same size as the bitfield,
7577 we must use bitfield operations. */
7578 || (bitsize >= 0
7579 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7580 == INTEGER_CST)
7581 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7582 bitsize)))
7583 {
7584 enum machine_mode ext_mode = mode;
7585
7586 if (ext_mode == BLKmode
7587 && ! (target != 0 && GET_CODE (op0) == MEM
7588 && GET_CODE (target) == MEM
7589 && bitpos % BITS_PER_UNIT == 0))
7590 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7591
7592 if (ext_mode == BLKmode)
7593 {
7594 /* In this case, BITPOS must start at a byte boundary and
7595 TARGET, if specified, must be a MEM. */
7596 if (GET_CODE (op0) != MEM
7597 || (target != 0 && GET_CODE (target) != MEM)
7598 || bitpos % BITS_PER_UNIT != 0)
7599 abort ();
7600
7601 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7602 if (target == 0)
7603 target = assign_temp (type, 0, 1, 1);
7604
7605 emit_block_move (target, op0,
7606 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7607 / BITS_PER_UNIT),
7608 (modifier == EXPAND_STACK_PARM
7609 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7610
7611 return target;
7612 }
7613
7614 op0 = validize_mem (op0);
7615
7616 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7617 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7618
7619 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7620 (modifier == EXPAND_STACK_PARM
7621 ? NULL_RTX : target),
7622 ext_mode, ext_mode,
7623 int_size_in_bytes (TREE_TYPE (tem)));
7624
7625 /* If the result is a record type and BITSIZE is narrower than
7626 the mode of OP0, an integral mode, and this is a big endian
7627 machine, we must put the field into the high-order bits. */
7628 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7629 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7630 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7631 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7632 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7633 - bitsize),
7634 op0, 1);
7635
7636 if (mode == BLKmode)
7637 {
7638 rtx new = assign_temp (build_qualified_type
7639 ((*lang_hooks.types.type_for_mode)
7640 (ext_mode, 0),
7641 TYPE_QUAL_CONST), 0, 1, 1);
7642
7643 emit_move_insn (new, op0);
7644 op0 = copy_rtx (new);
7645 PUT_MODE (op0, BLKmode);
7646 set_mem_attributes (op0, exp, 1);
7647 }
7648
7649 return op0;
7650 }
7651
7652 /* If the result is BLKmode, use that to access the object
7653 now as well. */
7654 if (mode == BLKmode)
7655 mode1 = BLKmode;
7656
7657 /* Get a reference to just this component. */
7658 if (modifier == EXPAND_CONST_ADDRESS
7659 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7660 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7661 else
7662 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7663
7664 if (op0 == orig_op0)
7665 op0 = copy_rtx (op0);
7666
7667 set_mem_attributes (op0, exp, 0);
7668 if (GET_CODE (XEXP (op0, 0)) == REG)
7669 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7670
7671 MEM_VOLATILE_P (op0) |= volatilep;
7672 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7673 || modifier == EXPAND_CONST_ADDRESS
7674 || modifier == EXPAND_INITIALIZER)
7675 return op0;
7676 else if (target == 0)
7677 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7678
7679 convert_move (target, op0, unsignedp);
7680 return target;
7681 }
7682
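    /* For a VTABLE_REF, expand the vtable load itself and attach a
       REG_VTABLE_REF note to the instruction that produces the value,
       recording the vtable address plus the offset of the entry.  */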
7683 case VTABLE_REF:
7684 {
7685 rtx insn, before = get_last_insn (), vtbl_ref;
7686
7687 /* Evaluate the interior expression. */
7688 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7689 tmode, modifier);
7690
7691 /* Get or create an instruction off which to hang a note. */
7692 if (REG_P (subtarget))
7693 {
7694 target = subtarget;
7695 insn = get_last_insn ();
7696 if (insn == before)
7697 abort ();
7698 if (! INSN_P (insn))
7699 insn = prev_nonnote_insn (insn);
7700 }
7701 else
7702 {
7703 target = gen_reg_rtx (GET_MODE (subtarget));
7704 insn = emit_move_insn (target, subtarget);
7705 }
7706
7707 /* Collect the data for the note. */
7708 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7709 vtbl_ref = plus_constant (vtbl_ref,
7710 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7711 /* Discard the initial CONST that was added. */
7712 vtbl_ref = XEXP (vtbl_ref, 0);
7713
7714 REG_NOTES (insn)
7715 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7716
7717 return target;
7718 }
7719
7720 /* Intended for a reference to a buffer of a file-object in Pascal.
7721 But it's not certain that a special tree code will really be
7722 necessary for these. INDIRECT_REF might work for them. */
7723 case BUFFER_REF:
7724 abort ();
7725
7726 case IN_EXPR:
7727 {
7728 /* Pascal set IN expression.
7729
7730 Algorithm:
7731 rlo = set_low - (set_low%bits_per_word);
7732 the_word = set [ (index - rlo)/bits_per_word ];
7733 bit_index = index % bits_per_word;
7734 bitmask = 1 << bit_index;
7735 return !!(the_word & bitmask); */
7736
7737 tree set = TREE_OPERAND (exp, 0);
7738 tree index = TREE_OPERAND (exp, 1);
7739 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7740 tree set_type = TREE_TYPE (set);
7741 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7742 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7743 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7744 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7745 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7746 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7747 rtx setaddr = XEXP (setval, 0);
7748 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7749 rtx rlow;
7750 rtx diff, quo, rem, addr, bit, result;
7751
7752 /* If domain is empty, answer is no. Likewise if index is constant
7753 and out of bounds. */
7754 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7755 && TREE_CODE (set_low_bound) == INTEGER_CST
7756 && tree_int_cst_lt (set_high_bound, set_low_bound))
7757 || (TREE_CODE (index) == INTEGER_CST
7758 && TREE_CODE (set_low_bound) == INTEGER_CST
7759 && tree_int_cst_lt (index, set_low_bound))
7760 || (TREE_CODE (set_high_bound) == INTEGER_CST
7761 && TREE_CODE (index) == INTEGER_CST
7762 && tree_int_cst_lt (set_high_bound, index))))
7763 return const0_rtx;
7764
7765 if (target == 0)
7766 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7767
7768 /* If we get here, we have to generate the code for both cases
7769 (in range and out of range). */
7770
7771 op0 = gen_label_rtx ();
7772 op1 = gen_label_rtx ();
7773
7774 if (! (GET_CODE (index_val) == CONST_INT
7775 && GET_CODE (lo_r) == CONST_INT))
7776 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7777 GET_MODE (index_val), iunsignedp, op1);
7778
7779 if (! (GET_CODE (index_val) == CONST_INT
7780 && GET_CODE (hi_r) == CONST_INT))
7781 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7782 GET_MODE (index_val), iunsignedp, op1);
7783
7784 /* Calculate the element number of bit zero in the first word
7785 of the set. */
7786 if (GET_CODE (lo_r) == CONST_INT)
7787 rlow = GEN_INT (INTVAL (lo_r)
7788 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7789 else
7790 rlow = expand_binop (index_mode, and_optab, lo_r,
7791 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7792 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7793
7794 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7795 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7796
7797 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7798 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7799 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7800 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7801
7802 addr = memory_address (byte_mode,
7803 expand_binop (index_mode, add_optab, diff,
7804 setaddr, NULL_RTX, iunsignedp,
7805 OPTAB_LIB_WIDEN));
7806
7807 /* Extract the bit we want to examine. */
7808 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7809 gen_rtx_MEM (byte_mode, addr),
7810 make_tree (TREE_TYPE (index), rem),
7811 NULL_RTX, 1);
7812 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7813 GET_MODE (target) == byte_mode ? target : 0,
7814 1, OPTAB_LIB_WIDEN);
7815
7816 if (result != target)
7817 convert_move (target, result, 1);
7818
7819 /* Output the code to handle the out-of-range case. */
7820 emit_jump (op0);
7821 emit_label (op1);
7822 emit_move_insn (target, const0_rtx);
7823 emit_label (op0);
7824 return target;
7825 }
7826
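    /* For a WITH_CLEANUP_EXPR, expand operand 0 only once, caching the
       result in WITH_CLEANUP_EXPR_RTL, and register operand 1 as the
       cleanup; the operand is then zeroed so the cleanup cannot be queued
       twice.  */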
7827 case WITH_CLEANUP_EXPR:
7828 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7829 {
7830 WITH_CLEANUP_EXPR_RTL (exp)
7831 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7832 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7833 CLEANUP_EH_ONLY (exp));
7834
7835 /* That's it for this cleanup. */
7836 TREE_OPERAND (exp, 1) = 0;
7837 }
7838 return WITH_CLEANUP_EXPR_RTL (exp);
7839
7840 case CLEANUP_POINT_EXPR:
7841 {
7842 /* Start a new binding layer that will keep track of all cleanup
7843 actions to be performed. */
7844 expand_start_bindings (2);
7845
7846 target_temp_slot_level = temp_slot_level;
7847
7848 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7849 /* If we're going to use this value, load it up now. */
7850 if (! ignore)
7851 op0 = force_not_mem (op0);
7852 preserve_temp_slots (op0);
7853 expand_end_bindings (NULL_TREE, 0, 0);
7854 }
7855 return op0;
7856
7857 case CALL_EXPR:
7858 /* Check for a built-in function. */
7859 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7860 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7861 == FUNCTION_DECL)
7862 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7863 {
7864 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7865 == BUILT_IN_FRONTEND)
7866 return (*lang_hooks.expand_expr) (exp, original_target,
7867 tmode, modifier);
7868 else
7869 return expand_builtin (exp, target, subtarget, tmode, ignore);
7870 }
7871
7872 return expand_call (exp, target, ignore);
7873
7874 case NON_LVALUE_EXPR:
7875 case NOP_EXPR:
7876 case CONVERT_EXPR:
7877 case REFERENCE_EXPR:
7878 if (TREE_OPERAND (exp, 0) == error_mark_node)
7879 return const0_rtx;
7880
7881 if (TREE_CODE (type) == UNION_TYPE)
7882 {
7883 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7884
7885 /* If both input and output are BLKmode, this conversion isn't doing
7886 anything except possibly changing the memory attributes. */
7887 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7888 {
7889 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7890 modifier);
7891
7892 result = copy_rtx (result);
7893 set_mem_attributes (result, exp, 0);
7894 return result;
7895 }
7896
7897 if (target == 0)
7898 target = assign_temp (type, 0, 1, 1);
7899
7900 if (GET_CODE (target) == MEM)
7901 /* Store data into beginning of memory target. */
7902 store_expr (TREE_OPERAND (exp, 0),
7903 adjust_address (target, TYPE_MODE (valtype), 0),
7904 modifier == EXPAND_STACK_PARM ? 2 : 0);
7905
7906 else if (GET_CODE (target) == REG)
7907 /* Store this field into a union of the proper type. */
7908 store_field (target,
7909 MIN ((int_size_in_bytes (TREE_TYPE
7910 (TREE_OPERAND (exp, 0)))
7911 * BITS_PER_UNIT),
7912 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7913 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7914 VOIDmode, 0, type, 0);
7915 else
7916 abort ();
7917
7918 /* Return the entire union. */
7919 return target;
7920 }
7921
7922 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7923 {
7924 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7925 modifier);
7926
7927 /* If the signedness of the conversion differs and OP0 is
7928 a promoted SUBREG, clear that indication since we now
7929 have to do the proper extension. */
7930 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7931 && GET_CODE (op0) == SUBREG)
7932 SUBREG_PROMOTED_VAR_P (op0) = 0;
7933
7934 return op0;
7935 }
7936
7937 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7938 if (GET_MODE (op0) == mode)
7939 return op0;
7940
7941 /* If OP0 is a constant, just convert it into the proper mode. */
7942 if (CONSTANT_P (op0))
7943 {
7944 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7945 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7946
7947 if (modifier == EXPAND_INITIALIZER)
7948 return simplify_gen_subreg (mode, op0, inner_mode,
7949 subreg_lowpart_offset (mode,
7950 inner_mode));
7951 else
7952 return convert_modes (mode, inner_mode, op0,
7953 TREE_UNSIGNED (inner_type));
7954 }
7955
7956 if (modifier == EXPAND_INITIALIZER)
7957 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7958
7959 if (target == 0)
7960 return
7961 convert_to_mode (mode, op0,
7962 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7963 else
7964 convert_move (target, op0,
7965 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7966 return target;
7967
7968 case VIEW_CONVERT_EXPR:
7969 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7970
7971 /* If the input and output modes are both the same, we are done.
7972 Otherwise, if neither mode is BLKmode and both are integral and within
7973 a word, we can use gen_lowpart. If neither is true, make sure the
7974 operand is in memory and convert the MEM to the new mode. */
7975 if (TYPE_MODE (type) == GET_MODE (op0))
7976 ;
7977 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7978 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7979 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7980 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7981 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7982 op0 = gen_lowpart (TYPE_MODE (type), op0);
7983 else if (GET_CODE (op0) != MEM)
7984 {
7985 /* If the operand is not a MEM, force it into memory. Since we
7986 are going to be changing the mode of the MEM, don't call
7987 force_const_mem for constants because we don't allow pool
7988 constants to change mode. */
7989 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7990
7991 if (TREE_ADDRESSABLE (exp))
7992 abort ();
7993
7994 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7995 target
7996 = assign_stack_temp_for_type
7997 (TYPE_MODE (inner_type),
7998 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7999
8000 emit_move_insn (target, op0);
8001 op0 = target;
8002 }
8003
8004 /* At this point, OP0 is in the correct mode. If the output type is such
8005 that the operand is known to be aligned, indicate that it is.
8006 Otherwise, we need only be concerned about alignment for non-BLKmode
8007 results. */
8008 if (GET_CODE (op0) == MEM)
8009 {
8010 op0 = copy_rtx (op0);
8011
8012 if (TYPE_ALIGN_OK (type))
8013 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8014 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8015 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8016 {
8017 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8018 HOST_WIDE_INT temp_size
8019 = MAX (int_size_in_bytes (inner_type),
8020 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8021 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8022 temp_size, 0, type);
8023 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8024
8025 if (TREE_ADDRESSABLE (exp))
8026 abort ();
8027
8028 if (GET_MODE (op0) == BLKmode)
8029 emit_block_move (new_with_op0_mode, op0,
8030 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8031 (modifier == EXPAND_STACK_PARM
8032 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8033 else
8034 emit_move_insn (new_with_op0_mode, op0);
8035
8036 op0 = new;
8037 }
8038
8039 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8040 }
8041
8042 return op0;
8043
8044 case PLUS_EXPR:
8045 this_optab = ! unsignedp && flag_trapv
8046 && (GET_MODE_CLASS (mode) == MODE_INT)
8047 ? addv_optab : add_optab;
8048
8049 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8050 something else, make sure we add the register to the constant and
8051 then to the other thing. This case can occur during strength
8052 reduction and doing it this way will produce better code if the
8053 frame pointer or argument pointer is eliminated.
8054
8055 fold-const.c will ensure that the constant is always in the inner
8056 PLUS_EXPR, so the only case we need to do anything about is if
8057 sp, ap, or fp is our second argument, in which case we must swap
8058 the innermost first argument and our second argument. */
8059
8060 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8061 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8062 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8063 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8064 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8065 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8066 {
8067 tree t = TREE_OPERAND (exp, 1);
8068
8069 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8070 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8071 }
8072
8073 /* If the result is to be ptr_mode and we are adding an integer to
8074 something, we might be forming a constant. So try to use
8075 plus_constant. If it produces a sum and we can't accept it,
8076 use force_operand. This allows P = &ARR[const] to generate
8077 efficient code on machines where a SYMBOL_REF is not a valid
8078 address.
8079
8080 If this is an EXPAND_SUM call, always return the sum. */
8081 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8082 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8083 {
8084 if (modifier == EXPAND_STACK_PARM)
8085 target = 0;
8086 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8087 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8088 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8089 {
8090 rtx constant_part;
8091
8092 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8093 EXPAND_SUM);
8094 /* Use immed_double_const to ensure that the constant is
8095 truncated according to the mode of OP1, then sign extended
8096 to a HOST_WIDE_INT. Using the constant directly can result
8097 in non-canonical RTL in a 64x32 cross compile. */
8098 constant_part
8099 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8100 (HOST_WIDE_INT) 0,
8101 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8102 op1 = plus_constant (op1, INTVAL (constant_part));
8103 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8104 op1 = force_operand (op1, target);
8105 return op1;
8106 }
8107
8108 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8109 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8110 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8111 {
8112 rtx constant_part;
8113
8114 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8115 (modifier == EXPAND_INITIALIZER
8116 ? EXPAND_INITIALIZER : EXPAND_SUM));
8117 if (! CONSTANT_P (op0))
8118 {
8119 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8120 VOIDmode, modifier);
8121 /* Don't go to both_summands if modifier
8122 says it's not right to return a PLUS. */
8123 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8124 goto binop2;
8125 goto both_summands;
8126 }
8127 /* Use immed_double_const to ensure that the constant is
8128 truncated according to the mode of OP0, then sign extended
8129 to a HOST_WIDE_INT. Using the constant directly can result
8130 in non-canonical RTL in a 64x32 cross compile. */
8131 constant_part
8132 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8133 (HOST_WIDE_INT) 0,
8134 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8135 op0 = plus_constant (op0, INTVAL (constant_part));
8136 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8137 op0 = force_operand (op0, target);
8138 return op0;
8139 }
8140 }
8141
8142 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8143 subtarget = 0;
8144
8145 /* No sense saving up arithmetic to be done
8146 if it's all in the wrong mode to form part of an address.
8147 And force_operand won't know whether to sign-extend or
8148 zero-extend. */
8149 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8150 || mode != ptr_mode)
8151 {
8152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8153 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8154 TREE_OPERAND (exp, 1), 0))
8155 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8156 else
8157 op1 = op0;
8158 if (op0 == const0_rtx)
8159 return op1;
8160 if (op1 == const0_rtx)
8161 return op0;
8162 goto binop2;
8163 }
8164
8165 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8166 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8167 TREE_OPERAND (exp, 1), 0))
8168 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8169 VOIDmode, modifier);
8170 else
8171 op1 = op0;
8172
8173 /* We come here from MINUS_EXPR when the second operand is a
8174 constant. */
8175 both_summands:
8176 /* Make sure any term that's a sum with a constant comes last. */
8177 if (GET_CODE (op0) == PLUS
8178 && CONSTANT_P (XEXP (op0, 1)))
8179 {
8180 temp = op0;
8181 op0 = op1;
8182 op1 = temp;
8183 }
8184 /* If adding to a sum including a constant,
8185 associate it to put the constant outside. */
8186 if (GET_CODE (op1) == PLUS
8187 && CONSTANT_P (XEXP (op1, 1)))
8188 {
8189 rtx constant_term = const0_rtx;
8190
8191 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8192 if (temp != 0)
8193 op0 = temp;
8194 /* Ensure that MULT comes first if there is one. */
8195 else if (GET_CODE (op0) == MULT)
8196 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8197 else
8198 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8199
8200 /* Let's also eliminate constants from op0 if possible. */
8201 op0 = eliminate_constant_term (op0, &constant_term);
8202
8203 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8204 their sum should be a constant. Form it into OP1, since the
8205 result we want will then be OP0 + OP1. */
8206
8207 temp = simplify_binary_operation (PLUS, mode, constant_term,
8208 XEXP (op1, 1));
8209 if (temp != 0)
8210 op1 = temp;
8211 else
8212 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8213 }
8214
8215 /* Put a constant term last and put a multiplication first. */
8216 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8217 temp = op1, op1 = op0, op0 = temp;
8218
8219 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8220 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8221
8222 case MINUS_EXPR:
8223 /* For initializers, we are allowed to return a MINUS of two
8224 symbolic constants. Here we handle all cases when both operands
8225 are constant. */
8226 /* Handle difference of two symbolic constants,
8227 for the sake of an initializer. */
8228 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8229 && really_constant_p (TREE_OPERAND (exp, 0))
8230 && really_constant_p (TREE_OPERAND (exp, 1)))
8231 {
8232 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8233 modifier);
8234 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8235 modifier);
8236
8237 /* If the last operand is a CONST_INT, use plus_constant of
8238 the negated constant. Else make the MINUS. */
8239 if (GET_CODE (op1) == CONST_INT)
8240 return plus_constant (op0, - INTVAL (op1));
8241 else
8242 return gen_rtx_MINUS (mode, op0, op1);
8243 }
8244
8245 this_optab = ! unsignedp && flag_trapv
8246 && (GET_MODE_CLASS(mode) == MODE_INT)
8247 ? subv_optab : sub_optab;
8248
8249 /* No sense saving up arithmetic to be done
8250 if it's all in the wrong mode to form part of an address.
8251 And force_operand won't know whether to sign-extend or
8252 zero-extend. */
8253 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8254 || mode != ptr_mode)
8255 goto binop;
8256
8257 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8258 subtarget = 0;
8259
8260 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8261 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8262
8263 /* Convert A - const to A + (-const). */
8264 if (GET_CODE (op1) == CONST_INT)
8265 {
8266 op1 = negate_rtx (mode, op1);
8267 goto both_summands;
8268 }
8269
8270 goto binop2;
8271
8272 case MULT_EXPR:
8273 /* If first operand is constant, swap them.
8274 Thus the following special case checks need only
8275 check the second operand. */
8276 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8277 {
8278 tree t1 = TREE_OPERAND (exp, 0);
8279 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8280 TREE_OPERAND (exp, 1) = t1;
8281 }
8282
8283 /* Attempt to return something suitable for generating an
8284 indexed address, for machines that support that. */
8285
8286 if (modifier == EXPAND_SUM && mode == ptr_mode
8287 && host_integerp (TREE_OPERAND (exp, 1), 0))
8288 {
8289 tree exp1 = TREE_OPERAND (exp, 1);
8290
8291 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8292 EXPAND_SUM);
8293
8294 /* If we knew for certain that this is arithmetic for an array
8295 reference, and we knew the bounds of the array, then we could
8296 apply the distributive law across (PLUS X C) for constant C.
8297 Without such knowledge, we risk overflowing the computation
8298 when both X and C are large, but X+C isn't. */
8299 /* ??? Could perhaps special-case EXP being unsigned and C being
8300 positive. In that case we are certain that X+C is no smaller
8301 than X and so the transformed expression will overflow iff the
8302 original would have. */
8303
8304 if (GET_CODE (op0) != REG)
8305 op0 = force_operand (op0, NULL_RTX);
8306 if (GET_CODE (op0) != REG)
8307 op0 = copy_to_mode_reg (mode, op0);
8308
8309 return gen_rtx_MULT (mode, op0,
8310 gen_int_mode (tree_low_cst (exp1, 0),
8311 TYPE_MODE (TREE_TYPE (exp1))));
8312 }
8313
8314 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8315 subtarget = 0;
8316
8317 if (modifier == EXPAND_STACK_PARM)
8318 target = 0;
8319
8320 /* Check for multiplying things that have been extended
8321 from a narrower type. If this machine supports multiplying
8322 in that narrower type with a result in the desired type,
8323 do it that way, and avoid the explicit type-conversion. */
8324 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8325 && TREE_CODE (type) == INTEGER_TYPE
8326 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8327 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8328 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8329 && int_fits_type_p (TREE_OPERAND (exp, 1),
8330 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8331 /* Don't use a widening multiply if a shift will do. */
8332 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8333 > HOST_BITS_PER_WIDE_INT)
8334 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8335 ||
8336 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8337 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8338 ==
8339 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8340 /* If both operands are extended, they must either both
8341 be zero-extended or both be sign-extended. */
8342 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8343 ==
8344 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8345 {
8346 enum machine_mode innermode
8347 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8348 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8349 ? smul_widen_optab : umul_widen_optab);
8350 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8351 ? umul_widen_optab : smul_widen_optab);
8352 if (mode == GET_MODE_WIDER_MODE (innermode))
8353 {
8354 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8355 {
8356 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8357 NULL_RTX, VOIDmode, 0);
8358 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8359 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8360 VOIDmode, 0);
8361 else
8362 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8363 NULL_RTX, VOIDmode, 0);
8364 goto binop2;
8365 }
8366 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8367 && innermode == word_mode)
8368 {
8369 rtx htem;
8370 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8371 NULL_RTX, VOIDmode, 0);
8372 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8373 op1 = convert_modes (innermode, mode,
8374 expand_expr (TREE_OPERAND (exp, 1),
8375 NULL_RTX, VOIDmode, 0),
8376 unsignedp);
8377 else
8378 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8379 NULL_RTX, VOIDmode, 0);
8380 temp = expand_binop (mode, other_optab, op0, op1, target,
8381 unsignedp, OPTAB_LIB_WIDEN);
8382 htem = expand_mult_highpart_adjust (innermode,
8383 gen_highpart (innermode, temp),
8384 op0, op1,
8385 gen_highpart (innermode, temp),
8386 unsignedp);
8387 emit_move_insn (gen_highpart (innermode, temp), htem);
8388 return temp;
8389 }
8390 }
8391 }
8392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8393 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8394 TREE_OPERAND (exp, 1), 0))
8395 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8396 else
8397 op1 = op0;
8398 return expand_mult (mode, op0, op1, target, unsignedp);
8399
8400 case TRUNC_DIV_EXPR:
8401 case FLOOR_DIV_EXPR:
8402 case CEIL_DIV_EXPR:
8403 case ROUND_DIV_EXPR:
8404 case EXACT_DIV_EXPR:
8405 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8406 subtarget = 0;
8407 if (modifier == EXPAND_STACK_PARM)
8408 target = 0;
8409 /* Possible optimization: compute the dividend with EXPAND_SUM
8410 then if the divisor is constant we can optimize the case
8411 where some terms of the dividend have coefficients divisible by it. */
8412 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8413 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8414 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8415
8416 case RDIV_EXPR:
8417 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8418 saving an expensive divide. If not, combine will rebuild the original
8419 computation. */
8420 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8421 && TREE_CODE (type) == REAL_TYPE
8422 && !real_onep (TREE_OPERAND (exp, 0)))
8423 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8424 build (RDIV_EXPR, type,
8425 build_real (type, dconst1),
8426 TREE_OPERAND (exp, 1))),
8427 target, tmode, modifier);
8428 this_optab = sdiv_optab;
8429 goto binop;
8430
8431 case TRUNC_MOD_EXPR:
8432 case FLOOR_MOD_EXPR:
8433 case CEIL_MOD_EXPR:
8434 case ROUND_MOD_EXPR:
8435 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8436 subtarget = 0;
8437 if (modifier == EXPAND_STACK_PARM)
8438 target = 0;
8439 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8440 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8441 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8442
8443 case FIX_ROUND_EXPR:
8444 case FIX_FLOOR_EXPR:
8445 case FIX_CEIL_EXPR:
8446 abort (); /* Not used for C. */
8447
8448 case FIX_TRUNC_EXPR:
8449 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8450 if (target == 0 || modifier == EXPAND_STACK_PARM)
8451 target = gen_reg_rtx (mode);
8452 expand_fix (target, op0, unsignedp);
8453 return target;
8454
8455 case FLOAT_EXPR:
8456 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8457 if (target == 0 || modifier == EXPAND_STACK_PARM)
8458 target = gen_reg_rtx (mode);
8459 /* expand_float can't figure out what to do if FROM has VOIDmode.
8460 So give it the correct mode. With -O, cse will optimize this. */
8461 if (GET_MODE (op0) == VOIDmode)
8462 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8463 op0);
8464 expand_float (target, op0,
8465 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8466 return target;
8467
8468 case NEGATE_EXPR:
8469 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8470 if (modifier == EXPAND_STACK_PARM)
8471 target = 0;
8472 temp = expand_unop (mode,
8473 ! unsignedp && flag_trapv
8474 && (GET_MODE_CLASS(mode) == MODE_INT)
8475 ? negv_optab : neg_optab, op0, target, 0);
8476 if (temp == 0)
8477 abort ();
8478 return temp;
8479
8480 case ABS_EXPR:
8481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8482 if (modifier == EXPAND_STACK_PARM)
8483 target = 0;
8484
8485 /* Handle complex values specially. */
8486 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8487 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8488 return expand_complex_abs (mode, op0, target, unsignedp);
8489
8490 /* Unsigned abs is simply the operand. Testing here means we don't
8491 risk generating incorrect code below. */
8492 if (TREE_UNSIGNED (type))
8493 return op0;
8494
8495 return expand_abs (mode, op0, target, unsignedp,
8496 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8497
8498 case MAX_EXPR:
8499 case MIN_EXPR:
8500 target = original_target;
8501 if (target == 0
8502 || modifier == EXPAND_STACK_PARM
8503 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8504 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8505 || GET_MODE (target) != mode
8506 || (GET_CODE (target) == REG
8507 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8508 target = gen_reg_rtx (mode);
8509 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8510 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8511
8512 /* First try to do it with a special MIN or MAX instruction.
8513 If that does not win, use a conditional jump to select the proper
8514 value. */
8515 this_optab = (TREE_UNSIGNED (type)
8516 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8517 : (code == MIN_EXPR ? smin_optab : smax_optab));
8518
8519 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8520 OPTAB_WIDEN);
8521 if (temp != 0)
8522 return temp;
8523
8524 /* At this point, a MEM target is no longer useful; we will get better
8525 code without it. */
8526
8527 if (GET_CODE (target) == MEM)
8528 target = gen_reg_rtx (mode);
8529
8530 if (target != op0)
8531 emit_move_insn (target, op0);
8532
8533 op0 = gen_label_rtx ();
8534
8535 /* If this mode is an integer too wide to compare properly,
8536 compare word by word. Rely on cse to optimize constant cases. */
8537 if (GET_MODE_CLASS (mode) == MODE_INT
8538 && ! can_compare_p (GE, mode, ccp_jump))
8539 {
8540 if (code == MAX_EXPR)
8541 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8542 target, op1, NULL_RTX, op0);
8543 else
8544 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8545 op1, target, NULL_RTX, op0);
8546 }
8547 else
8548 {
8549 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8550 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8551 unsignedp, mode, NULL_RTX, NULL_RTX,
8552 op0);
8553 }
8554 emit_move_insn (target, op1);
8555 emit_label (op0);
8556 return target;
8557
8558 case BIT_NOT_EXPR:
8559 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8560 if (modifier == EXPAND_STACK_PARM)
8561 target = 0;
8562 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8563 if (temp == 0)
8564 abort ();
8565 return temp;
8566
8567 case FFS_EXPR:
8568 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8569 if (modifier == EXPAND_STACK_PARM)
8570 target = 0;
8571 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8572 if (temp == 0)
8573 abort ();
8574 return temp;
8575
8576 case CLZ_EXPR:
8577 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8578 temp = expand_unop (mode, clz_optab, op0, target, 1);
8579 if (temp == 0)
8580 abort ();
8581 return temp;
8582
8583 case CTZ_EXPR:
8584 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8585 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8586 if (temp == 0)
8587 abort ();
8588 return temp;
8589
8590 case POPCOUNT_EXPR:
8591 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8592 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8593 if (temp == 0)
8594 abort ();
8595 return temp;
8596
8597 case PARITY_EXPR:
8598 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8599 temp = expand_unop (mode, parity_optab, op0, target, 1);
8600 if (temp == 0)
8601 abort ();
8602 return temp;
8603
8604 /* ??? Can optimize bitwise operations with one arg constant.
8605 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8606 and (a bitwise1 b) bitwise2 b (etc)
8607 but that is probably not worthwhile. */
8608
8609 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8610 boolean values when we want in all cases to compute both of them. In
8611 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8612 as actual zero-or-1 values and then bitwise anding. In cases where
8613 there cannot be any side effects, better code would be made by
8614 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8615 how to recognize those cases. */
8616
8617 case TRUTH_AND_EXPR:
8618 case BIT_AND_EXPR:
8619 this_optab = and_optab;
8620 goto binop;
8621
8622 case TRUTH_OR_EXPR:
8623 case BIT_IOR_EXPR:
8624 this_optab = ior_optab;
8625 goto binop;
8626
8627 case TRUTH_XOR_EXPR:
8628 case BIT_XOR_EXPR:
8629 this_optab = xor_optab;
8630 goto binop;
8631
8632 case LSHIFT_EXPR:
8633 case RSHIFT_EXPR:
8634 case LROTATE_EXPR:
8635 case RROTATE_EXPR:
8636 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8637 subtarget = 0;
8638 if (modifier == EXPAND_STACK_PARM)
8639 target = 0;
8640 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8641 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8642 unsignedp);
8643
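    /* For comparison operators, first try do_store_flag, which produces the
       0/1 result with a store-flag instruction or an equivalent sequence;
       if that fails, fall through to the conditional-jump code shared with
       TRUTH_ANDIF_EXPR below.  */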
8644 /* Could determine the answer when only additive constants differ. Also,
8645 the addition of one can be handled by changing the condition. */
8646 case LT_EXPR:
8647 case LE_EXPR:
8648 case GT_EXPR:
8649 case GE_EXPR:
8650 case EQ_EXPR:
8651 case NE_EXPR:
8652 case UNORDERED_EXPR:
8653 case ORDERED_EXPR:
8654 case UNLT_EXPR:
8655 case UNLE_EXPR:
8656 case UNGT_EXPR:
8657 case UNGE_EXPR:
8658 case UNEQ_EXPR:
8659 temp = do_store_flag (exp,
8660 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8661 tmode != VOIDmode ? tmode : mode, 0);
8662 if (temp != 0)
8663 return temp;
8664
8665 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8666 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8667 && original_target
8668 && GET_CODE (original_target) == REG
8669 && (GET_MODE (original_target)
8670 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8671 {
8672 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8673 VOIDmode, 0);
8674
8675 /* If temp is constant, we can just compute the result. */
8676 if (GET_CODE (temp) == CONST_INT)
8677 {
8678 if (INTVAL (temp) != 0)
8679 emit_move_insn (target, const1_rtx);
8680 else
8681 emit_move_insn (target, const0_rtx);
8682
8683 return target;
8684 }
8685
8686 if (temp != original_target)
8687 {
8688 enum machine_mode mode1 = GET_MODE (temp);
8689 if (mode1 == VOIDmode)
8690 mode1 = tmode != VOIDmode ? tmode : mode;
8691
8692 temp = copy_to_mode_reg (mode1, temp);
8693 }
8694
8695 op1 = gen_label_rtx ();
8696 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8697 GET_MODE (temp), unsignedp, op1);
8698 emit_move_insn (temp, const1_rtx);
8699 emit_label (op1);
8700 return temp;
8701 }
8702
8703 /* If no set-flag instruction, must generate a conditional
8704 store into a temporary variable. Drop through
8705 and handle this like && and ||. */
8706
8707 case TRUTH_ANDIF_EXPR:
8708 case TRUTH_ORIF_EXPR:
8709 if (! ignore
8710 && (target == 0
8711 || modifier == EXPAND_STACK_PARM
8712 || ! safe_from_p (target, exp, 1)
8713 /* Make sure we don't have a hard reg (such as function's return
8714 value) live across basic blocks, if not optimizing. */
8715 || (!optimize && GET_CODE (target) == REG
8716 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8717 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8718
8719 if (target)
8720 emit_clr_insn (target);
8721
8722 op1 = gen_label_rtx ();
8723 jumpifnot (exp, op1);
8724
8725 if (target)
8726 emit_0_to_1_insn (target);
8727
8728 emit_label (op1);
8729 return ignore ? const0_rtx : target;
8730
8731 case TRUTH_NOT_EXPR:
8732 if (modifier == EXPAND_STACK_PARM)
8733 target = 0;
8734 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8735 /* The parser is careful to generate TRUTH_NOT_EXPR
8736 only with operands that are always zero or one. */
8737 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8738 target, 1, OPTAB_LIB_WIDEN);
8739 if (temp == 0)
8740 abort ();
8741 return temp;
8742
8743 case COMPOUND_EXPR:
8744 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8745 emit_queue ();
8746 return expand_expr (TREE_OPERAND (exp, 1),
8747 (ignore ? const0_rtx : target),
8748 VOIDmode, modifier);
8749
8750 case COND_EXPR:
8751 /* If we would have a "singleton" (see below) were it not for a
8752 conversion in each arm, bring that conversion back out. */
8753 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8754 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8755 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8756 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8757 {
8758 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8759 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8760
8761 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8762 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8763 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8764 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8765 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8766 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8767 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8768 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8769 return expand_expr (build1 (NOP_EXPR, type,
8770 build (COND_EXPR, TREE_TYPE (iftrue),
8771 TREE_OPERAND (exp, 0),
8772 iftrue, iffalse)),
8773 target, tmode, modifier);
8774 }
8775
8776 {
8777 /* Note that COND_EXPRs whose type is a structure or union
8778 are required to be constructed to contain assignments of
8779 a temporary variable, so that we can evaluate them here
8780 for side effect only. If type is void, we must do likewise. */
8781
8782 /* If an arm of the branch requires a cleanup,
8783 only that cleanup is performed. */
8784
8785 tree singleton = 0;
8786 tree binary_op = 0, unary_op = 0;
8787
8788 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8789 convert it to our mode, if necessary. */
8790 if (integer_onep (TREE_OPERAND (exp, 1))
8791 && integer_zerop (TREE_OPERAND (exp, 2))
8792 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8793 {
8794 if (ignore)
8795 {
8796 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8797 modifier);
8798 return const0_rtx;
8799 }
8800
8801 if (modifier == EXPAND_STACK_PARM)
8802 target = 0;
8803 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8804 if (GET_MODE (op0) == mode)
8805 return op0;
8806
8807 if (target == 0)
8808 target = gen_reg_rtx (mode);
8809 convert_move (target, op0, unsignedp);
8810 return target;
8811 }
8812
8813 /* Check for X ? A + B : A. If we have this, we can copy A to the
8814 output and conditionally add B. Similarly for unary operations.
8815 Don't do this if X has side-effects because those side effects
8816 might affect A or B and the "?" operation is a sequence point in
8817 ANSI. (operand_equal_p tests for side effects.) */
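/* Illustrative (hypothetical) example:
       r = x ? a + b : a;
   can be expanded by copying A into the result unconditionally and
   then adding B only on the path where X is true, instead of
   evaluating A separately in each arm.  */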
8818
8819 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8820 && operand_equal_p (TREE_OPERAND (exp, 2),
8821 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8822 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8823 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8824 && operand_equal_p (TREE_OPERAND (exp, 1),
8825 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8826 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8827 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8828 && operand_equal_p (TREE_OPERAND (exp, 2),
8829 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8830 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8831 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8832 && operand_equal_p (TREE_OPERAND (exp, 1),
8833 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8834 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8835
8836 /* If we are not to produce a result, we have no target. Otherwise,
8837 if a target was specified use it; it will not be used as an
8838 intermediate target unless it is safe. If no target, use a
8839 temporary. */
8840
8841 if (ignore)
8842 temp = 0;
8843 else if (modifier == EXPAND_STACK_PARM)
8844 temp = assign_temp (type, 0, 0, 1);
8845 else if (original_target
8846 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8847 || (singleton && GET_CODE (original_target) == REG
8848 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8849 && original_target == var_rtx (singleton)))
8850 && GET_MODE (original_target) == mode
8851 #ifdef HAVE_conditional_move
8852 && (! can_conditionally_move_p (mode)
8853 || GET_CODE (original_target) == REG
8854 || TREE_ADDRESSABLE (type))
8855 #endif
8856 && (GET_CODE (original_target) != MEM
8857 || TREE_ADDRESSABLE (type)))
8858 temp = original_target;
8859 else if (TREE_ADDRESSABLE (type))
8860 abort ();
8861 else
8862 temp = assign_temp (type, 0, 0, 1);
8863
8864 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8865 do the test of X as a store-flag operation, do this as
8866 A + ((X != 0) << log C). Similarly for other simple binary
8867 operators. Only do for C == 1 if BRANCH_COST is low. */
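/* Illustrative (hypothetical) example:
       r = x ? a + 4 : a;
   can be expanded without a branch as
       r = a + ((x != 0) << 2);
   since 4 == 1 << 2.  */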
8868 if (temp && singleton && binary_op
8869 && (TREE_CODE (binary_op) == PLUS_EXPR
8870 || TREE_CODE (binary_op) == MINUS_EXPR
8871 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8872 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8873 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8874 : integer_onep (TREE_OPERAND (binary_op, 1)))
8875 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8876 {
8877 rtx result;
8878 tree cond;
8879 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8880 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8881 ? addv_optab : add_optab)
8882 : TREE_CODE (binary_op) == MINUS_EXPR
8883 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8884 ? subv_optab : sub_optab)
8885 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8886 : xor_optab);
8887
8888 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8889 if (singleton == TREE_OPERAND (exp, 1))
8890 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8891 else
8892 cond = TREE_OPERAND (exp, 0);
8893
8894 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8895 ? temp : NULL_RTX),
8896 mode, BRANCH_COST <= 1);
8897
8898 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8899 result = expand_shift (LSHIFT_EXPR, mode, result,
8900 build_int_2 (tree_log2
8901 (TREE_OPERAND
8902 (binary_op, 1)),
8903 0),
8904 (safe_from_p (temp, singleton, 1)
8905 ? temp : NULL_RTX), 0);
8906
8907 if (result)
8908 {
8909 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8910 return expand_binop (mode, boptab, op1, result, temp,
8911 unsignedp, OPTAB_LIB_WIDEN);
8912 }
8913 }
8914
8915 do_pending_stack_adjust ();
8916 NO_DEFER_POP;
8917 op0 = gen_label_rtx ();
8918
8919 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8920 {
8921 if (temp != 0)
8922 {
8923 /* If the target conflicts with the other operand of the
8924 binary op, we can't use it. Also, we can't use the target
8925 if it is a hard register, because evaluating the condition
8926 might clobber it. */
8927 if ((binary_op
8928 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8929 || (GET_CODE (temp) == REG
8930 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8931 temp = gen_reg_rtx (mode);
8932 store_expr (singleton, temp,
8933 modifier == EXPAND_STACK_PARM ? 2 : 0);
8934 }
8935 else
8936 expand_expr (singleton,
8937 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8938 if (singleton == TREE_OPERAND (exp, 1))
8939 jumpif (TREE_OPERAND (exp, 0), op0);
8940 else
8941 jumpifnot (TREE_OPERAND (exp, 0), op0);
8942
8943 start_cleanup_deferral ();
8944 if (binary_op && temp == 0)
8945 /* Just touch the other operand. */
8946 expand_expr (TREE_OPERAND (binary_op, 1),
8947 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8948 else if (binary_op)
8949 store_expr (build (TREE_CODE (binary_op), type,
8950 make_tree (type, temp),
8951 TREE_OPERAND (binary_op, 1)),
8952 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8953 else
8954 store_expr (build1 (TREE_CODE (unary_op), type,
8955 make_tree (type, temp)),
8956 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8957 op1 = op0;
8958 }
8959 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8960 comparison operator. If we have one of these cases, set the
8961 output to A, branch on A (cse will merge these two references),
8962 then set the output to FOO. */
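/* Illustrative (hypothetical) example: for
       r = x > 0 ? x : y;
   store X into R, branch on X > 0 past the other arm, and otherwise
   store Y into R; CSE can then merge the two references to X.  */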
8963 else if (temp
8964 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8965 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8966 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8967 TREE_OPERAND (exp, 1), 0)
8968 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8969 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8970 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8971 {
8972 if (GET_CODE (temp) == REG
8973 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8974 temp = gen_reg_rtx (mode);
8975 store_expr (TREE_OPERAND (exp, 1), temp,
8976 modifier == EXPAND_STACK_PARM ? 2 : 0);
8977 jumpif (TREE_OPERAND (exp, 0), op0);
8978
8979 start_cleanup_deferral ();
8980 store_expr (TREE_OPERAND (exp, 2), temp,
8981 modifier == EXPAND_STACK_PARM ? 2 : 0);
8982 op1 = op0;
8983 }
8984 else if (temp
8985 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8986 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8987 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8988 TREE_OPERAND (exp, 2), 0)
8989 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8990 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8991 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8992 {
8993 if (GET_CODE (temp) == REG
8994 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8995 temp = gen_reg_rtx (mode);
8996 store_expr (TREE_OPERAND (exp, 2), temp,
8997 modifier == EXPAND_STACK_PARM ? 2 : 0);
8998 jumpifnot (TREE_OPERAND (exp, 0), op0);
8999
9000 start_cleanup_deferral ();
9001 store_expr (TREE_OPERAND (exp, 1), temp,
9002 modifier == EXPAND_STACK_PARM ? 2 : 0);
9003 op1 = op0;
9004 }
9005 else
9006 {
9007 op1 = gen_label_rtx ();
9008 jumpifnot (TREE_OPERAND (exp, 0), op0);
9009
9010 start_cleanup_deferral ();
9011
9012 /* One branch of the cond can be void if it never returns. For
9013 example, A ? throw : E. */
9014 if (temp != 0
9015 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
9016 store_expr (TREE_OPERAND (exp, 1), temp,
9017 modifier == EXPAND_STACK_PARM ? 2 : 0);
9018 else
9019 expand_expr (TREE_OPERAND (exp, 1),
9020 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9021 end_cleanup_deferral ();
9022 emit_queue ();
9023 emit_jump_insn (gen_jump (op1));
9024 emit_barrier ();
9025 emit_label (op0);
9026 start_cleanup_deferral ();
9027 if (temp != 0
9028 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9029 store_expr (TREE_OPERAND (exp, 2), temp,
9030 modifier == EXPAND_STACK_PARM ? 2 : 0);
9031 else
9032 expand_expr (TREE_OPERAND (exp, 2),
9033 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9034 }
9035
9036 end_cleanup_deferral ();
9037
9038 emit_queue ();
9039 emit_label (op1);
9040 OK_DEFER_POP;
9041
9042 return temp;
9043 }
9044
9045 case TARGET_EXPR:
9046 {
9047 /* Something needs to be initialized, but we didn't know
9048 where that thing was when building the tree. For example,
9049 it could be the return value of a function, or a parameter
9051 to a function which is laid down on the stack, or a temporary
9051 variable which must be passed by reference.
9052
9053 We guarantee that the expression will either be constructed
9054 or copied into our original target. */
9055
9056 tree slot = TREE_OPERAND (exp, 0);
9057 tree cleanups = NULL_TREE;
9058 tree exp1;
9059
9060 if (TREE_CODE (slot) != VAR_DECL)
9061 abort ();
9062
9063 if (! ignore)
9064 target = original_target;
9065
9066 /* Set this here so that if we get a target that refers to a
9067 register variable that's already been used, put_reg_into_stack
9068 knows that it should fix up those uses. */
9069 TREE_USED (slot) = 1;
9070
9071 if (target == 0)
9072 {
9073 if (DECL_RTL_SET_P (slot))
9074 {
9075 target = DECL_RTL (slot);
9076 /* If we have already expanded the slot, don't do
9077 it again. (mrs) */
9078 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9079 return target;
9080 }
9081 else
9082 {
9083 target = assign_temp (type, 2, 0, 1);
9084 /* All temp slots at this level must not conflict. */
9085 preserve_temp_slots (target);
9086 SET_DECL_RTL (slot, target);
9087 if (TREE_ADDRESSABLE (slot))
9088 put_var_into_stack (slot, /*rescan=*/false);
9089
9090 /* Since SLOT is not known to the called function
9091 to belong to its stack frame, we must build an explicit
9092 cleanup. This case occurs when we must build up a reference
9093 to pass the reference as an argument. In this case,
9094 it is very likely that such a reference need not be
9095 built here. */
9096
9097 if (TREE_OPERAND (exp, 2) == 0)
9098 TREE_OPERAND (exp, 2)
9099 = (*lang_hooks.maybe_build_cleanup) (slot);
9100 cleanups = TREE_OPERAND (exp, 2);
9101 }
9102 }
9103 else
9104 {
9105 /* This case does occur when expanding a parameter which
9106 needs to be constructed on the stack. The target
9107 is the actual stack address that we want to initialize.
9108 The function we call will perform the cleanup in this case. */
9109
9110 /* If we have already assigned it space, use that space,
9111 not the target that we were passed in, as our target
9112 parameter is only a hint. */
9113 if (DECL_RTL_SET_P (slot))
9114 {
9115 target = DECL_RTL (slot);
9116 /* If we have already expanded the slot, don't do
9117 it again. (mrs) */
9118 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9119 return target;
9120 }
9121 else
9122 {
9123 SET_DECL_RTL (slot, target);
9124 /* If we must have an addressable slot, then make sure that
9125 the RTL that we just stored in slot is OK. */
9126 if (TREE_ADDRESSABLE (slot))
9127 put_var_into_stack (slot, /*rescan=*/true);
9128 }
9129 }
9130
9131 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9132 /* Mark it as expanded. */
9133 TREE_OPERAND (exp, 1) = NULL_TREE;
9134
9135 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9136
9137 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9138
9139 return target;
9140 }
9141
9142 case INIT_EXPR:
9143 {
9144 tree lhs = TREE_OPERAND (exp, 0);
9145 tree rhs = TREE_OPERAND (exp, 1);
9146
9147 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9148 return temp;
9149 }
9150
9151 case MODIFY_EXPR:
9152 {
9153 /* If lhs is complex, expand calls in rhs before computing it.
9154 That's so we don't compute a pointer and save it over a
9155 call. If lhs is simple, compute it first so we can give it
9156 as a target if the rhs is just a call. This avoids an
9157 extra temp and copy and that prevents a partial-subsumption
9158 which makes bad code. Actually we could treat
9159 component_ref's of vars like vars. */
9160
9161 tree lhs = TREE_OPERAND (exp, 0);
9162 tree rhs = TREE_OPERAND (exp, 1);
9163
9164 temp = 0;
9165
9166 /* Check for |= or &= of a bitfield of size one into another bitfield
9167 of size 1. In this case, (unless we need the result of the
9168 assignment) we can do this more efficiently with a
9169 test followed by an assignment, if necessary.
9170
9171 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9172 things change so we do, this code should be enhanced to
9173 support it. */
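/* Illustrative (hypothetical) example:
       struct s { unsigned int a : 1; } *p, *q;
       p->a |= q->a;
   can be expanded, when the value of the assignment is not needed, as
       if (q->a) p->a = 1;
   and likewise "p->a &= q->a;" as "if (! q->a) p->a = 0;".  */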
9174 if (ignore
9175 && TREE_CODE (lhs) == COMPONENT_REF
9176 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9177 || TREE_CODE (rhs) == BIT_AND_EXPR)
9178 && TREE_OPERAND (rhs, 0) == lhs
9179 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9180 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9181 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9182 {
9183 rtx label = gen_label_rtx ();
9184
9185 do_jump (TREE_OPERAND (rhs, 1),
9186 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9187 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9188 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9189 (TREE_CODE (rhs) == BIT_IOR_EXPR
9190 ? integer_one_node
9191 : integer_zero_node)),
9192 0, 0);
9193 do_pending_stack_adjust ();
9194 emit_label (label);
9195 return const0_rtx;
9196 }
9197
9198 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9199
9200 return temp;
9201 }
9202
9203 case RETURN_EXPR:
9204 if (!TREE_OPERAND (exp, 0))
9205 expand_null_return ();
9206 else
9207 expand_return (TREE_OPERAND (exp, 0));
9208 return const0_rtx;
9209
9210 case PREINCREMENT_EXPR:
9211 case PREDECREMENT_EXPR:
9212 return expand_increment (exp, 0, ignore);
9213
9214 case POSTINCREMENT_EXPR:
9215 case POSTDECREMENT_EXPR:
9216 /* Faster to treat as pre-increment if result is not used. */
9217 return expand_increment (exp, ! ignore, ignore);
9218
9219 case ADDR_EXPR:
9220 if (modifier == EXPAND_STACK_PARM)
9221 target = 0;
9222 /* Are we taking the address of a nested function? */
9223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9224 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9225 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9226 && ! TREE_STATIC (exp))
9227 {
9228 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9229 op0 = force_operand (op0, target);
9230 }
9231 /* If we are taking the address of something erroneous, just
9232 return a zero. */
9233 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9234 return const0_rtx;
9235 /* If we are taking the address of a constant and are at the
9236 top level, we have to use output_constant_def since we can't
9237 call force_const_mem at top level. */
9238 else if (cfun == 0
9239 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9240 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9241 == 'c')))
9242 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9243 else
9244 {
9245 /* We make sure to pass const0_rtx down if we came in with
9246 ignore set, to avoid doing the cleanups twice for something. */
9247 op0 = expand_expr (TREE_OPERAND (exp, 0),
9248 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9249 (modifier == EXPAND_INITIALIZER
9250 ? modifier : EXPAND_CONST_ADDRESS));
9251
9252 /* If we are going to ignore the result, OP0 will have been set
9253 to const0_rtx, so just return it. Don't get confused and
9254 think we are taking the address of the constant. */
9255 if (ignore)
9256 return op0;
9257
9258 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9259 clever and returns a REG when given a MEM. */
9260 op0 = protect_from_queue (op0, 1);
9261
9262 /* We would like the object in memory. If it is a constant, we can
9263 have it be statically allocated into memory. For a non-constant,
9264 we need to allocate some memory and store the value into it. */
9265
9266 if (CONSTANT_P (op0))
9267 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9268 op0);
9269 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9270 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9271 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9272 {
9273 /* If the operand is a SAVE_EXPR, we can deal with this by
9274 forcing the SAVE_EXPR into memory. */
9275 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9276 {
9277 put_var_into_stack (TREE_OPERAND (exp, 0),
9278 /*rescan=*/true);
9279 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9280 }
9281 else
9282 {
9283 /* If this object is in a register, it can't be BLKmode. */
9284 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9285 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9286
9287 if (GET_CODE (op0) == PARALLEL)
9288 /* Handle calls that pass values in multiple
9289 non-contiguous locations. The Irix 6 ABI has examples
9290 of this. */
9291 emit_group_store (memloc, op0, inner_type,
9292 int_size_in_bytes (inner_type));
9293 else
9294 emit_move_insn (memloc, op0);
9295
9296 op0 = memloc;
9297 }
9298 }
9299
9300 if (GET_CODE (op0) != MEM)
9301 abort ();
9302
9303 mark_temp_addr_taken (op0);
9304 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9305 {
9306 op0 = XEXP (op0, 0);
9307 #ifdef POINTERS_EXTEND_UNSIGNED
9308 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9309 && mode == ptr_mode)
9310 op0 = convert_memory_address (ptr_mode, op0);
9311 #endif
9312 return op0;
9313 }
9314
9315 /* If OP0 is not aligned at least as much as the type requires, we
9316 need to make a temporary, copy OP0 to it, and take the address of
9317 the temporary. We want to use the alignment of the type, not of
9318 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9319 the test for BLKmode means that can't happen. The test for
9320 BLKmode is because we never make mis-aligned MEMs with
9321 non-BLKmode.
9322
9323 We don't need to do this at all if the machine doesn't have
9324 strict alignment. */
9325 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9326 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9327 > MEM_ALIGN (op0))
9328 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9329 {
9330 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9331 rtx new;
9332
9333 if (TYPE_ALIGN_OK (inner_type))
9334 abort ();
9335
9336 if (TREE_ADDRESSABLE (inner_type))
9337 {
9338 /* We can't make a bitwise copy of this object, so fail. */
9339 error ("cannot take the address of an unaligned member");
9340 return const0_rtx;
9341 }
9342
9343 new = assign_stack_temp_for_type
9344 (TYPE_MODE (inner_type),
9345 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9346 : int_size_in_bytes (inner_type),
9347 1, build_qualified_type (inner_type,
9348 (TYPE_QUALS (inner_type)
9349 | TYPE_QUAL_CONST)));
9350
9351 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9352 (modifier == EXPAND_STACK_PARM
9353 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9354
9355 op0 = new;
9356 }
9357
9358 op0 = force_operand (XEXP (op0, 0), target);
9359 }
9360
9361 if (flag_force_addr
9362 && GET_CODE (op0) != REG
9363 && modifier != EXPAND_CONST_ADDRESS
9364 && modifier != EXPAND_INITIALIZER
9365 && modifier != EXPAND_SUM)
9366 op0 = force_reg (Pmode, op0);
9367
9368 if (GET_CODE (op0) == REG
9369 && ! REG_USERVAR_P (op0))
9370 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9371
9372 #ifdef POINTERS_EXTEND_UNSIGNED
9373 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9374 && mode == ptr_mode)
9375 op0 = convert_memory_address (ptr_mode, op0);
9376 #endif
9377
9378 return op0;
9379
9380 case ENTRY_VALUE_EXPR:
9381 abort ();
9382
9383 /* COMPLEX type for Extended Pascal & Fortran */
9384 case COMPLEX_EXPR:
9385 {
9386 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9387 rtx insns;
9388
9389 /* Get the rtx code of the operands. */
9390 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9391 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9392
9393 if (! target)
9394 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9395
9396 start_sequence ();
9397
9398 /* Move the real (op0) and imaginary (op1) parts to their location. */
9399 emit_move_insn (gen_realpart (mode, target), op0);
9400 emit_move_insn (gen_imagpart (mode, target), op1);
9401
9402 insns = get_insns ();
9403 end_sequence ();
9404
9405 /* Complex construction should appear as a single unit. */
9406 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9407 each with a separate pseudo as destination.
9408 It's not correct for flow to treat them as a unit. */
9409 if (GET_CODE (target) != CONCAT)
9410 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9411 else
9412 emit_insn (insns);
9413
9414 return target;
9415 }
9416
9417 case REALPART_EXPR:
9418 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9419 return gen_realpart (mode, op0);
9420
9421 case IMAGPART_EXPR:
9422 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9423 return gen_imagpart (mode, op0);
9424
9425 case CONJ_EXPR:
9426 {
9427 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9428 rtx imag_t;
9429 rtx insns;
9430
9431 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9432
9433 if (! target)
9434 target = gen_reg_rtx (mode);
9435
9436 start_sequence ();
9437
9438 /* Store the realpart and the negated imagpart to target. */
9439 emit_move_insn (gen_realpart (partmode, target),
9440 gen_realpart (partmode, op0));
9441
9442 imag_t = gen_imagpart (partmode, target);
9443 temp = expand_unop (partmode,
9444 ! unsignedp && flag_trapv
9445 && (GET_MODE_CLASS(partmode) == MODE_INT)
9446 ? negv_optab : neg_optab,
9447 gen_imagpart (partmode, op0), imag_t, 0);
9448 if (temp != imag_t)
9449 emit_move_insn (imag_t, temp);
9450
9451 insns = get_insns ();
9452 end_sequence ();
9453
9454 /* Conjugate should appear as a single unit.
9455 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9456 each with a separate pseudo as destination.
9457 It's not correct for flow to treat them as a unit. */
9458 if (GET_CODE (target) != CONCAT)
9459 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9460 else
9461 emit_insn (insns);
9462
9463 return target;
9464 }
9465
9466 case TRY_CATCH_EXPR:
9467 {
9468 tree handler = TREE_OPERAND (exp, 1);
9469
9470 expand_eh_region_start ();
9471
9472 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9473
9474 expand_eh_region_end_cleanup (handler);
9475
9476 return op0;
9477 }
9478
9479 case TRY_FINALLY_EXPR:
9480 {
9481 tree try_block = TREE_OPERAND (exp, 0);
9482 tree finally_block = TREE_OPERAND (exp, 1);
9483
9484 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9485 {
9486 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9487 is not sufficient, so we cannot expand the block twice.
9488 So we play games with GOTO_SUBROUTINE_EXPR to let us
9489 expand the thing only once. */
9490 /* When not optimizing, we go ahead with this form since
9491 (1) user breakpoints operate more predictably without
9492 code duplication, and
9493 (2) we're not running any of the global optimizers
9494 that would explode in time/space with the highly
9495 connected CFG created by the indirect branching. */
9496
9497 rtx finally_label = gen_label_rtx ();
9498 rtx done_label = gen_label_rtx ();
9499 rtx return_link = gen_reg_rtx (Pmode);
9500 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9501 (tree) finally_label, (tree) return_link);
9502 TREE_SIDE_EFFECTS (cleanup) = 1;
9503
9504 /* Start a new binding layer that will keep track of all cleanup
9505 actions to be performed. */
9506 expand_start_bindings (2);
9507 target_temp_slot_level = temp_slot_level;
9508
9509 expand_decl_cleanup (NULL_TREE, cleanup);
9510 op0 = expand_expr (try_block, target, tmode, modifier);
9511
9512 preserve_temp_slots (op0);
9513 expand_end_bindings (NULL_TREE, 0, 0);
9514 emit_jump (done_label);
9515 emit_label (finally_label);
9516 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9517 emit_indirect_jump (return_link);
9518 emit_label (done_label);
9519 }
9520 else
9521 {
9522 expand_start_bindings (2);
9523 target_temp_slot_level = temp_slot_level;
9524
9525 expand_decl_cleanup (NULL_TREE, finally_block);
9526 op0 = expand_expr (try_block, target, tmode, modifier);
9527
9528 preserve_temp_slots (op0);
9529 expand_end_bindings (NULL_TREE, 0, 0);
9530 }
9531
9532 return op0;
9533 }
9534
9535 case GOTO_SUBROUTINE_EXPR:
9536 {
9537 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9538 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9539 rtx return_address = gen_label_rtx ();
9540 emit_move_insn (return_link,
9541 gen_rtx_LABEL_REF (Pmode, return_address));
9542 emit_jump (subr);
9543 emit_label (return_address);
9544 return const0_rtx;
9545 }
9546
9547 case VA_ARG_EXPR:
9548 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9549
9550 case EXC_PTR_EXPR:
9551 return get_exception_pointer (cfun);
9552
9553 case FDESC_EXPR:
9554 /* Function descriptors are not valid except for as
9555 initialization constants, and should not be expanded. */
9556 abort ();
9557
9558 default:
9559 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9560 }
9561
9562 /* Here to do an ordinary binary operator, generating an instruction
9563 from the optab already placed in `this_optab'. */
9564 binop:
9565 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9566 subtarget = 0;
9567 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9568 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9569 binop2:
9570 if (modifier == EXPAND_STACK_PARM)
9571 target = 0;
9572 temp = expand_binop (mode, this_optab, op0, op1, target,
9573 unsignedp, OPTAB_LIB_WIDEN);
9574 if (temp == 0)
9575 abort ();
9576 return temp;
9577 }
9578 \f
9579 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9580 when applied to the address of EXP produces an address known to be
9581 aligned more than BIGGEST_ALIGNMENT. */
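/* Illustrative (hypothetical) example: source that aligns a buffer by
   hand, such as
       char buf[SIZE + ALIGN];
       ... buf[-(long) &buf & (ALIGN - 1)] ...
   with ALIGN a power of 2 larger than BIGGEST_ALIGNMENT, produces an
   OFFSET of the form BIT_AND_EXPR (NEGATE_EXPR (ADDR_EXPR buf),
   ALIGN - 1), which is the shape recognized below.  */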
9582
9583 static int
9584 is_aligning_offset (tree offset, tree exp)
9585 {
9586 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9587 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9588 || TREE_CODE (offset) == NOP_EXPR
9589 || TREE_CODE (offset) == CONVERT_EXPR
9590 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9591 offset = TREE_OPERAND (offset, 0);
9592
9593 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9594 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9595 if (TREE_CODE (offset) != BIT_AND_EXPR
9596 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9597 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9598 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9599 return 0;
9600
9601 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9602 It must be NEGATE_EXPR. Then strip any more conversions. */
9603 offset = TREE_OPERAND (offset, 0);
9604 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9605 || TREE_CODE (offset) == NOP_EXPR
9606 || TREE_CODE (offset) == CONVERT_EXPR)
9607 offset = TREE_OPERAND (offset, 0);
9608
9609 if (TREE_CODE (offset) != NEGATE_EXPR)
9610 return 0;
9611
9612 offset = TREE_OPERAND (offset, 0);
9613 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9614 || TREE_CODE (offset) == NOP_EXPR
9615 || TREE_CODE (offset) == CONVERT_EXPR)
9616 offset = TREE_OPERAND (offset, 0);
9617
9618 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9619 whose type is the same as EXP. */
9620 return (TREE_CODE (offset) == ADDR_EXPR
9621 && (TREE_OPERAND (offset, 0) == exp
9622 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9623 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9624 == TREE_TYPE (exp)))));
9625 }
9626 \f
9627 /* Return the tree node if an ARG corresponds to a string constant or zero
9628 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9629 in bytes within the string that ARG is accessing. The type of the
9630 offset will be `sizetype'. */
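/* Illustrative (hypothetical) example: for an ARG representing
   "hello" + 2, as in strlen ("hello" + 2), this returns the
   STRING_CST "hello" and sets *PTR_OFFSET to (sizetype) 2.  */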
9631
9632 tree
9633 string_constant (tree arg, tree *ptr_offset)
9634 {
9635 STRIP_NOPS (arg);
9636
9637 if (TREE_CODE (arg) == ADDR_EXPR
9638 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9639 {
9640 *ptr_offset = size_zero_node;
9641 return TREE_OPERAND (arg, 0);
9642 }
9643 else if (TREE_CODE (arg) == PLUS_EXPR)
9644 {
9645 tree arg0 = TREE_OPERAND (arg, 0);
9646 tree arg1 = TREE_OPERAND (arg, 1);
9647
9648 STRIP_NOPS (arg0);
9649 STRIP_NOPS (arg1);
9650
9651 if (TREE_CODE (arg0) == ADDR_EXPR
9652 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9653 {
9654 *ptr_offset = convert (sizetype, arg1);
9655 return TREE_OPERAND (arg0, 0);
9656 }
9657 else if (TREE_CODE (arg1) == ADDR_EXPR
9658 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9659 {
9660 *ptr_offset = convert (sizetype, arg0);
9661 return TREE_OPERAND (arg1, 0);
9662 }
9663 }
9664
9665 return 0;
9666 }
9667 \f
9668 /* Expand code for a post- or pre- increment or decrement
9669 and return the RTX for the result.
9670 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9671
9672 static rtx
9673 expand_increment (tree exp, int post, int ignore)
9674 {
9675 rtx op0, op1;
9676 rtx temp, value;
9677 tree incremented = TREE_OPERAND (exp, 0);
9678 optab this_optab = add_optab;
9679 int icode;
9680 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9681 int op0_is_copy = 0;
9682 int single_insn = 0;
9683 /* 1 means we can't store into OP0 directly,
9684 because it is a subreg narrower than a word,
9685 and we don't dare clobber the rest of the word. */
9686 int bad_subreg = 0;
9687
9688 /* Stabilize any component ref that might need to be
9689 evaluated more than once below. */
9690 if (!post
9691 || TREE_CODE (incremented) == BIT_FIELD_REF
9692 || (TREE_CODE (incremented) == COMPONENT_REF
9693 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9694 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9695 incremented = stabilize_reference (incremented);
9696 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9697 ones into save exprs so that they don't accidentally get evaluated
9698 more than once by the code below. */
9699 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9700 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9701 incremented = save_expr (incremented);
9702
9703 /* Compute the operands as RTX.
9704 Note whether OP0 is the actual lvalue or a copy of it:
9705 I believe it is a copy iff it is a register or subreg
9706 and insns were generated in computing it. */
9707
9708 temp = get_last_insn ();
9709 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9710
9711 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9712 in place but instead must do sign- or zero-extension during assignment,
9713 so we copy it into a new register and let the code below use it as
9714 a copy.
9715
9716 Note that we can safely modify this SUBREG since it is known not to be
9717 shared (it was made by the expand_expr call above). */
9718
9719 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9720 {
9721 if (post)
9722 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9723 else
9724 bad_subreg = 1;
9725 }
9726 else if (GET_CODE (op0) == SUBREG
9727 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9728 {
9729 /* We cannot increment this SUBREG in place. If we are
9730 post-incrementing, get a copy of the old value. Otherwise,
9731 just mark that we cannot increment in place. */
9732 if (post)
9733 op0 = copy_to_reg (op0);
9734 else
9735 bad_subreg = 1;
9736 }
9737
9738 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9739 && temp != get_last_insn ());
9740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9741
9742 /* Decide whether incrementing or decrementing. */
9743 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9744 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9745 this_optab = sub_optab;
9746
9747 /* Convert decrement by a constant into a negative increment. */
9748 if (this_optab == sub_optab
9749 && GET_CODE (op1) == CONST_INT)
9750 {
9751 op1 = GEN_INT (-INTVAL (op1));
9752 this_optab = add_optab;
9753 }
9754
9755 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9756 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9757
9758 /* For a preincrement, see if we can do this with a single instruction. */
9759 if (!post)
9760 {
9761 icode = (int) this_optab->handlers[(int) mode].insn_code;
9762 if (icode != (int) CODE_FOR_nothing
9763 /* Make sure that OP0 is valid for operands 0 and 1
9764 of the insn we want to queue. */
9765 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9766 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9767 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9768 single_insn = 1;
9769 }
9770
9771 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9772 then we cannot just increment OP0. We must therefore contrive to
9773 increment the original value. Then, for postincrement, we can return
9774 OP0 since it is a copy of the old value. For preincrement, expand here
9775 unless we can do it with a single insn.
9776
9777 Likewise if storing directly into OP0 would clobber high bits
9778 we need to preserve (bad_subreg). */
9779 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9780 {
9781 /* This is the easiest way to increment the value wherever it is.
9782 Problems with multiple evaluation of INCREMENTED are prevented
9783 because either (1) it is a component_ref or preincrement,
9784 in which case it was stabilized above, or (2) it is an array_ref
9785 with constant index in an array in a register, which is
9786 safe to reevaluate. */
9787 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9788 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9789 ? MINUS_EXPR : PLUS_EXPR),
9790 TREE_TYPE (exp),
9791 incremented,
9792 TREE_OPERAND (exp, 1));
9793
9794 while (TREE_CODE (incremented) == NOP_EXPR
9795 || TREE_CODE (incremented) == CONVERT_EXPR)
9796 {
9797 newexp = convert (TREE_TYPE (incremented), newexp);
9798 incremented = TREE_OPERAND (incremented, 0);
9799 }
9800
9801 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9802 return post ? op0 : temp;
9803 }
9804
9805 if (post)
9806 {
9807 /* We have a true reference to the value in OP0.
9808 If there is an insn to add or subtract in this mode, queue it.
9809 Queueing the increment insn avoids the register shuffling
9810 that often results if we must increment now and first save
9811 the old value for subsequent use. */
9812
9813 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9814 op0 = stabilize (op0);
9815 #endif
9816
9817 icode = (int) this_optab->handlers[(int) mode].insn_code;
9818 if (icode != (int) CODE_FOR_nothing
9819 /* Make sure that OP0 is valid for operands 0 and 1
9820 of the insn we want to queue. */
9821 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9822 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9823 {
9824 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9825 op1 = force_reg (mode, op1);
9826
9827 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9828 }
9829 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9830 {
9831 rtx addr = (general_operand (XEXP (op0, 0), mode)
9832 ? force_reg (Pmode, XEXP (op0, 0))
9833 : copy_to_reg (XEXP (op0, 0)));
9834 rtx temp, result;
9835
9836 op0 = replace_equiv_address (op0, addr);
9837 temp = force_reg (GET_MODE (op0), op0);
9838 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9839 op1 = force_reg (mode, op1);
9840
9841 /* The increment queue is LIFO, thus we have to `queue'
9842 the instructions in reverse order. */
9843 enqueue_insn (op0, gen_move_insn (op0, temp));
9844 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9845 return result;
9846 }
9847 }
9848
9849 /* Preincrement, or we can't increment with one simple insn. */
9850 if (post)
9851 /* Save a copy of the value before inc or dec, to return it later. */
9852 temp = value = copy_to_reg (op0);
9853 else
9854 /* Arrange to return the incremented value. */
9855 /* Copy the rtx because expand_binop will protect from the queue,
9856 and the results of that would be invalid for us to return
9857 if our caller does emit_queue before using our result. */
9858 temp = copy_rtx (value = op0);
9859
9860 /* Increment however we can. */
9861 op1 = expand_binop (mode, this_optab, value, op1, op0,
9862 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9863
9864 /* Make sure the value is stored into OP0. */
9865 if (op1 != op0)
9866 emit_move_insn (op0, op1);
9867
9868 return temp;
9869 }
9870 \f
9871 /* Generate code to calculate EXP using a store-flag instruction
9872 and return an rtx for the result. EXP is either a comparison
9873 or a TRUTH_NOT_EXPR whose operand is a comparison.
9874
9875 If TARGET is nonzero, store the result there if convenient.
9876
9877 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9878 cheap.
9879
9880 Return zero if there is no suitable set-flag instruction
9881 available on this machine.
9882
9883 Once expand_expr has been called on the arguments of the comparison,
9884 we are committed to doing the store flag, since it is not safe to
9885 re-evaluate the expression. We emit the store-flag insn by calling
9886 emit_store_flag, but only expand the arguments if we have a reason
9887 to believe that emit_store_flag will be successful. If we think that
9888 it will, but it isn't, we have to simulate the store-flag with a
9889 set/jump/set sequence. */
9890
9891 static rtx
9892 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9893 {
9894 enum rtx_code code;
9895 tree arg0, arg1, type;
9896 tree tem;
9897 enum machine_mode operand_mode;
9898 int invert = 0;
9899 int unsignedp;
9900 rtx op0, op1;
9901 enum insn_code icode;
9902 rtx subtarget = target;
9903 rtx result, label;
9904
9905 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9906 result at the end. We can't simply invert the test since it would
9907 have already been inverted if it were valid. This case occurs for
9908 some floating-point comparisons. */
9909
9910 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9911 invert = 1, exp = TREE_OPERAND (exp, 0);
9912
9913 arg0 = TREE_OPERAND (exp, 0);
9914 arg1 = TREE_OPERAND (exp, 1);
9915
9916 /* Don't crash if the comparison was erroneous. */
9917 if (arg0 == error_mark_node || arg1 == error_mark_node)
9918 return const0_rtx;
9919
9920 type = TREE_TYPE (arg0);
9921 operand_mode = TYPE_MODE (type);
9922 unsignedp = TREE_UNSIGNED (type);
9923
9924 /* We won't bother with BLKmode store-flag operations because it would mean
9925 passing a lot of information to emit_store_flag. */
9926 if (operand_mode == BLKmode)
9927 return 0;
9928
9929 /* We won't bother with store-flag operations involving function pointers
9930 when function pointers must be canonicalized before comparisons. */
9931 #ifdef HAVE_canonicalize_funcptr_for_compare
9932 if (HAVE_canonicalize_funcptr_for_compare
9933 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9934 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9935 == FUNCTION_TYPE))
9936 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9937 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9938 == FUNCTION_TYPE))))
9939 return 0;
9940 #endif
9941
9942 STRIP_NOPS (arg0);
9943 STRIP_NOPS (arg1);
9944
9945 /* Get the rtx comparison code to use. We know that EXP is a comparison
9946 operation of some type. Some comparisons against 1 and -1 can be
9947 converted to comparisons with zero. Do so here so that the tests
9948 below will be aware that we have a comparison with zero. These
9949 tests will not catch constants in the first operand, but constants
9950 are rarely passed as the first operand. */
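/* Illustrative (hypothetical) examples: a signed "x < 1" is treated
   as "x <= 0" and a signed "x <= -1" as "x < 0", so the special
   cases for comparisons against zero below apply to them too.  */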
9951
9952 switch (TREE_CODE (exp))
9953 {
9954 case EQ_EXPR:
9955 code = EQ;
9956 break;
9957 case NE_EXPR:
9958 code = NE;
9959 break;
9960 case LT_EXPR:
9961 if (integer_onep (arg1))
9962 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9963 else
9964 code = unsignedp ? LTU : LT;
9965 break;
9966 case LE_EXPR:
9967 if (! unsignedp && integer_all_onesp (arg1))
9968 arg1 = integer_zero_node, code = LT;
9969 else
9970 code = unsignedp ? LEU : LE;
9971 break;
9972 case GT_EXPR:
9973 if (! unsignedp && integer_all_onesp (arg1))
9974 arg1 = integer_zero_node, code = GE;
9975 else
9976 code = unsignedp ? GTU : GT;
9977 break;
9978 case GE_EXPR:
9979 if (integer_onep (arg1))
9980 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9981 else
9982 code = unsignedp ? GEU : GE;
9983 break;
9984
9985 case UNORDERED_EXPR:
9986 code = UNORDERED;
9987 break;
9988 case ORDERED_EXPR:
9989 code = ORDERED;
9990 break;
9991 case UNLT_EXPR:
9992 code = UNLT;
9993 break;
9994 case UNLE_EXPR:
9995 code = UNLE;
9996 break;
9997 case UNGT_EXPR:
9998 code = UNGT;
9999 break;
10000 case UNGE_EXPR:
10001 code = UNGE;
10002 break;
10003 case UNEQ_EXPR:
10004 code = UNEQ;
10005 break;
10006
10007 default:
10008 abort ();
10009 }
10010
10011 /* Put a constant second. */
10012 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10013 {
10014 tem = arg0; arg0 = arg1; arg1 = tem;
10015 code = swap_condition (code);
10016 }
10017
10018 /* If this is an equality or inequality test of a single bit, we can
10019 do this by shifting the bit being tested to the low-order bit and
10020 masking the result with the constant 1. If the condition was EQ,
10021 we xor it with 1. This does not require an scc insn and is faster
10022 than an scc insn even if we have it.
10023
10024 The code to make this transformation was moved into fold_single_bit_test,
10025 so we just call into the folder and expand its result. */
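/* Illustrative (hypothetical) example: "(x & 4) != 0" becomes
   "(x >> 2) & 1", and "(x & 4) == 0" becomes "((x >> 2) & 1) ^ 1".  */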
10026
10027 if ((code == NE || code == EQ)
10028 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10029 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10030 {
10031 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
10032 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
10033 arg0, arg1, type),
10034 target, VOIDmode, EXPAND_NORMAL);
10035 }
10036
10037 /* Now see if we are likely to be able to do this. Return if not. */
10038 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10039 return 0;
10040
10041 icode = setcc_gen_code[(int) code];
10042 if (icode == CODE_FOR_nothing
10043 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10044 {
10045 /* We can only do this if it is one of the special cases that
10046 can be handled without an scc insn. */
10047 if ((code == LT && integer_zerop (arg1))
10048 || (! only_cheap && code == GE && integer_zerop (arg1)))
10049 ;
10050 else if (BRANCH_COST >= 0
10051 && ! only_cheap && (code == NE || code == EQ)
10052 && TREE_CODE (type) != REAL_TYPE
10053 && ((abs_optab->handlers[(int) operand_mode].insn_code
10054 != CODE_FOR_nothing)
10055 || (ffs_optab->handlers[(int) operand_mode].insn_code
10056 != CODE_FOR_nothing)))
10057 ;
10058 else
10059 return 0;
10060 }
10061
10062 if (! get_subtarget (target)
10063 || GET_MODE (subtarget) != operand_mode
10064 || ! safe_from_p (subtarget, arg1, 1))
10065 subtarget = 0;
10066
10067 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10068 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10069
10070 if (target == 0)
10071 target = gen_reg_rtx (mode);
10072
10073 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10074 because, if emit_store_flag does anything, it will succeed and
10075 OP0 and OP1 will not be used subsequently. */
10076
10077 result = emit_store_flag (target, code,
10078 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10079 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10080 operand_mode, unsignedp, 1);
10081
10082 if (result)
10083 {
10084 if (invert)
10085 result = expand_binop (mode, xor_optab, result, const1_rtx,
10086 result, 0, OPTAB_LIB_WIDEN);
10087 return result;
10088 }
10089
10090 /* If this failed, we have to do this with set/compare/jump/set code. */
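/* In the non-inverted case the emitted sequence is roughly this
   (illustrative pseudo-code):
       target = 1;
       if (op0 CODE op1) goto done;
       target = 0;
     done: ;  */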
10091 if (GET_CODE (target) != REG
10092 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10093 target = gen_reg_rtx (GET_MODE (target));
10094
10095 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10096 result = compare_from_rtx (op0, op1, code, unsignedp,
10097 operand_mode, NULL_RTX);
10098 if (GET_CODE (result) == CONST_INT)
10099 return (((result == const0_rtx && ! invert)
10100 || (result != const0_rtx && invert))
10101 ? const0_rtx : const1_rtx);
10102
10103 /* The code of RESULT may not match CODE if compare_from_rtx
10104 decided to swap its operands and reverse the original code.
10105
10106 We know that compare_from_rtx returns either a CONST_INT or
10107 a new comparison code, so it is safe to just extract the
10108 code from RESULT. */
10109 code = GET_CODE (result);
10110
10111 label = gen_label_rtx ();
10112 if (bcc_gen_fctn[(int) code] == 0)
10113 abort ();
10114
10115 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10116 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10117 emit_label (label);
10118
10119 return target;
10120 }
10121 \f
10122
10123 /* Stubs in case we haven't got a casesi insn. */
10124 #ifndef HAVE_casesi
10125 # define HAVE_casesi 0
10126 # define gen_casesi(a, b, c, d, e) (0)
10127 # define CODE_FOR_casesi CODE_FOR_nothing
10128 #endif
10129
10130 /* If the machine does not have a case insn that compares the bounds,
10131 this means extra overhead for dispatch tables, which raises the
10132 threshold for using them. */
10133 #ifndef CASE_VALUES_THRESHOLD
10134 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10135 #endif /* CASE_VALUES_THRESHOLD */
10136
10137 unsigned int
10138 case_values_threshold (void)
10139 {
10140 return CASE_VALUES_THRESHOLD;
10141 }
10142
10143 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10144 0 otherwise (i.e. if there is no casesi instruction). */
10145 int
10146 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10147 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
10148 {
10149 enum machine_mode index_mode = SImode;
10150 int index_bits = GET_MODE_BITSIZE (index_mode);
10151 rtx op1, op2, index;
10152 enum machine_mode op_mode;
10153
10154 if (! HAVE_casesi)
10155 return 0;
10156
10157 /* Convert the index to SImode. */
10158 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10159 {
10160 enum machine_mode omode = TYPE_MODE (index_type);
10161 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10162
10163 /* We must handle the endpoints in the original mode. */
10164 index_expr = build (MINUS_EXPR, index_type,
10165 index_expr, minval);
10166 minval = integer_zero_node;
10167 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10168 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10169 omode, 1, default_label);
10170 /* Now we can safely truncate. */
10171 index = convert_to_mode (index_mode, index, 0);
10172 }
10173 else
10174 {
10175 if (TYPE_MODE (index_type) != index_mode)
10176 {
10177 index_expr = convert ((*lang_hooks.types.type_for_size)
10178 (index_bits, 0), index_expr);
10179 index_type = TREE_TYPE (index_expr);
10180 }
10181
10182 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10183 }
10184 emit_queue ();
10185 index = protect_from_queue (index, 0);
10186 do_pending_stack_adjust ();
10187
10188 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10189 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10190 (index, op_mode))
10191 index = copy_to_mode_reg (op_mode, index);
10192
10193 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10194
10195 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10196 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10197 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10198 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10199 (op1, op_mode))
10200 op1 = copy_to_mode_reg (op_mode, op1);
10201
10202 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10203
10204 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10205 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10206 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10207 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10208 (op2, op_mode))
10209 op2 = copy_to_mode_reg (op_mode, op2);
10210
10211 emit_jump_insn (gen_casesi (index, op1, op2,
10212 table_label, default_label));
10213 return 1;
10214 }
10215
10216 /* Attempt to generate a tablejump instruction; same concept. */
10217 #ifndef HAVE_tablejump
10218 #define HAVE_tablejump 0
10219 #define gen_tablejump(x, y) (0)
10220 #endif
10221
10222 /* Subroutine of the next function.
10223
10224 INDEX is the value being switched on, with the lowest value
10225 in the table already subtracted.
10226 MODE is its expected mode (needed if INDEX is constant).
10227 RANGE is the length of the jump table.
10228 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10229
10230 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10231 index value is out of range. */
10232
10233 static void
10234 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10235 rtx default_label)
10236 {
10237 rtx temp, vector;
10238
10239 if (INTVAL (range) > cfun->max_jumptable_ents)
10240 cfun->max_jumptable_ents = INTVAL (range);
10241
10242 /* Do an unsigned comparison (in the proper mode) between the index
10243 expression and the value which represents the length of the range.
10244 Since we just finished subtracting the lower bound of the range
10245 from the index expression, this comparison allows us to simultaneously
10246 check that the original index expression value is both greater than
10247 or equal to the minimum value of the range and less than or equal to
10248 the maximum value of the range. */
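/* With LOW the smallest case value, this is in effect (illustrative
   pseudo-code):
       if ((unsigned) (index - LOW) > (unsigned) (HIGH - LOW))
         goto default_label;
   which rejects both index < LOW and index > HIGH in a single test,
   because an index below LOW wraps around to a large unsigned value.  */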
10249
10250 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10251 default_label);
10252
10253 /* If index is in range, it must fit in Pmode.
10254 Convert to Pmode so we can index with it. */
10255 if (mode != Pmode)
10256 index = convert_to_mode (Pmode, index, 1);
10257
10258 /* Don't let a MEM slip through, because then INDEX that comes
10259 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10260 and break_out_memory_refs will go to work on it and mess it up. */
10261 #ifdef PIC_CASE_VECTOR_ADDRESS
10262 if (flag_pic && GET_CODE (index) != REG)
10263 index = copy_to_mode_reg (Pmode, index);
10264 #endif
10265
10266 /* If flag_force_addr were to affect this address
10267 it could interfere with the tricky assumptions made
10268 about addresses that contain label-refs,
10269 which may be valid only very near the tablejump itself. */
10270 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10271 GET_MODE_SIZE, because this indicates how large insns are. The other
10272 uses should all be Pmode, because they are addresses. This code
10273 could fail if addresses and insns are not the same size. */
10274 index = gen_rtx_PLUS (Pmode,
10275 gen_rtx_MULT (Pmode, index,
10276 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10277 gen_rtx_LABEL_REF (Pmode, table_label));
10278 #ifdef PIC_CASE_VECTOR_ADDRESS
10279 if (flag_pic)
10280 index = PIC_CASE_VECTOR_ADDRESS (index);
10281 else
10282 #endif
10283 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10284 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10285 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10286 RTX_UNCHANGING_P (vector) = 1;
10287 MEM_NOTRAP_P (vector) = 1;
10288 convert_move (temp, vector, 0);
10289
10290 emit_jump_insn (gen_tablejump (temp, table_label));
10291
10292 /* If we are generating PIC code or if the table is PC-relative, the
10293 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10294 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10295 emit_barrier ();
10296 }
10297
10298 int
10299 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10300 rtx table_label, rtx default_label)
10301 {
10302 rtx index;
10303
10304 if (! HAVE_tablejump)
10305 return 0;
10306
10307 index_expr = fold (build (MINUS_EXPR, index_type,
10308 convert (index_type, index_expr),
10309 convert (index_type, minval)));
10310 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10311 emit_queue ();
10312 index = protect_from_queue (index, 0);
10313 do_pending_stack_adjust ();
10314
10315 do_tablejump (index, TYPE_MODE (index_type),
10316 convert_modes (TYPE_MODE (index_type),
10317 TYPE_MODE (TREE_TYPE (range)),
10318 expand_expr (range, NULL_RTX,
10319 VOIDmode, 0),
10320 TREE_UNSIGNED (TREE_TYPE (range))),
10321 table_label, default_label);
10322 return 1;
10323 }
10324
10325 /* Nonzero if the mode is a valid vector mode for this architecture.
10326 This returns nonzero even if there is no hardware support for the
10327 vector mode, but we can emulate with narrower modes. */
10328
10329 int
10330 vector_mode_valid_p (enum machine_mode mode)
10331 {
10332 enum mode_class class = GET_MODE_CLASS (mode);
10333 enum machine_mode innermode;
10334
10335 /* Doh! What's going on? */
10336 if (class != MODE_VECTOR_INT
10337 && class != MODE_VECTOR_FLOAT)
10338 return 0;
10339
10340 /* Hardware support. Woo hoo! */
10341 if (VECTOR_MODE_SUPPORTED_P (mode))
10342 return 1;
10343
10344 innermode = GET_MODE_INNER (mode);
10345
10346 /* We should probably return 1 if requesting V4DI when we have no DI
10347 but do have V2DI, but such a situation is probably very unlikely. */
10348
10349 /* If we have support for the inner mode, we can safely emulate it.
10350 We may not have V2DI, but we can emulate with a pair of DIs. */
10351 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10352 }
10353
10354 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10355 static rtx
10356 const_vector_from_tree (tree exp)
10357 {
10358 rtvec v;
10359 int units, i;
10360 tree link, elt;
10361 enum machine_mode inner, mode;
10362
10363 mode = TYPE_MODE (TREE_TYPE (exp));
10364
10365 if (is_zeros_p (exp))
10366 return CONST0_RTX (mode);
10367
10368 units = GET_MODE_NUNITS (mode);
10369 inner = GET_MODE_INNER (mode);
10370
10371 v = rtvec_alloc (units);
10372
10373 link = TREE_VECTOR_CST_ELTS (exp);
10374 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10375 {
10376 elt = TREE_VALUE (link);
10377
10378 if (TREE_CODE (elt) == REAL_CST)
10379 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10380 inner);
10381 else
10382 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10383 TREE_INT_CST_HIGH (elt),
10384 inner);
10385 }
10386
10387 /* Initialize remaining elements to 0. */
10388 for (; i < units; ++i)
10389 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10390
10391 return gen_rtx_raw_CONST_VECTOR (mode, v);
10392 }
10393
10394 #include "gt-expr.h"