1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
64
65 #endif
66
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
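/* As an illustration of the default above: on a target whose stack grows
   downward, a push of an SImode value is emitted as RTL roughly of the
   form

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI n))

   whereas with an upward-growing stack the default addressing is
   (pre_inc:SI (reg:SI sp)) instead.  */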
74
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
79
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
87
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
116
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
131 };
132
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169
170 static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
172 tree));
173
174 static int is_aligning_offset PARAMS ((tree, tree));
175 static rtx expand_increment PARAMS ((tree, int, int));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 static rtx const_vector_from_tree PARAMS ((tree));
182
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
186
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
189
190 /* Record for each mode whether we can float-extend from memory. */
191
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
193
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
196
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
205
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
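/* As a worked example, assume a 32-bit target with MOVE_MAX == 4 and no
   movstrsi pattern: copying a 16-byte, word-aligned block needs four
   SImode moves, so MOVE_BY_PIECES_P (16, 32) compares 4 against
   MOVE_RATIO.  With the default ratio of 15 the copy is expanded inline
   by move_by_pieces; under -Os (ratio 3) it is not.  */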
212
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
215
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
224
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
231
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
238
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
241
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
244
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
246
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
250 \f
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
253
254 void
255 init_expr_once ()
256 {
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
262
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
268
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
272
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
276
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
279 {
280 int regno;
281
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
286
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
289
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
294 {
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
297
298 REGNO (reg) = regno;
299
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
304
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
309
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
314
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
319 }
320 }
321
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
323
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
326 {
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
330 {
331 enum insn_code ic;
332
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
336
337 PUT_MODE (mem, srcmode);
338
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
341 }
342 }
343 }
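/* For example, on a target whose movsi pattern accepts a plain register
   and a stack-pointer-relative memory operand, the recog calls above
   leave direct_load[(int) SImode] and direct_store[(int) SImode] set to
   1, and float_extend_from_mem[DFmode][SFmode] becomes true whenever the
   extendsfdf2 insn accepts a memory source.  The actual contents of
   these tables depend entirely on the machine description.  */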
344
345 /* This is run at the start of compiling a function. */
346
347 void
348 init_expr ()
349 {
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
351
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
359 }
360
361 /* Small sanity check that the queue is empty at the end of a function. */
362
363 void
364 finish_expr_for_function ()
365 {
366 if (pending_chain)
367 abort ();
368 }
369 \f
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
372
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
376
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
379
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
383 {
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
387 }
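/* A minimal usage sketch, assuming VAR is a register that some
   POSTINCREMENT_EXPR must bump by one:

       rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
       ... Q (a QUEUED rtx) stands for the pre-increment value of VAR and
       must go through protect_from_queue before being placed in an insn ...
       emit_queue ();

   emit_queue then emits the queued increment and clears the chain.  */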
388
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
395
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
399
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
403
404 rtx
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
408 {
409 RTX_CODE code = GET_CODE (x);
410
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
416
417 if (code != QUEUED)
418 {
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
426 {
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
429
430 if (QUEUED_INSN (y))
431 {
432 rtx temp = gen_reg_rtx (GET_MODE (x));
433
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
437 }
438
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
442 }
443
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
447 {
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
450 {
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
453 }
454 }
455 else if (code == PLUS || code == MULT)
456 {
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
460 {
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
464 }
465 }
466 return x;
467 }
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
483 }
484
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
489
490 int
491 queued_subexp_p (x)
492 rtx x;
493 {
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
496 {
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
508 }
509 }
510
511 /* Perform all the pending incrementations. */
512
513 void
514 emit_queue ()
515 {
516 rtx p;
517 while ((p = pending_chain))
518 {
519 rtx body = QUEUED_BODY (p);
520
521 switch (GET_CODE (body))
522 {
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
532
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
538
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
542 }
543
544 pending_chain = QUEUED_NEXT (p);
545 }
546 }
547 \f
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
552
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
557 {
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
564
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
568
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
571
572 if (to_real != from_real)
573 abort ();
574
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
578
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
584
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
587
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
590 {
591 emit_move_insn (to, from);
592 return;
593 }
594
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
596 {
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
599
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
604
605 emit_move_insn (to, from);
606 return;
607 }
608
609 if (to_real != from_real)
610 abort ();
611
612 if (to_real)
613 {
614 rtx value, insns;
615
616 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
617 {
618 /* Try converting directly if the insn is supported. */
619 if ((code = can_extend_p (to_mode, from_mode, 0))
620 != CODE_FOR_nothing)
621 {
622 emit_unop_insn (code, to, from, UNKNOWN);
623 return;
624 }
625 }
626
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
629 {
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
632 }
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
636 {
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
643 {
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
646 }
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
650 {
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
653 }
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
657 {
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
660 }
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
664 {
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
672 {
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
675 }
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
679 {
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
682 }
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
689 }
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
696 }
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
700 {
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
708 {
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
711 }
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
715 {
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
718 }
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
722 {
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
725 }
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
729 {
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
737 {
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
740 }
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
744 {
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
747 }
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
751 {
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
754 }
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
758 {
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
761 }
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
765 {
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
768 }
769 #endif
770
771 libcall = (rtx) 0;
772 switch (from_mode)
773 {
774 case SFmode:
775 switch (to_mode)
776 {
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
780
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
784
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
788
789 default:
790 break;
791 }
792 break;
793
794 case DFmode:
795 switch (to_mode)
796 {
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
800
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
804
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
808
809 default:
810 break;
811 }
812 break;
813
814 case XFmode:
815 switch (to_mode)
816 {
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
820
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
824
825 default:
826 break;
827 }
828 break;
829
830 case TFmode:
831 switch (to_mode)
832 {
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
836
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
840
841 default:
842 break;
843 }
844 break;
845
846 default:
847 break;
848 }
849
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
853
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
862 }
863
864 /* Now both modes are integers. */
865
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
869 {
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
877
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
881 {
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
890 }
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
895 {
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
902 }
903
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
906
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
909
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
912
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
918
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
920
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
923
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
928 {
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
933 {
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
938 }
939 else
940 #endif
941 {
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
947 }
948 }
949
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
952 {
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
955
956 if (subword == 0)
957 abort ();
958
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
961 }
962
963 insns = get_insns ();
964 end_sequence ();
965
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
969 }
970
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
974 {
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
984 }
985
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
988 {
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
991
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
994 {
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
997 }
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1000 }
1001
1002 if (from_mode == PQImode)
1003 {
1004 if (to_mode != QImode)
1005 {
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1008 }
1009 else
1010 {
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1013 {
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1016 }
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1019 }
1020 }
1021
1022 if (to_mode == PSImode)
1023 {
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1026
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1029 {
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1032 }
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1035 }
1036
1037 if (from_mode == PSImode)
1038 {
1039 if (to_mode != SImode)
1040 {
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1043 }
1044 else
1045 {
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1048 {
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1051 }
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1055 {
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1058 }
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1061 }
1062 }
1063
1064 if (to_mode == PDImode)
1065 {
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1068
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1071 {
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1074 }
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1077 }
1078
1079 if (from_mode == PDImode)
1080 {
1081 if (to_mode != DImode)
1082 {
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1085 }
1086 else
1087 {
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1090 {
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1093 }
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1096 }
1097 }
1098
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1101
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1106 {
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1119 }
1120
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1123 {
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1127 {
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1130
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1133 }
1134 else
1135 {
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1139
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1150 {
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1154 }
1155
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1168 }
1169 }
1170
1171 /* Support special truncate insns for certain modes. */
1172
1173 if (from_mode == DImode && to_mode == SImode)
1174 {
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1177 {
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1180 }
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1184 }
1185
1186 if (from_mode == DImode && to_mode == HImode)
1187 {
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1190 {
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1193 }
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1197 }
1198
1199 if (from_mode == DImode && to_mode == QImode)
1200 {
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1203 {
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1206 }
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1210 }
1211
1212 if (from_mode == SImode && to_mode == HImode)
1213 {
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1216 {
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1219 }
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1223 }
1224
1225 if (from_mode == SImode && to_mode == QImode)
1226 {
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1229 {
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1232 }
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1236 }
1237
1238 if (from_mode == HImode && to_mode == QImode)
1239 {
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1242 {
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1245 }
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1249 }
1250
1251 if (from_mode == TImode && to_mode == DImode)
1252 {
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1255 {
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1258 }
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1262 }
1263
1264 if (from_mode == TImode && to_mode == SImode)
1265 {
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1268 {
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1271 }
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1275 }
1276
1277 if (from_mode == TImode && to_mode == HImode)
1278 {
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1281 {
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1284 }
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1288 }
1289
1290 if (from_mode == TImode && to_mode == QImode)
1291 {
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1294 {
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1297 }
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1301 }
1302
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1307 {
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1311 }
1312
1313 /* Mode combination is not recognized. */
1314 abort ();
1315 }
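/* As a worked example of the multiword case above, assume a 32-bit
   target with no extendsidi2 pattern.  Sign-extending an SImode value
   into a DImode register stores the value in the low word and fills the
   high word with the value shifted arithmetically right by 31 bits,
   i.e. copies of the sign bit; for an unsigned extension the high word
   is simply zero.  */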
1316
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1323
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1326
1327 rtx
1328 convert_to_mode (mode, x, unsignedp)
1329 enum machine_mode mode;
1330 rtx x;
1331 int unsignedp;
1332 {
1333 return convert_modes (mode, VOIDmode, x, unsignedp);
1334 }
1335
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1340
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1343
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1345
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
1348
1349 rtx
1350 convert_modes (mode, oldmode, x, unsignedp)
1351 enum machine_mode mode, oldmode;
1352 rtx x;
1353 int unsignedp;
1354 {
1355 rtx temp;
1356
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1359
1360 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1363 x = gen_lowpart (mode, x);
1364
1365 if (GET_MODE (x) != VOIDmode)
1366 oldmode = GET_MODE (x);
1367
1368 if (mode == oldmode)
1369 return x;
1370
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
   1374      the wrong thing if the constant appears negative.  What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
1376
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1380 {
1381 HOST_WIDE_INT val = INTVAL (x);
1382
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1385 {
1386 int width = GET_MODE_BITSIZE (oldmode);
1387
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1390 }
1391
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1393 }
1394
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1399
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && (! HARD_REGISTER_P (x)
1410 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1411 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1412 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1413 {
   1414   /* ??? If we don't know OLDMODE, we have to assume here that
1415 X does not need sign- or zero-extension. This may not be
1416 the case, but it's the best we can do. */
1417 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1418 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1419 {
1420 HOST_WIDE_INT val = INTVAL (x);
1421 int width = GET_MODE_BITSIZE (oldmode);
1422
1423 /* We must sign or zero-extend in this case. Start by
1424 zero-extending, then sign extend if we need to. */
1425 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1426 if (! unsignedp
1427 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1428 val |= (HOST_WIDE_INT) (-1) << width;
1429
1430 return gen_int_mode (val, mode);
1431 }
1432
1433 return gen_lowpart (mode, x);
1434 }
1435
1436 temp = gen_reg_rtx (mode);
1437 convert_move (temp, x, unsignedp);
1438 return temp;
1439 }
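/* Two worked examples of the constant handling above, assuming
   HOST_WIDE_INT is at least 32 bits wide:

       convert_modes (SImode, QImode, GEN_INT (-1), 1)

   zero-extends and yields (const_int 255), while the same call with
   UNSIGNEDP == 0 sign-extends and yields (const_int -1).  */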
1440 \f
   1441 /* This macro determines the largest unit size that move_by_pieces
   1442    can use.  */
1443
1444 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1445 move efficiently, as opposed to MOVE_MAX which is the maximum
1446 number of bytes we can move with a single instruction. */
1447
1448 #ifndef MOVE_MAX_PIECES
1449 #define MOVE_MAX_PIECES MOVE_MAX
1450 #endif
1451
1452 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1453 store efficiently. Due to internal GCC limitations, this is
1454 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1455 for an immediate constant. */
1456
1457 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1458
1459 /* Generate several move instructions to copy LEN bytes from block FROM to
1460 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1461 and TO through protect_from_queue before calling.
1462
1463 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1464 used to push FROM to the stack.
1465
1466 ALIGN is maximum alignment we can assume. */
1467
1468 void
1469 move_by_pieces (to, from, len, align)
1470 rtx to, from;
1471 unsigned HOST_WIDE_INT len;
1472 unsigned int align;
1473 {
1474 struct move_by_pieces data;
1475 rtx to_addr, from_addr = XEXP (from, 0);
1476 unsigned int max_size = MOVE_MAX_PIECES + 1;
1477 enum machine_mode mode = VOIDmode, tmode;
1478 enum insn_code icode;
1479
1480 data.offset = 0;
1481 data.from_addr = from_addr;
1482 if (to)
1483 {
1484 to_addr = XEXP (to, 0);
1485 data.to = to;
1486 data.autinc_to
1487 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1488 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1489 data.reverse
1490 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1491 }
1492 else
1493 {
1494 to_addr = NULL_RTX;
1495 data.to = NULL_RTX;
1496 data.autinc_to = 1;
1497 #ifdef STACK_GROWS_DOWNWARD
1498 data.reverse = 1;
1499 #else
1500 data.reverse = 0;
1501 #endif
1502 }
1503 data.to_addr = to_addr;
1504 data.from = from;
1505 data.autinc_from
1506 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1507 || GET_CODE (from_addr) == POST_INC
1508 || GET_CODE (from_addr) == POST_DEC);
1509
1510 data.explicit_inc_from = 0;
1511 data.explicit_inc_to = 0;
1512 if (data.reverse) data.offset = len;
1513 data.len = len;
1514
1515 /* If copying requires more than two move insns,
1516 copy addresses to registers (to make displacements shorter)
1517 and use post-increment if available. */
1518 if (!(data.autinc_from && data.autinc_to)
1519 && move_by_pieces_ninsns (len, align) > 2)
1520 {
1521 /* Find the mode of the largest move... */
1522 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1523 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1524 if (GET_MODE_SIZE (tmode) < max_size)
1525 mode = tmode;
1526
1527 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1528 {
1529 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1530 data.autinc_from = 1;
1531 data.explicit_inc_from = -1;
1532 }
1533 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1534 {
1535 data.from_addr = copy_addr_to_reg (from_addr);
1536 data.autinc_from = 1;
1537 data.explicit_inc_from = 1;
1538 }
1539 if (!data.autinc_from && CONSTANT_P (from_addr))
1540 data.from_addr = copy_addr_to_reg (from_addr);
1541 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1542 {
1543 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1544 data.autinc_to = 1;
1545 data.explicit_inc_to = -1;
1546 }
1547 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1548 {
1549 data.to_addr = copy_addr_to_reg (to_addr);
1550 data.autinc_to = 1;
1551 data.explicit_inc_to = 1;
1552 }
1553 if (!data.autinc_to && CONSTANT_P (to_addr))
1554 data.to_addr = copy_addr_to_reg (to_addr);
1555 }
1556
1557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1559 align = MOVE_MAX * BITS_PER_UNIT;
1560
1561 /* First move what we can in the largest integer mode, then go to
1562 successively smaller modes. */
1563
1564 while (max_size > 1)
1565 {
1566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1568 if (GET_MODE_SIZE (tmode) < max_size)
1569 mode = tmode;
1570
1571 if (mode == VOIDmode)
1572 break;
1573
1574 icode = mov_optab->handlers[(int) mode].insn_code;
1575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1576 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1577
1578 max_size = GET_MODE_SIZE (mode);
1579 }
1580
1581 /* The code above should have handled everything. */
1582 if (data.len > 0)
1583 abort ();
1584 }
1585
1586 /* Return number of insns required to move L bytes by pieces.
1587 ALIGN (in bits) is maximum alignment we can assume. */
1588
1589 static unsigned HOST_WIDE_INT
1590 move_by_pieces_ninsns (l, align)
1591 unsigned HOST_WIDE_INT l;
1592 unsigned int align;
1593 {
1594 unsigned HOST_WIDE_INT n_insns = 0;
1595 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1596
1597 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1598 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1599 align = MOVE_MAX * BITS_PER_UNIT;
1600
1601 while (max_size > 1)
1602 {
1603 enum machine_mode mode = VOIDmode, tmode;
1604 enum insn_code icode;
1605
1606 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1607 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1608 if (GET_MODE_SIZE (tmode) < max_size)
1609 mode = tmode;
1610
1611 if (mode == VOIDmode)
1612 break;
1613
1614 icode = mov_optab->handlers[(int) mode].insn_code;
1615 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1616 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1617
1618 max_size = GET_MODE_SIZE (mode);
1619 }
1620
1621 if (l)
1622 abort ();
1623 return n_insns;
1624 }
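/* For instance, with MOVE_MAX == 4, word-sized alignment and the usual
   QImode/HImode/SImode move patterns available, move_by_pieces_ninsns
   (10, 32) counts two SImode moves plus one HImode move and returns 3.  */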
1625
1626 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1627 with move instructions for mode MODE. GENFUN is the gen_... function
1628 to make a move insn for that mode. DATA has all the other info. */
1629
1630 static void
1631 move_by_pieces_1 (genfun, mode, data)
1632 rtx (*genfun) PARAMS ((rtx, ...));
1633 enum machine_mode mode;
1634 struct move_by_pieces *data;
1635 {
1636 unsigned int size = GET_MODE_SIZE (mode);
1637 rtx to1 = NULL_RTX, from1;
1638
1639 while (data->len >= size)
1640 {
1641 if (data->reverse)
1642 data->offset -= size;
1643
1644 if (data->to)
1645 {
1646 if (data->autinc_to)
1647 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1648 data->offset);
1649 else
1650 to1 = adjust_address (data->to, mode, data->offset);
1651 }
1652
1653 if (data->autinc_from)
1654 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1655 data->offset);
1656 else
1657 from1 = adjust_address (data->from, mode, data->offset);
1658
1659 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1660 emit_insn (gen_add2_insn (data->to_addr,
1661 GEN_INT (-(HOST_WIDE_INT)size)));
1662 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1663 emit_insn (gen_add2_insn (data->from_addr,
1664 GEN_INT (-(HOST_WIDE_INT)size)));
1665
1666 if (data->to)
1667 emit_insn ((*genfun) (to1, from1));
1668 else
1669 {
1670 #ifdef PUSH_ROUNDING
1671 emit_single_push_insn (mode, from1, NULL);
1672 #else
1673 abort ();
1674 #endif
1675 }
1676
1677 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1678 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1679 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1680 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1681
1682 if (! data->reverse)
1683 data->offset += size;
1684
1685 data->len -= size;
1686 }
1687 }
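/* For a simple forward copy with no auto-increment addressing, the loop
   above just walks the block at increasing offsets: an 8-byte copy done
   in SImode emits two moves via adjust_address, one at offset 0 and one
   at offset 4.  */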
1688 \f
1689 /* Emit code to move a block Y to a block X. This may be done with
1690 string-move instructions, with multiple scalar move instructions,
1691 or with a library call.
1692
1693 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1694 SIZE is an rtx that says how long they are.
1695 ALIGN is the maximum alignment we can assume they have.
1696 METHOD describes what kind of copy this is, and what mechanisms may be used.
1697
1698 Return the address of the new block, if memcpy is called and returns it,
1699 0 otherwise. */
1700
1701 rtx
1702 emit_block_move (x, y, size, method)
1703 rtx x, y, size;
1704 enum block_op_methods method;
1705 {
1706 bool may_use_call;
1707 rtx retval = 0;
1708 unsigned int align;
1709
1710 switch (method)
1711 {
1712 case BLOCK_OP_NORMAL:
1713 may_use_call = true;
1714 break;
1715
1716 case BLOCK_OP_CALL_PARM:
1717 may_use_call = block_move_libcall_safe_for_call_parm ();
1718
1719 /* Make inhibit_defer_pop nonzero around the library call
1720 to force it to pop the arguments right away. */
1721 NO_DEFER_POP;
1722 break;
1723
1724 case BLOCK_OP_NO_LIBCALL:
1725 may_use_call = false;
1726 break;
1727
1728 default:
1729 abort ();
1730 }
1731
1732 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1733
1734 if (GET_MODE (x) != BLKmode)
1735 abort ();
1736 if (GET_MODE (y) != BLKmode)
1737 abort ();
1738
1739 x = protect_from_queue (x, 1);
1740 y = protect_from_queue (y, 0);
1741 size = protect_from_queue (size, 0);
1742
1743 if (GET_CODE (x) != MEM)
1744 abort ();
1745 if (GET_CODE (y) != MEM)
1746 abort ();
1747 if (size == 0)
1748 abort ();
1749
1750 /* Set MEM_SIZE as appropriate for this block copy. The main place this
   1751      can be incorrect is when the size comes from __builtin_memcpy.  */
1752 if (GET_CODE (size) == CONST_INT)
1753 {
1754 x = shallow_copy_rtx (x);
1755 y = shallow_copy_rtx (y);
1756 set_mem_size (x, size);
1757 set_mem_size (y, size);
1758 }
1759
1760 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1761 move_by_pieces (x, y, INTVAL (size), align);
1762 else if (emit_block_move_via_movstr (x, y, size, align))
1763 ;
1764 else if (may_use_call)
1765 retval = emit_block_move_via_libcall (x, y, size);
1766 else
1767 emit_block_move_via_loop (x, y, size, align);
1768
1769 if (method == BLOCK_OP_CALL_PARM)
1770 OK_DEFER_POP;
1771
1772 return retval;
1773 }
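/* A minimal usage sketch, assuming DST and SRC are BLKmode MEMs and N is
   a compile-time byte count:

       emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

   With a constant size the copy may be expanded inline by
   move_by_pieces; otherwise a movstr pattern, a memcpy/bcopy libcall, or
   (for BLOCK_OP_NO_LIBCALL) an explicit loop is used.  */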
1774
1775 /* A subroutine of emit_block_move. Returns true if calling the
1776 block move libcall will not clobber any parameters which may have
1777 already been placed on the stack. */
1778
1779 static bool
1780 block_move_libcall_safe_for_call_parm ()
1781 {
1782 if (PUSH_ARGS)
1783 return true;
1784 else
1785 {
1786 /* Check to see whether memcpy takes all register arguments. */
1787 static enum {
1788 takes_regs_uninit, takes_regs_no, takes_regs_yes
1789 } takes_regs = takes_regs_uninit;
1790
1791 switch (takes_regs)
1792 {
1793 case takes_regs_uninit:
1794 {
1795 CUMULATIVE_ARGS args_so_far;
1796 tree fn, arg;
1797
1798 fn = emit_block_move_libcall_fn (false);
1799 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1800
1801 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1802 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1803 {
1804 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1805 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1806 if (!tmp || !REG_P (tmp))
1807 goto fail_takes_regs;
1808 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1809 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1810 NULL_TREE, 1))
1811 goto fail_takes_regs;
1812 #endif
1813 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1814 }
1815 }
1816 takes_regs = takes_regs_yes;
1817 /* FALLTHRU */
1818
1819 case takes_regs_yes:
1820 return true;
1821
1822 fail_takes_regs:
1823 takes_regs = takes_regs_no;
1824 /* FALLTHRU */
1825 case takes_regs_no:
1826 return false;
1827
1828 default:
1829 abort ();
1830 }
1831 }
1832 }
1833
1834 /* A subroutine of emit_block_move. Expand a movstr pattern;
1835 return true if successful. */
1836
1837 static bool
1838 emit_block_move_via_movstr (x, y, size, align)
1839 rtx x, y, size;
1840 unsigned int align;
1841 {
1842 /* Try the most limited insn first, because there's no point
1843 including more than one in the machine description unless
1844 the more limited one has some advantage. */
1845
1846 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1847 enum machine_mode mode;
1848
1849 /* Since this is a move insn, we don't care about volatility. */
1850 volatile_ok = 1;
1851
1852 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1853 mode = GET_MODE_WIDER_MODE (mode))
1854 {
1855 enum insn_code code = movstr_optab[(int) mode];
1856 insn_operand_predicate_fn pred;
1857
1858 if (code != CODE_FOR_nothing
1859 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1860 here because if SIZE is less than the mode mask, as it is
1861 returned by the macro, it will definitely be less than the
1862 actual mode mask. */
1863 && ((GET_CODE (size) == CONST_INT
1864 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1865 <= (GET_MODE_MASK (mode) >> 1)))
1866 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1867 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1868 || (*pred) (x, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1870 || (*pred) (y, BLKmode))
1871 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1872 || (*pred) (opalign, VOIDmode)))
1873 {
1874 rtx op2;
1875 rtx last = get_last_insn ();
1876 rtx pat;
1877
1878 op2 = convert_to_mode (mode, size, 1);
1879 pred = insn_data[(int) code].operand[2].predicate;
1880 if (pred != 0 && ! (*pred) (op2, mode))
1881 op2 = copy_to_mode_reg (mode, op2);
1882
1883 /* ??? When called via emit_block_move_for_call, it'd be
1884 nice if there were some way to inform the backend, so
1885 that it doesn't fail the expansion because it thinks
1886 emitting the libcall would be more efficient. */
1887
1888 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1889 if (pat)
1890 {
1891 emit_insn (pat);
1892 volatile_ok = 0;
1893 return true;
1894 }
1895 else
1896 delete_insns_since (last);
1897 }
1898 }
1899
1900 volatile_ok = 0;
1901 return false;
1902 }
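/* The movstrMODE patterns tried above take four operands: the
   destination and source BLKmode MEMs, the length (OP2, converted to
   MODE), and the shared alignment in bytes (OPALIGN).  A backend that
   cannot handle a particular request may FAIL in its expander, in which
   case the partially generated insns are deleted and the caller falls
   back to a libcall or an explicit loop.  */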
1903
1904 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1905 Return the return value from memcpy, 0 otherwise. */
1906
1907 static rtx
1908 emit_block_move_via_libcall (dst, src, size)
1909 rtx dst, src, size;
1910 {
1911 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1912 enum machine_mode size_mode;
1913 rtx retval;
1914
1915 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1916
1917 It is unsafe to save the value generated by protect_from_queue
1918 and reuse it later. Consider what happens if emit_queue is
1919 called before the return value from protect_from_queue is used.
1920
1921 Expansion of the CALL_EXPR below will call emit_queue before
1922 we are finished emitting RTL for argument setup. So if we are
1923 not careful we could get the wrong value for an argument.
1924
1925 To avoid this problem we go ahead and emit code to copy X, Y &
1926 SIZE into new pseudos. We can then place those new pseudos
1927 into an RTL_EXPR and use them later, even after a call to
1928 emit_queue.
1929
1930 Note this is not strictly needed for library calls since they
1931 do not call emit_queue before loading their arguments. However,
1932 we may need to have library calls call emit_queue in the future
1933 since failing to do so could cause problems for targets which
1934 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1935
1936 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1937 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1938
1939 if (TARGET_MEM_FUNCTIONS)
1940 size_mode = TYPE_MODE (sizetype);
1941 else
1942 size_mode = TYPE_MODE (unsigned_type_node);
1943 size = convert_to_mode (size_mode, size, 1);
1944 size = copy_to_mode_reg (size_mode, size);
1945
1946 /* It is incorrect to use the libcall calling conventions to call
1947 memcpy in this context. This could be a user call to memcpy and
1948 the user may wish to examine the return value from memcpy. For
1949 targets where libcalls and normal calls have different conventions
1950 for returning pointers, we could end up generating incorrect code.
1951
1952 For convenience, we generate the call to bcopy this way as well. */
1953
1954 dst_tree = make_tree (ptr_type_node, dst);
1955 src_tree = make_tree (ptr_type_node, src);
1956 if (TARGET_MEM_FUNCTIONS)
1957 size_tree = make_tree (sizetype, size);
1958 else
1959 size_tree = make_tree (unsigned_type_node, size);
1960
1961 fn = emit_block_move_libcall_fn (true);
1962 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1963 if (TARGET_MEM_FUNCTIONS)
1964 {
1965 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1966 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1967 }
1968 else
1969 {
1970 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1971 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1972 }
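  /* Note: tree_cons prepends, so the list built just above reads, front
     to back, as (dst, src, size) in the TARGET_MEM_FUNCTIONS case and
     (src, dst, size) otherwise -- i.e. the calls emitted are effectively
     memcpy (dst, src, size) and bcopy (src, dst, size).  */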
1973
1974 /* Now we have to build up the CALL_EXPR itself. */
1975 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1976 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1977 call_expr, arg_list, NULL_TREE);
1978 TREE_SIDE_EFFECTS (call_expr) = 1;
1979
1980 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1981
1982 /* If we are initializing a readonly value, show the above call
1983 clobbered it. Otherwise, a load from it may erroneously be
1984 hoisted from a loop. */
1985 if (RTX_UNCHANGING_P (dst))
1986 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1987
1988 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1989 }
1990
1991 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1992 for the function we use for block copies. The first time FOR_CALL
1993 is true, we call assemble_external. */
1994
1995 static GTY(()) tree block_move_fn;
1996
1997 void
1998 init_block_move_fn (asmspec)
1999 const char *asmspec;
2000 {
2001 if (!block_move_fn)
2002 {
2003 tree fn, args;
2004
2005 if (TARGET_MEM_FUNCTIONS)
2006 {
2007 fn = get_identifier ("memcpy");
2008 args = build_function_type_list (ptr_type_node, ptr_type_node,
2009 const_ptr_type_node, sizetype,
2010 NULL_TREE);
2011 }
2012 else
2013 {
2014 fn = get_identifier ("bcopy");
2015 args = build_function_type_list (void_type_node, const_ptr_type_node,
2016 ptr_type_node, unsigned_type_node,
2017 NULL_TREE);
2018 }
2019
2020 fn = build_decl (FUNCTION_DECL, fn, args);
2021 DECL_EXTERNAL (fn) = 1;
2022 TREE_PUBLIC (fn) = 1;
2023 DECL_ARTIFICIAL (fn) = 1;
2024 TREE_NOTHROW (fn) = 1;
2025
2026 block_move_fn = fn;
2027 }
2028
2029 if (asmspec)
2030 {
2031 SET_DECL_RTL (block_move_fn, NULL_RTX);
2032 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2033 }
2034 }
2035
2036 static tree
2037 emit_block_move_libcall_fn (for_call)
2038 int for_call;
2039 {
2040 static bool emitted_extern;
2041
2042 if (!block_move_fn)
2043 init_block_move_fn (NULL);
2044
2045 if (for_call && !emitted_extern)
2046 {
2047 emitted_extern = true;
2048 make_decl_rtl (block_move_fn, NULL);
2049 assemble_external (block_move_fn);
2050 }
2051
2052 return block_move_fn;
2053 }
2054
2055 /* A subroutine of emit_block_move. Copy the data via an explicit
2056 loop. This is used only when libcalls are forbidden. */
2057 /* ??? It'd be nice to copy in hunks larger than QImode. */
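/* Sketch of the loop emitted below (illustrative pseudo-C, not code from
   this file); each move copies a single QImode byte:

       iter = 0;
       goto cmp;
     top:
       *(x_base + iter) = *(y_base + iter);
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;  */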
2058
2059 static void
2060 emit_block_move_via_loop (x, y, size, align)
2061 rtx x, y, size;
2062 unsigned int align ATTRIBUTE_UNUSED;
2063 {
2064 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2065 enum machine_mode iter_mode;
2066
2067 iter_mode = GET_MODE (size);
2068 if (iter_mode == VOIDmode)
2069 iter_mode = word_mode;
2070
2071 top_label = gen_label_rtx ();
2072 cmp_label = gen_label_rtx ();
2073 iter = gen_reg_rtx (iter_mode);
2074
2075 emit_move_insn (iter, const0_rtx);
2076
2077 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2078 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2079 do_pending_stack_adjust ();
2080
2081 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2082
2083 emit_jump (cmp_label);
2084 emit_label (top_label);
2085
2086 tmp = convert_modes (Pmode, iter_mode, iter, true);
2087 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2088 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2089 x = change_address (x, QImode, x_addr);
2090 y = change_address (y, QImode, y_addr);
2091
2092 emit_move_insn (x, y);
2093
2094 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2095 true, OPTAB_LIB_WIDEN);
2096 if (tmp != iter)
2097 emit_move_insn (iter, tmp);
2098
2099 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2100 emit_label (cmp_label);
2101
2102 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2103 true, top_label);
2104
2105 emit_note (NULL, NOTE_INSN_LOOP_END);
2106 }
2107 \f
2108 /* Copy all or part of a value X into registers starting at REGNO.
2109 The number of registers to be filled is NREGS. */
2110
2111 void
2112 move_block_to_reg (regno, x, nregs, mode)
2113 int regno;
2114 rtx x;
2115 int nregs;
2116 enum machine_mode mode;
2117 {
2118 int i;
2119 #ifdef HAVE_load_multiple
2120 rtx pat;
2121 rtx last;
2122 #endif
2123
2124 if (nregs == 0)
2125 return;
2126
2127 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2128 x = validize_mem (force_const_mem (mode, x));
2129
2130 /* See if the machine can do this with a load multiple insn. */
2131 #ifdef HAVE_load_multiple
2132 if (HAVE_load_multiple)
2133 {
2134 last = get_last_insn ();
2135 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2136 GEN_INT (nregs));
2137 if (pat)
2138 {
2139 emit_insn (pat);
2140 return;
2141 }
2142 else
2143 delete_insns_since (last);
2144 }
2145 #endif
2146
2147 for (i = 0; i < nregs; i++)
2148 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2149 operand_subword_force (x, i, mode));
2150 }
2151
2152 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2153 The number of registers to be filled is NREGS. SIZE indicates the number
2154 of bytes in the object X. */
2155
2156 void
2157 move_block_from_reg (regno, x, nregs, size)
2158 int regno;
2159 rtx x;
2160 int nregs;
2161 int size;
2162 {
2163 int i;
2164 #ifdef HAVE_store_multiple
2165 rtx pat;
2166 rtx last;
2167 #endif
2168 enum machine_mode mode;
2169
2170 if (nregs == 0)
2171 return;
2172
2173 /* If SIZE is that of a mode no bigger than a word, just use that
2174 mode's store operation. */
2175 if (size <= UNITS_PER_WORD
2176 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2177 {
2178 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2179 return;
2180 }
2181
2182 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2183 to the left before storing to memory. Note that the previous test
2184 doesn't handle all cases (e.g. SIZE == 3). */
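  /* Illustrative numbers: with SIZE == 3 and UNITS_PER_WORD == 4, the shift
     below is (4 - 3) * BITS_PER_UNIT == 8 bits, left-justifying the three
     significant bytes in the word before it is stored.  */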
2185 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2186 {
2187 rtx tem = operand_subword (x, 0, 1, BLKmode);
2188 rtx shift;
2189
2190 if (tem == 0)
2191 abort ();
2192
2193 shift = expand_shift (LSHIFT_EXPR, word_mode,
2194 gen_rtx_REG (word_mode, regno),
2195 build_int_2 ((UNITS_PER_WORD - size)
2196 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2197 emit_move_insn (tem, shift);
2198 return;
2199 }
2200
2201 /* See if the machine can do this with a store multiple insn. */
2202 #ifdef HAVE_store_multiple
2203 if (HAVE_store_multiple)
2204 {
2205 last = get_last_insn ();
2206 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2207 GEN_INT (nregs));
2208 if (pat)
2209 {
2210 emit_insn (pat);
2211 return;
2212 }
2213 else
2214 delete_insns_since (last);
2215 }
2216 #endif
2217
2218 for (i = 0; i < nregs; i++)
2219 {
2220 rtx tem = operand_subword (x, i, 1, BLKmode);
2221
2222 if (tem == 0)
2223 abort ();
2224
2225 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2226 }
2227 }
2228
2229 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2230 ORIG, where ORIG is a non-consecutive group of registers represented by
2231 a PARALLEL. The clone is identical to the original except that the
2232 original set of registers is replaced by a new set of pseudo registers.
2233 The new set has the same modes as the original set. */
2234
2235 rtx
2236 gen_group_rtx (orig)
2237 rtx orig;
2238 {
2239 int i, length;
2240 rtx *tmps;
2241
2242 if (GET_CODE (orig) != PARALLEL)
2243 abort ();
2244
2245 length = XVECLEN (orig, 0);
2246 tmps = (rtx *) alloca (sizeof (rtx) * length);
2247
2248 /* Skip a NULL entry in the first slot. */
2249 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2250
2251 if (i)
2252 tmps[0] = 0;
2253
2254 for (; i < length; i++)
2255 {
2256 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2257 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2258
2259 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2260 }
2261
2262 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2263 }
2264
2265 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2266 registers represented by a PARALLEL. SSIZE represents the total size of
2267 block SRC in bytes, or -1 if not known. */
2268 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2269 the balance will be in what would be the low-order memory addresses, i.e.
2270 left justified for big endian, right justified for little endian. This
2271 happens to be true for the targets currently using this support. If this
2272 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2273 would be needed. */
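/* For illustration (the register numbers are hypothetical), such a DST
   might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   asking for bytes 0-7 of ORIG_SRC in register 3 and bytes 8-15 in
   register 4; a NULL first operand marks a parameter passed both on the
   stack and in registers.  */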
2274
2275 void
2276 emit_group_load (dst, orig_src, ssize)
2277 rtx dst, orig_src;
2278 int ssize;
2279 {
2280 rtx *tmps, src;
2281 int start, i;
2282
2283 if (GET_CODE (dst) != PARALLEL)
2284 abort ();
2285
2286 /* Check for a NULL entry, used to indicate that the parameter goes
2287 both on the stack and in registers. */
2288 if (XEXP (XVECEXP (dst, 0, 0), 0))
2289 start = 0;
2290 else
2291 start = 1;
2292
2293 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2294
2295 /* Process the pieces. */
2296 for (i = start; i < XVECLEN (dst, 0); i++)
2297 {
2298 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2299 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2300 unsigned int bytelen = GET_MODE_SIZE (mode);
2301 int shift = 0;
2302
2303 /* Handle trailing fragments that run over the size of the struct. */
2304 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2305 {
2306 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2307 bytelen = ssize - bytepos;
2308 if (bytelen <= 0)
2309 abort ();
2310 }
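	  /* Illustrative numbers: with SSIZE == 6, BYTEPOS == 4 and an SImode
	     piece (BYTELEN == 4), only 2 bytes remain, so BYTELEN becomes 2
	     and SHIFT becomes (4 - 2) * BITS_PER_UNIT == 16; on a big-endian
	     target the extracted bits are shifted left by that amount below
	     to left-justify them in the register.  */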
2311
2312 /* If we won't be loading directly from memory, protect the real source
2313 from strange tricks we might play; but make sure that the source can
2314 be loaded directly into the destination. */
2315 src = orig_src;
2316 if (GET_CODE (orig_src) != MEM
2317 && (!CONSTANT_P (orig_src)
2318 || (GET_MODE (orig_src) != mode
2319 && GET_MODE (orig_src) != VOIDmode)))
2320 {
2321 if (GET_MODE (orig_src) == VOIDmode)
2322 src = gen_reg_rtx (mode);
2323 else
2324 src = gen_reg_rtx (GET_MODE (orig_src));
2325
2326 emit_move_insn (src, orig_src);
2327 }
2328
2329 /* Optimize the access just a bit. */
2330 if (GET_CODE (src) == MEM
2331 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2332 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2333 && bytelen == GET_MODE_SIZE (mode))
2334 {
2335 tmps[i] = gen_reg_rtx (mode);
2336 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2337 }
2338 else if (GET_CODE (src) == CONCAT)
2339 {
2340 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2341 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2342
2343 if ((bytepos == 0 && bytelen == slen0)
2344 || (bytepos != 0 && bytepos + bytelen <= slen))
2345 {
2346 /* The following assumes that the concatenated objects all
2347 have the same size. In this case, a simple calculation
2348 can be used to determine the object and the bit field
2349 to be extracted. */
2350 tmps[i] = XEXP (src, bytepos / slen0);
2351 if (! CONSTANT_P (tmps[i])
2352 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2353 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2354 (bytepos % slen0) * BITS_PER_UNIT,
2355 1, NULL_RTX, mode, mode, ssize);
2356 }
2357 else if (bytepos == 0)
2358 {
2359 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2360 emit_move_insn (mem, src);
2361 tmps[i] = adjust_address (mem, mode, 0);
2362 }
2363 else
2364 abort ();
2365 }
2366 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2367 SIMD register, which is currently broken. Until we get GCC
2368 to emit proper RTL for these cases, let's dump to memory. */
2369 else if (VECTOR_MODE_P (GET_MODE (dst))
2370 && GET_CODE (src) == REG)
2371 {
2372 int slen = GET_MODE_SIZE (GET_MODE (src));
2373 rtx mem;
2374
2375 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2376 emit_move_insn (mem, src);
2377 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2378 }
2379 else if (CONSTANT_P (src)
2380 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2381 tmps[i] = src;
2382 else
2383 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2384 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2385 mode, mode, ssize);
2386
2387 if (BYTES_BIG_ENDIAN && shift)
2388 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2389 tmps[i], 0, OPTAB_WIDEN);
2390 }
2391
2392 emit_queue ();
2393
2394 /* Copy the extracted pieces into the proper (probable) hard regs. */
2395 for (i = start; i < XVECLEN (dst, 0); i++)
2396 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2397 }
2398
2399 /* Emit code to move a block SRC to block DST, where SRC and DST are
2400 non-consecutive groups of registers, each represented by a PARALLEL. */
2401
2402 void
2403 emit_group_move (dst, src)
2404 rtx dst, src;
2405 {
2406 int i;
2407
2408 if (GET_CODE (src) != PARALLEL
2409 || GET_CODE (dst) != PARALLEL
2410 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2411 abort ();
2412
2413 /* Skip first entry if NULL. */
2414 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2415 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2416 XEXP (XVECEXP (src, 0, i), 0));
2417 }
2418
2419 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2420 registers represented by a PARALLEL. SSIZE represents the total size of
2421 block DST, or -1 if not known. */
2422
2423 void
2424 emit_group_store (orig_dst, src, ssize)
2425 rtx orig_dst, src;
2426 int ssize;
2427 {
2428 rtx *tmps, dst;
2429 int start, i;
2430
2431 if (GET_CODE (src) != PARALLEL)
2432 abort ();
2433
2434 /* Check for a NULL entry, used to indicate that the parameter goes
2435 both on the stack and in registers. */
2436 if (XEXP (XVECEXP (src, 0, 0), 0))
2437 start = 0;
2438 else
2439 start = 1;
2440
2441 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2442
2443 /* Copy the (probable) hard regs into pseudos. */
2444 for (i = start; i < XVECLEN (src, 0); i++)
2445 {
2446 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2447 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2448 emit_move_insn (tmps[i], reg);
2449 }
2450 emit_queue ();
2451
2452 /* If we won't be storing directly into memory, protect the real destination
2453 from strange tricks we might play. */
2454 dst = orig_dst;
2455 if (GET_CODE (dst) == PARALLEL)
2456 {
2457 rtx temp;
2458
2459 /* We can get a PARALLEL dst if there is a conditional expression in
2460 a return statement. In that case, the dst and src are the same,
2461 so no action is necessary. */
2462 if (rtx_equal_p (dst, src))
2463 return;
2464
2465 /* It is unclear if we can ever reach here, but we may as well handle
2466 it. Allocate a temporary, and split this into a store/load to/from
2467 the temporary. */
2468
2469 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2470 emit_group_store (temp, src, ssize);
2471 emit_group_load (dst, temp, ssize);
2472 return;
2473 }
2474 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2475 {
2476 dst = gen_reg_rtx (GET_MODE (orig_dst));
2477 /* Make life a bit easier for combine. */
2478 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2479 }
2480
2481 /* Process the pieces. */
2482 for (i = start; i < XVECLEN (src, 0); i++)
2483 {
2484 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2485 enum machine_mode mode = GET_MODE (tmps[i]);
2486 unsigned int bytelen = GET_MODE_SIZE (mode);
2487 rtx dest = dst;
2488
2489 /* Handle trailing fragments that run over the size of the struct. */
2490 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2491 {
2492 if (BYTES_BIG_ENDIAN)
2493 {
2494 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2495 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2496 tmps[i], 0, OPTAB_WIDEN);
2497 }
2498 bytelen = ssize - bytepos;
2499 }
2500
2501 if (GET_CODE (dst) == CONCAT)
2502 {
2503 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2504 dest = XEXP (dst, 0);
2505 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2506 {
2507 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2508 dest = XEXP (dst, 1);
2509 }
2510 else if (bytepos == 0 && XVECLEN (src, 0))
2511 {
2512 dest = assign_stack_temp (GET_MODE (dest),
2513 GET_MODE_SIZE (GET_MODE (dest)), 0);
2514 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2515 tmps[i]);
2516 dst = dest;
2517 break;
2518 }
2519 else
2520 abort ();
2521 }
2522
2523 /* Optimize the access just a bit. */
2524 if (GET_CODE (dest) == MEM
2525 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2526 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2527 && bytelen == GET_MODE_SIZE (mode))
2528 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2529 else
2530 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2531 mode, tmps[i], ssize);
2532 }
2533
2534 emit_queue ();
2535
2536 /* Copy from the pseudo into the (probable) hard reg. */
2537 if (orig_dst != dst)
2538 emit_move_insn (orig_dst, dst);
2539 }
2540
2541 /* Generate code to copy a BLKmode object of TYPE out of a
2542 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2543 is null, a stack temporary is created. TGTBLK is returned.
2544
2545 The primary purpose of this routine is to handle functions
2546 that return BLKmode structures in registers. Some machines
2547 (the PA for example) want to return all small structures
2548 in registers regardless of the structure's alignment. */
2549
2550 rtx
2551 copy_blkmode_from_reg (tgtblk, srcreg, type)
2552 rtx tgtblk;
2553 rtx srcreg;
2554 tree type;
2555 {
2556 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2557 rtx src = NULL, dst = NULL;
2558 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2559 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2560
2561 if (tgtblk == 0)
2562 {
2563 tgtblk = assign_temp (build_qualified_type (type,
2564 (TYPE_QUALS (type)
2565 | TYPE_QUAL_CONST)),
2566 0, 1, 1);
2567 preserve_temp_slots (tgtblk);
2568 }
2569
2570 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2571 into a new pseudo which is a full word. */
2572
2573 if (GET_MODE (srcreg) != BLKmode
2574 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2575 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2576
2577 /* Structures whose size is not a multiple of a word are aligned
2578 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2579 machine, this means we must skip the empty high order bytes when
2580 calculating the bit offset. */
2581 if (BYTES_BIG_ENDIAN
2582 && bytes % UNITS_PER_WORD)
2583 big_endian_correction
2584 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
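  /* Illustrative numbers: with BITS_PER_WORD == 32, UNITS_PER_WORD == 4 and
     BYTES == 6, the last word holds only 2 meaningful bytes, so the
     correction is 32 - 2 * BITS_PER_UNIT == 16 bits.  */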
2585
2586 /* Copy the structure BITSIZE bits at a time.
2587
2588 We could probably emit more efficient code for machines which do not use
2589 strict alignment, but it doesn't seem worth the effort at the current
2590 time. */
2591 for (bitpos = 0, xbitpos = big_endian_correction;
2592 bitpos < bytes * BITS_PER_UNIT;
2593 bitpos += bitsize, xbitpos += bitsize)
2594 {
2595 /* We need a new source operand each time xbitpos is on a
2596 word boundary and when xbitpos == big_endian_correction
2597 (the first time through). */
2598 if (xbitpos % BITS_PER_WORD == 0
2599 || xbitpos == big_endian_correction)
2600 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2601 GET_MODE (srcreg));
2602
2603 /* We need a new destination operand each time bitpos is on
2604 a word boundary. */
2605 if (bitpos % BITS_PER_WORD == 0)
2606 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2607
2608 /* Use xbitpos for the source extraction (right justified) and
2609 bitpos for the destination store (left justified). */
2610 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2611 extract_bit_field (src, bitsize,
2612 xbitpos % BITS_PER_WORD, 1,
2613 NULL_RTX, word_mode, word_mode,
2614 BITS_PER_WORD),
2615 BITS_PER_WORD);
2616 }
2617
2618 return tgtblk;
2619 }
2620
2621 /* Add a USE expression for REG to the (possibly empty) list pointed
2622 to by CALL_FUSAGE. REG must denote a hard register. */
2623
2624 void
2625 use_reg (call_fusage, reg)
2626 rtx *call_fusage, reg;
2627 {
2628 if (GET_CODE (reg) != REG
2629 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2630 abort ();
2631
2632 *call_fusage
2633 = gen_rtx_EXPR_LIST (VOIDmode,
2634 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2635 }
2636
2637 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2638 starting at REGNO. All of these registers must be hard registers. */
2639
2640 void
2641 use_regs (call_fusage, regno, nregs)
2642 rtx *call_fusage;
2643 int regno;
2644 int nregs;
2645 {
2646 int i;
2647
2648 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2649 abort ();
2650
2651 for (i = 0; i < nregs; i++)
2652 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2653 }
2654
2655 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2656 PARALLEL REGS. This is for calls that pass values in multiple
2657 non-contiguous locations. The Irix 6 ABI has examples of this. */
2658
2659 void
2660 use_group_regs (call_fusage, regs)
2661 rtx *call_fusage;
2662 rtx regs;
2663 {
2664 int i;
2665
2666 for (i = 0; i < XVECLEN (regs, 0); i++)
2667 {
2668 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2669
2670 /* A NULL entry means the parameter goes both on the stack and in
2671 registers. This can also be a MEM for targets that pass values
2672 partially on the stack and partially in registers. */
2673 if (reg != 0 && GET_CODE (reg) == REG)
2674 use_reg (call_fusage, reg);
2675 }
2676 }
2677 \f
2678
2679 /* Determine whether the LEN bytes generated by CONSTFUN can be
2680 stored to memory using several move instructions. CONSTFUNDATA is
2681 a pointer which will be passed as argument in every CONSTFUN call.
2682 ALIGN is maximum alignment we can assume. Return nonzero if a
2683 call to store_by_pieces should succeed. */
2684
2685 int
2686 can_store_by_pieces (len, constfun, constfundata, align)
2687 unsigned HOST_WIDE_INT len;
2688 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2689 PTR constfundata;
2690 unsigned int align;
2691 {
2692 unsigned HOST_WIDE_INT max_size, l;
2693 HOST_WIDE_INT offset = 0;
2694 enum machine_mode mode, tmode;
2695 enum insn_code icode;
2696 int reverse;
2697 rtx cst;
2698
2699 if (! STORE_BY_PIECES_P (len, align))
2700 return 0;
2701
2702 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2703 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2704 align = MOVE_MAX * BITS_PER_UNIT;
2705
2706 /* We would first store what we can in the largest integer mode, then go to
2707 successively smaller modes. */
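  /* Illustrative numbers: with STORE_MAX_PIECES == 8, LEN == 11, DImode
     moves available and sufficient alignment, the loop below would account
     for one DImode piece (8 bytes), one HImode piece (2 bytes) and one
     QImode piece (1 byte).  */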
2708
2709 for (reverse = 0;
2710 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2711 reverse++)
2712 {
2713 l = len;
2714 mode = VOIDmode;
2715 max_size = STORE_MAX_PIECES + 1;
2716 while (max_size > 1)
2717 {
2718 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2719 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2720 if (GET_MODE_SIZE (tmode) < max_size)
2721 mode = tmode;
2722
2723 if (mode == VOIDmode)
2724 break;
2725
2726 icode = mov_optab->handlers[(int) mode].insn_code;
2727 if (icode != CODE_FOR_nothing
2728 && align >= GET_MODE_ALIGNMENT (mode))
2729 {
2730 unsigned int size = GET_MODE_SIZE (mode);
2731
2732 while (l >= size)
2733 {
2734 if (reverse)
2735 offset -= size;
2736
2737 cst = (*constfun) (constfundata, offset, mode);
2738 if (!LEGITIMATE_CONSTANT_P (cst))
2739 return 0;
2740
2741 if (!reverse)
2742 offset += size;
2743
2744 l -= size;
2745 }
2746 }
2747
2748 max_size = GET_MODE_SIZE (mode);
2749 }
2750
2751 /* The code above should have handled everything. */
2752 if (l != 0)
2753 abort ();
2754 }
2755
2756 return 1;
2757 }
2758
2759 /* Generate several move instructions to store LEN bytes generated by
2760 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2761 pointer which will be passed as argument in every CONSTFUN call.
2762 ALIGN is maximum alignment we can assume. */
2763
2764 void
2765 store_by_pieces (to, len, constfun, constfundata, align)
2766 rtx to;
2767 unsigned HOST_WIDE_INT len;
2768 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2769 PTR constfundata;
2770 unsigned int align;
2771 {
2772 struct store_by_pieces data;
2773
2774 if (! STORE_BY_PIECES_P (len, align))
2775 abort ();
2776 to = protect_from_queue (to, 1);
2777 data.constfun = constfun;
2778 data.constfundata = constfundata;
2779 data.len = len;
2780 data.to = to;
2781 store_by_pieces_1 (&data, align);
2782 }
2783
2784 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2785 rtx with BLKmode). The caller must pass TO through protect_from_queue
2786 before calling. ALIGN is maximum alignment we can assume. */
2787
2788 static void
2789 clear_by_pieces (to, len, align)
2790 rtx to;
2791 unsigned HOST_WIDE_INT len;
2792 unsigned int align;
2793 {
2794 struct store_by_pieces data;
2795
2796 data.constfun = clear_by_pieces_1;
2797 data.constfundata = NULL;
2798 data.len = len;
2799 data.to = to;
2800 store_by_pieces_1 (&data, align);
2801 }
2802
2803 /* Callback routine for clear_by_pieces.
2804 Return const0_rtx unconditionally. */
2805
2806 static rtx
2807 clear_by_pieces_1 (data, offset, mode)
2808 PTR data ATTRIBUTE_UNUSED;
2809 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2810 enum machine_mode mode ATTRIBUTE_UNUSED;
2811 {
2812 return const0_rtx;
2813 }
2814
2815 /* Subroutine of clear_by_pieces and store_by_pieces.
2816 Generate several move instructions to store LEN bytes of block TO. (A MEM
2817 rtx with BLKmode). The caller must pass TO through protect_from_queue
2818 before calling. ALIGN is maximum alignment we can assume. */
2819
2820 static void
2821 store_by_pieces_1 (data, align)
2822 struct store_by_pieces *data;
2823 unsigned int align;
2824 {
2825 rtx to_addr = XEXP (data->to, 0);
2826 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2827 enum machine_mode mode = VOIDmode, tmode;
2828 enum insn_code icode;
2829
2830 data->offset = 0;
2831 data->to_addr = to_addr;
2832 data->autinc_to
2833 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2834 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2835
2836 data->explicit_inc_to = 0;
2837 data->reverse
2838 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2839 if (data->reverse)
2840 data->offset = data->len;
2841
2842 /* If storing requires more than two move insns,
2843 copy addresses to registers (to make displacements shorter)
2844 and use post-increment if available. */
2845 if (!data->autinc_to
2846 && move_by_pieces_ninsns (data->len, align) > 2)
2847 {
2848 /* Determine the main mode we'll be using. */
2849 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2850 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2851 if (GET_MODE_SIZE (tmode) < max_size)
2852 mode = tmode;
2853
2854 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2855 {
2856 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2857 data->autinc_to = 1;
2858 data->explicit_inc_to = -1;
2859 }
2860
2861 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2862 && ! data->autinc_to)
2863 {
2864 data->to_addr = copy_addr_to_reg (to_addr);
2865 data->autinc_to = 1;
2866 data->explicit_inc_to = 1;
2867 }
2868
2869 if (!data->autinc_to && CONSTANT_P (to_addr))
2870 data->to_addr = copy_addr_to_reg (to_addr);
2871 }
2872
2873 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2874 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2875 align = MOVE_MAX * BITS_PER_UNIT;
2876
2877 /* First store what we can in the largest integer mode, then go to
2878 successively smaller modes. */
2879
2880 while (max_size > 1)
2881 {
2882 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2883 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2884 if (GET_MODE_SIZE (tmode) < max_size)
2885 mode = tmode;
2886
2887 if (mode == VOIDmode)
2888 break;
2889
2890 icode = mov_optab->handlers[(int) mode].insn_code;
2891 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2892 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2893
2894 max_size = GET_MODE_SIZE (mode);
2895 }
2896
2897 /* The code above should have handled everything. */
2898 if (data->len != 0)
2899 abort ();
2900 }
2901
2902 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2903 with move instructions for mode MODE. GENFUN is the gen_... function
2904 to make a move insn for that mode. DATA has all the other info. */
2905
2906 static void
2907 store_by_pieces_2 (genfun, mode, data)
2908 rtx (*genfun) PARAMS ((rtx, ...));
2909 enum machine_mode mode;
2910 struct store_by_pieces *data;
2911 {
2912 unsigned int size = GET_MODE_SIZE (mode);
2913 rtx to1, cst;
2914
2915 while (data->len >= size)
2916 {
2917 if (data->reverse)
2918 data->offset -= size;
2919
2920 if (data->autinc_to)
2921 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2922 data->offset);
2923 else
2924 to1 = adjust_address (data->to, mode, data->offset);
2925
2926 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2927 emit_insn (gen_add2_insn (data->to_addr,
2928 GEN_INT (-(HOST_WIDE_INT) size)));
2929
2930 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2931 emit_insn ((*genfun) (to1, cst));
2932
2933 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2934 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2935
2936 if (! data->reverse)
2937 data->offset += size;
2938
2939 data->len -= size;
2940 }
2941 }
2942 \f
2943 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2944 its length in bytes. */
2945
2946 rtx
2947 clear_storage (object, size)
2948 rtx object;
2949 rtx size;
2950 {
2951 rtx retval = 0;
2952 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2953 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2954
2955 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2956 just move a zero. Otherwise, do this a piece at a time. */
2957 if (GET_MODE (object) != BLKmode
2958 && GET_CODE (size) == CONST_INT
2959 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2960 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2961 else
2962 {
2963 object = protect_from_queue (object, 1);
2964 size = protect_from_queue (size, 0);
2965
2966 if (GET_CODE (size) == CONST_INT
2967 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2968 clear_by_pieces (object, INTVAL (size), align);
2969 else if (clear_storage_via_clrstr (object, size, align))
2970 ;
2971 else
2972 retval = clear_storage_via_libcall (object, size);
2973 }
2974
2975 return retval;
2976 }
2977
2978 /* A subroutine of clear_storage. Expand a clrstr pattern;
2979 return true if successful. */
2980
2981 static bool
2982 clear_storage_via_clrstr (object, size, align)
2983 rtx object, size;
2984 unsigned int align;
2985 {
2986 /* Try the most limited insn first, because there's no point
2987 including more than one in the machine description unless
2988 the more limited one has some advantage. */
2989
2990 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2991 enum machine_mode mode;
2992
2993 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2994 mode = GET_MODE_WIDER_MODE (mode))
2995 {
2996 enum insn_code code = clrstr_optab[(int) mode];
2997 insn_operand_predicate_fn pred;
2998
2999 if (code != CODE_FOR_nothing
3000 /* We don't need MODE to be narrower than
3001 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3002 the mode mask, as it is returned by the macro, it will
3003 definitely be less than the actual mode mask. */
3004 && ((GET_CODE (size) == CONST_INT
3005 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3006 <= (GET_MODE_MASK (mode) >> 1)))
3007 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3008 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3009 || (*pred) (object, BLKmode))
3010 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3011 || (*pred) (opalign, VOIDmode)))
3012 {
3013 rtx op1;
3014 rtx last = get_last_insn ();
3015 rtx pat;
3016
3017 op1 = convert_to_mode (mode, size, 1);
3018 pred = insn_data[(int) code].operand[1].predicate;
3019 if (pred != 0 && ! (*pred) (op1, mode))
3020 op1 = copy_to_mode_reg (mode, op1);
3021
3022 pat = GEN_FCN ((int) code) (object, op1, opalign);
3023 if (pat)
3024 {
3025 emit_insn (pat);
3026 return true;
3027 }
3028 else
3029 delete_insns_since (last);
3030 }
3031 }
3032
3033 return false;
3034 }
3035
3036 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3037 Return the return value of memset, 0 otherwise. */
3038
3039 static rtx
3040 clear_storage_via_libcall (object, size)
3041 rtx object, size;
3042 {
3043 tree call_expr, arg_list, fn, object_tree, size_tree;
3044 enum machine_mode size_mode;
3045 rtx retval;
3046
3047 /* OBJECT or SIZE may have been passed through protect_from_queue.
3048
3049 It is unsafe to save the value generated by protect_from_queue
3050 and reuse it later. Consider what happens if emit_queue is
3051 called before the return value from protect_from_queue is used.
3052
3053 Expansion of the CALL_EXPR below will call emit_queue before
3054 we are finished emitting RTL for argument setup. So if we are
3055 not careful we could get the wrong value for an argument.
3056
3057 To avoid this problem we go ahead and emit code to copy OBJECT
3058 and SIZE into new pseudos. We can then place those new pseudos
3059 into an RTL_EXPR and use them later, even after a call to
3060 emit_queue.
3061
3062 Note this is not strictly needed for library calls since they
3063 do not call emit_queue before loading their arguments. However,
3064 we may need to have library calls call emit_queue in the future
3065 since failing to do so could cause problems for targets which
3066 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3067
3068 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3069
3070 if (TARGET_MEM_FUNCTIONS)
3071 size_mode = TYPE_MODE (sizetype);
3072 else
3073 size_mode = TYPE_MODE (unsigned_type_node);
3074 size = convert_to_mode (size_mode, size, 1);
3075 size = copy_to_mode_reg (size_mode, size);
3076
3077 /* It is incorrect to use the libcall calling conventions to call
3078 memset in this context. This could be a user call to memset and
3079 the user may wish to examine the return value from memset. For
3080 targets where libcalls and normal calls have different conventions
3081 for returning pointers, we could end up generating incorrect code.
3082
3083 For convenience, we generate the call to bzero this way as well. */
3084
3085 object_tree = make_tree (ptr_type_node, object);
3086 if (TARGET_MEM_FUNCTIONS)
3087 size_tree = make_tree (sizetype, size);
3088 else
3089 size_tree = make_tree (unsigned_type_node, size);
3090
3091 fn = clear_storage_libcall_fn (true);
3092 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3093 if (TARGET_MEM_FUNCTIONS)
3094 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3095 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
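  /* Since tree_cons prepends, the call emitted is effectively
     memset (object, 0, size) with TARGET_MEM_FUNCTIONS, or
     bzero (object, size) otherwise.  */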
3096
3097 /* Now we have to build up the CALL_EXPR itself. */
3098 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3099 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3100 call_expr, arg_list, NULL_TREE);
3101 TREE_SIDE_EFFECTS (call_expr) = 1;
3102
3103 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3104
3105 /* If we are initializing a readonly value, show the above call
3106 clobbered it. Otherwise, a load from it may erroneously be
3107 hoisted from a loop. */
3108 if (RTX_UNCHANGING_P (object))
3109 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3110
3111 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3112 }
3113
3114 /* A subroutine of clear_storage_via_libcall. Create the tree node
3115 for the function we use for block clears. The first time FOR_CALL
3116 is true, we call assemble_external. */
3117
3118 static GTY(()) tree block_clear_fn;
3119
3120 void
3121 init_block_clear_fn (asmspec)
3122 const char *asmspec;
3123 {
3124 if (!block_clear_fn)
3125 {
3126 tree fn, args;
3127
3128 if (TARGET_MEM_FUNCTIONS)
3129 {
3130 fn = get_identifier ("memset");
3131 args = build_function_type_list (ptr_type_node, ptr_type_node,
3132 integer_type_node, sizetype,
3133 NULL_TREE);
3134 }
3135 else
3136 {
3137 fn = get_identifier ("bzero");
3138 args = build_function_type_list (void_type_node, ptr_type_node,
3139 unsigned_type_node, NULL_TREE);
3140 }
3141
3142 fn = build_decl (FUNCTION_DECL, fn, args);
3143 DECL_EXTERNAL (fn) = 1;
3144 TREE_PUBLIC (fn) = 1;
3145 DECL_ARTIFICIAL (fn) = 1;
3146 TREE_NOTHROW (fn) = 1;
3147
3148 block_clear_fn = fn;
3149 }
3150
3151 if (asmspec)
3152 {
3153 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3154 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3155 }
3156 }
3157
3158 static tree
3159 clear_storage_libcall_fn (for_call)
3160 int for_call;
3161 {
3162 static bool emitted_extern;
3163
3164 if (!block_clear_fn)
3165 init_block_clear_fn (NULL);
3166
3167 if (for_call && !emitted_extern)
3168 {
3169 emitted_extern = true;
3170 make_decl_rtl (block_clear_fn, NULL);
3171 assemble_external (block_clear_fn);
3172 }
3173
3174 return block_clear_fn;
3175 }
3176 \f
3177 /* Generate code to copy Y into X.
3178 Both Y and X must have the same mode, except that
3179 Y can be a constant with VOIDmode.
3180 This mode cannot be BLKmode; use emit_block_move for that.
3181
3182 Return the last instruction emitted. */
3183
3184 rtx
3185 emit_move_insn (x, y)
3186 rtx x, y;
3187 {
3188 enum machine_mode mode = GET_MODE (x);
3189 rtx y_cst = NULL_RTX;
3190 rtx last_insn;
3191
3192 x = protect_from_queue (x, 1);
3193 y = protect_from_queue (y, 0);
3194
3195 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3196 abort ();
3197
3198 /* Never force constant_p_rtx to memory. */
3199 if (GET_CODE (y) == CONSTANT_P_RTX)
3200 ;
3201 else if (CONSTANT_P (y))
3202 {
3203 if (optimize
3204 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3205 && (last_insn = compress_float_constant (x, y)))
3206 return last_insn;
3207
3208 if (!LEGITIMATE_CONSTANT_P (y))
3209 {
3210 y_cst = y;
3211 y = force_const_mem (mode, y);
3212
3213 /* If the target's cannot_force_const_mem prevented the spill,
3214 assume that the target's move expanders will also take care
3215 of the non-legitimate constant. */
3216 if (!y)
3217 y = y_cst;
3218 }
3219 }
3220
3221 /* If X or Y are memory references, verify that their addresses are valid
3222 for the machine. */
3223 if (GET_CODE (x) == MEM
3224 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3225 && ! push_operand (x, GET_MODE (x)))
3226 || (flag_force_addr
3227 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3228 x = validize_mem (x);
3229
3230 if (GET_CODE (y) == MEM
3231 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3232 || (flag_force_addr
3233 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3234 y = validize_mem (y);
3235
3236 if (mode == BLKmode)
3237 abort ();
3238
3239 last_insn = emit_move_insn_1 (x, y);
3240
3241 if (y_cst && GET_CODE (x) == REG)
3242 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3243
3244 return last_insn;
3245 }
3246
3247 /* Low level part of emit_move_insn.
3248 Called just like emit_move_insn, but assumes X and Y
3249 are basically valid. */
3250
3251 rtx
3252 emit_move_insn_1 (x, y)
3253 rtx x, y;
3254 {
3255 enum machine_mode mode = GET_MODE (x);
3256 enum machine_mode submode;
3257 enum mode_class class = GET_MODE_CLASS (mode);
3258
3259 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3260 abort ();
3261
3262 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3263 return
3264 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3265
3266 /* Expand complex moves by moving real part and imag part, if possible. */
3267 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3268 && BLKmode != (submode = GET_MODE_INNER (mode))
3269 && (mov_optab->handlers[(int) submode].insn_code
3270 != CODE_FOR_nothing))
3271 {
3272 /* Don't split destination if it is a stack push. */
3273 int stack = push_operand (x, GET_MODE (x));
3274
3275 #ifdef PUSH_ROUNDING
3276 /* In case we output to the stack, but the size is smaller than the
3277 machine can push exactly, we need to use move instructions. */
3278 if (stack
3279 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3280 != GET_MODE_SIZE (submode)))
3281 {
3282 rtx temp;
3283 HOST_WIDE_INT offset1, offset2;
3284
3285 /* Do not use anti_adjust_stack, since we don't want to update
3286 stack_pointer_delta. */
3287 temp = expand_binop (Pmode,
3288 #ifdef STACK_GROWS_DOWNWARD
3289 sub_optab,
3290 #else
3291 add_optab,
3292 #endif
3293 stack_pointer_rtx,
3294 GEN_INT
3295 (PUSH_ROUNDING
3296 (GET_MODE_SIZE (GET_MODE (x)))),
3297 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3298
3299 if (temp != stack_pointer_rtx)
3300 emit_move_insn (stack_pointer_rtx, temp);
3301
3302 #ifdef STACK_GROWS_DOWNWARD
3303 offset1 = 0;
3304 offset2 = GET_MODE_SIZE (submode);
3305 #else
3306 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3307 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3308 + GET_MODE_SIZE (submode));
3309 #endif
3310
3311 emit_move_insn (change_address (x, submode,
3312 gen_rtx_PLUS (Pmode,
3313 stack_pointer_rtx,
3314 GEN_INT (offset1))),
3315 gen_realpart (submode, y));
3316 emit_move_insn (change_address (x, submode,
3317 gen_rtx_PLUS (Pmode,
3318 stack_pointer_rtx,
3319 GEN_INT (offset2))),
3320 gen_imagpart (submode, y));
3321 }
3322 else
3323 #endif
3324 /* If this is a stack, push the highpart first, so it
3325 will be in the argument order.
3326
3327 In that case, change_address is used only to convert
3328 the mode, not to change the address. */
3329 if (stack)
3330 {
3331 /* Note that the real part always precedes the imag part in memory
3332 regardless of the machine's endianness. */
3333 #ifdef STACK_GROWS_DOWNWARD
3334 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3335 (gen_rtx_MEM (submode, XEXP (x, 0)),
3336 gen_imagpart (submode, y)));
3337 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3338 (gen_rtx_MEM (submode, XEXP (x, 0)),
3339 gen_realpart (submode, y)));
3340 #else
3341 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3342 (gen_rtx_MEM (submode, XEXP (x, 0)),
3343 gen_realpart (submode, y)));
3344 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3345 (gen_rtx_MEM (submode, XEXP (x, 0)),
3346 gen_imagpart (submode, y)));
3347 #endif
3348 }
3349 else
3350 {
3351 rtx realpart_x, realpart_y;
3352 rtx imagpart_x, imagpart_y;
3353
3354 /* If this is a complex value with each part being smaller than a
3355 word, the usual calling sequence will likely pack the pieces into
3356 a single register. Unfortunately, SUBREG of hard registers only
3357 deals in terms of words, so we have a problem converting input
3358 arguments to the CONCAT of two registers that is used elsewhere
3359 for complex values. If this is before reload, we can copy it into
3360 memory and reload. FIXME, we should see about using extract and
3361 insert on integer registers, but complex short and complex char
3362 variables should be rarely used. */
3363 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3364 && (reload_in_progress | reload_completed) == 0)
3365 {
3366 int packed_dest_p
3367 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3368 int packed_src_p
3369 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3370
3371 if (packed_dest_p || packed_src_p)
3372 {
3373 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3374 ? MODE_FLOAT : MODE_INT);
3375
3376 enum machine_mode reg_mode
3377 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3378
3379 if (reg_mode != BLKmode)
3380 {
3381 rtx mem = assign_stack_temp (reg_mode,
3382 GET_MODE_SIZE (mode), 0);
3383 rtx cmem = adjust_address (mem, mode, 0);
3384
3385 cfun->cannot_inline
3386 = N_("function using short complex types cannot be inline");
3387
3388 if (packed_dest_p)
3389 {
3390 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3391
3392 emit_move_insn_1 (cmem, y);
3393 return emit_move_insn_1 (sreg, mem);
3394 }
3395 else
3396 {
3397 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3398
3399 emit_move_insn_1 (mem, sreg);
3400 return emit_move_insn_1 (x, cmem);
3401 }
3402 }
3403 }
3404 }
3405
3406 realpart_x = gen_realpart (submode, x);
3407 realpart_y = gen_realpart (submode, y);
3408 imagpart_x = gen_imagpart (submode, x);
3409 imagpart_y = gen_imagpart (submode, y);
3410
3411 /* Show the output dies here. This is necessary for SUBREGs
3412 of pseudos since we cannot track their lifetimes correctly;
3413 hard regs shouldn't appear here except as return values.
3414 We never want to emit such a clobber after reload. */
3415 if (x != y
3416 && ! (reload_in_progress || reload_completed)
3417 && (GET_CODE (realpart_x) == SUBREG
3418 || GET_CODE (imagpart_x) == SUBREG))
3419 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3420
3421 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3422 (realpart_x, realpart_y));
3423 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3424 (imagpart_x, imagpart_y));
3425 }
3426
3427 return get_last_insn ();
3428 }
3429
3430 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3431 find a mode to do it in. If we have a movcc, use it. Otherwise,
3432 find the MODE_INT mode of the same width. */
3433 else if (GET_MODE_CLASS (mode) == MODE_CC
3434 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3435 {
3436 enum insn_code insn_code;
3437 enum machine_mode tmode = VOIDmode;
3438 rtx x1 = x, y1 = y;
3439
3440 if (mode != CCmode
3441 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3442 tmode = CCmode;
3443 else
3444 for (tmode = QImode; tmode != VOIDmode;
3445 tmode = GET_MODE_WIDER_MODE (tmode))
3446 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3447 break;
3448
3449 if (tmode == VOIDmode)
3450 abort ();
3451
3452 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3453 may call change_address which is not appropriate if we were
3454 called when a reload was in progress. We don't have to worry
3455 about changing the address since the size in bytes is supposed to
3456 be the same. Copy the MEM to change the mode and move any
3457 substitutions from the old MEM to the new one. */
3458
3459 if (reload_in_progress)
3460 {
3461 x = gen_lowpart_common (tmode, x1);
3462 if (x == 0 && GET_CODE (x1) == MEM)
3463 {
3464 x = adjust_address_nv (x1, tmode, 0);
3465 copy_replacements (x1, x);
3466 }
3467
3468 y = gen_lowpart_common (tmode, y1);
3469 if (y == 0 && GET_CODE (y1) == MEM)
3470 {
3471 y = adjust_address_nv (y1, tmode, 0);
3472 copy_replacements (y1, y);
3473 }
3474 }
3475 else
3476 {
3477 x = gen_lowpart (tmode, x);
3478 y = gen_lowpart (tmode, y);
3479 }
3480
3481 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3482 return emit_insn (GEN_FCN (insn_code) (x, y));
3483 }
3484
3485 /* This will handle any multi-word or full-word mode that lacks a move_insn
3486 pattern. However, you will get better code if you define such patterns,
3487 even if they must turn into multiple assembler instructions. */
3488 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3489 {
3490 rtx last_insn = 0;
3491 rtx seq, inner;
3492 int need_clobber;
3493 int i;
3494
3495 #ifdef PUSH_ROUNDING
3496
3497 /* If X is a push on the stack, do the push now and replace
3498 X with a reference to the stack pointer. */
3499 if (push_operand (x, GET_MODE (x)))
3500 {
3501 rtx temp;
3502 enum rtx_code code;
3503
3504 /* Do not use anti_adjust_stack, since we don't want to update
3505 stack_pointer_delta. */
3506 temp = expand_binop (Pmode,
3507 #ifdef STACK_GROWS_DOWNWARD
3508 sub_optab,
3509 #else
3510 add_optab,
3511 #endif
3512 stack_pointer_rtx,
3513 GEN_INT
3514 (PUSH_ROUNDING
3515 (GET_MODE_SIZE (GET_MODE (x)))),
3516 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3517
3518 if (temp != stack_pointer_rtx)
3519 emit_move_insn (stack_pointer_rtx, temp);
3520
3521 code = GET_CODE (XEXP (x, 0));
3522
3523 /* Just hope that small offsets off SP are OK. */
3524 if (code == POST_INC)
3525 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3526 GEN_INT (-((HOST_WIDE_INT)
3527 GET_MODE_SIZE (GET_MODE (x)))));
3528 else if (code == POST_DEC)
3529 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3530 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3531 else
3532 temp = stack_pointer_rtx;
3533
3534 x = change_address (x, VOIDmode, temp);
3535 }
3536 #endif
3537
3538 /* If we are in reload, see if either operand is a MEM whose address
3539 is scheduled for replacement. */
3540 if (reload_in_progress && GET_CODE (x) == MEM
3541 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3542 x = replace_equiv_address_nv (x, inner);
3543 if (reload_in_progress && GET_CODE (y) == MEM
3544 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3545 y = replace_equiv_address_nv (y, inner);
3546
3547 start_sequence ();
3548
3549 need_clobber = 0;
3550 for (i = 0;
3551 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3552 i++)
3553 {
3554 rtx xpart = operand_subword (x, i, 1, mode);
3555 rtx ypart = operand_subword (y, i, 1, mode);
3556
3557 /* If we can't get a part of Y, put Y into memory if it is a
3558 constant. Otherwise, force it into a register. If we still
3559 can't get a part of Y, abort. */
3560 if (ypart == 0 && CONSTANT_P (y))
3561 {
3562 y = force_const_mem (mode, y);
3563 ypart = operand_subword (y, i, 1, mode);
3564 }
3565 else if (ypart == 0)
3566 ypart = operand_subword_force (y, i, mode);
3567
3568 if (xpart == 0 || ypart == 0)
3569 abort ();
3570
3571 need_clobber |= (GET_CODE (xpart) == SUBREG);
3572
3573 last_insn = emit_move_insn (xpart, ypart);
3574 }
3575
3576 seq = get_insns ();
3577 end_sequence ();
3578
3579 /* Show the output dies here. This is necessary for SUBREGs
3580 of pseudos since we cannot track their lifetimes correctly;
3581 hard regs shouldn't appear here except as return values.
3582 We never want to emit such a clobber after reload. */
3583 if (x != y
3584 && ! (reload_in_progress || reload_completed)
3585 && need_clobber != 0)
3586 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3587
3588 emit_insn (seq);
3589
3590 return last_insn;
3591 }
3592 else
3593 abort ();
3594 }
3595
3596 /* If Y is representable exactly in a narrower mode, and the target can
3597 perform the extension directly from constant or memory, then emit the
3598 move as an extension. */
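/* For example, a DFmode CONST_DOUBLE such as 0.5 is exactly representable
   in SFmode, so on a target with a float-extend pattern from SFmode the
   move can be emitted as an SFmode constant extended to DFmode.  */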
3599
3600 static rtx
3601 compress_float_constant (x, y)
3602 rtx x, y;
3603 {
3604 enum machine_mode dstmode = GET_MODE (x);
3605 enum machine_mode orig_srcmode = GET_MODE (y);
3606 enum machine_mode srcmode;
3607 REAL_VALUE_TYPE r;
3608
3609 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3610
3611 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3612 srcmode != orig_srcmode;
3613 srcmode = GET_MODE_WIDER_MODE (srcmode))
3614 {
3615 enum insn_code ic;
3616 rtx trunc_y, last_insn;
3617
3618 /* Skip if the target can't extend this way. */
3619 ic = can_extend_p (dstmode, srcmode, 0);
3620 if (ic == CODE_FOR_nothing)
3621 continue;
3622
3623 /* Skip if the narrowed value isn't exact. */
3624 if (! exact_real_truncate (srcmode, &r))
3625 continue;
3626
3627 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3628
3629 if (LEGITIMATE_CONSTANT_P (trunc_y))
3630 {
3631 /* Skip if the target needs extra instructions to perform
3632 the extension. */
3633 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3634 continue;
3635 }
3636 else if (float_extend_from_mem[dstmode][srcmode])
3637 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3638 else
3639 continue;
3640
3641 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3642 last_insn = get_last_insn ();
3643
3644 if (GET_CODE (x) == REG)
3645 REG_NOTES (last_insn)
3646 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3647
3648 return last_insn;
3649 }
3650
3651 return NULL_RTX;
3652 }
3653 \f
3654 /* Pushing data onto the stack. */
3655
3656 /* Push a block of length SIZE (perhaps variable)
3657 and return an rtx to address the beginning of the block.
3658 Note that it is not possible for the value returned to be a QUEUED.
3659 The value may be virtual_outgoing_args_rtx.
3660
3661 EXTRA is the number of bytes of padding to push in addition to SIZE.
3662 BELOW nonzero means this padding comes at low addresses;
3663 otherwise, the padding comes at high addresses. */
3664
3665 rtx
3666 push_block (size, extra, below)
3667 rtx size;
3668 int extra, below;
3669 {
3670 rtx temp;
3671
3672 size = convert_modes (Pmode, ptr_mode, size, 1);
3673 if (CONSTANT_P (size))
3674 anti_adjust_stack (plus_constant (size, extra));
3675 else if (GET_CODE (size) == REG && extra == 0)
3676 anti_adjust_stack (size);
3677 else
3678 {
3679 temp = copy_to_mode_reg (Pmode, size);
3680 if (extra != 0)
3681 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3682 temp, 0, OPTAB_LIB_WIDEN);
3683 anti_adjust_stack (temp);
3684 }
3685
3686 #ifndef STACK_GROWS_DOWNWARD
3687 if (0)
3688 #else
3689 if (1)
3690 #endif
3691 {
3692 temp = virtual_outgoing_args_rtx;
3693 if (extra != 0 && below)
3694 temp = plus_constant (temp, extra);
3695 }
3696 else
3697 {
3698 if (GET_CODE (size) == CONST_INT)
3699 temp = plus_constant (virtual_outgoing_args_rtx,
3700 -INTVAL (size) - (below ? 0 : extra));
3701 else if (extra != 0 && !below)
3702 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3703 negate_rtx (Pmode, plus_constant (size, extra)));
3704 else
3705 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3706 negate_rtx (Pmode, size));
3707 }
3708
3709 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3710 }
3711
3712 #ifdef PUSH_ROUNDING
3713
3714 /* Emit single push insn. */
3715
3716 static void
3717 emit_single_push_insn (mode, x, type)
3718 rtx x;
3719 enum machine_mode mode;
3720 tree type;
3721 {
3722 rtx dest_addr;
3723 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3724 rtx dest;
3725 enum insn_code icode;
3726 insn_operand_predicate_fn pred;
3727
3728 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3729 /* If there is a push pattern, use it. Otherwise try the old way of
3730 throwing a MEM that represents the push operation at the move expander. */
3731 icode = push_optab->handlers[(int) mode].insn_code;
3732 if (icode != CODE_FOR_nothing)
3733 {
3734 if (((pred = insn_data[(int) icode].operand[0].predicate)
3735 && !((*pred) (x, mode))))
3736 x = force_reg (mode, x);
3737 emit_insn (GEN_FCN (icode) (x));
3738 return;
3739 }
3740 if (GET_MODE_SIZE (mode) == rounded_size)
3741 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3742 else
3743 {
3744 #ifdef STACK_GROWS_DOWNWARD
3745 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3746 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3747 #else
3748 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3749 GEN_INT (rounded_size));
3750 #endif
3751 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3752 }
3753
3754 dest = gen_rtx_MEM (mode, dest_addr);
3755
3756 if (type != 0)
3757 {
3758 set_mem_attributes (dest, type, 1);
3759
3760 if (flag_optimize_sibling_calls)
3761 /* Function incoming arguments may overlap with sibling call
3762 outgoing arguments and we cannot allow reordering of reads
3763 from function arguments with stores to outgoing arguments
3764 of sibling calls. */
3765 set_mem_alias_set (dest, 0);
3766 }
3767 emit_move_insn (dest, x);
3768 }
3769 #endif
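/* Worked example for emit_single_push_insn (illustration only): on a
   hypothetical target where the stack grows downward and PUSH_ROUNDING
   rounds every push up to 4 bytes, pushing a 2-byte HImode value X has
   GET_MODE_SIZE (HImode) == 2 but rounded_size == 4, so instead of a
   plain STACK_PUSH_CODE address the code above builds

       (set (mem:HI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))
            X)

   which moves the stack pointer by the rounded amount while storing
   only the two bytes of X.  */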
3770
3771 /* Generate code to push X onto the stack, assuming it has mode MODE and
3772 type TYPE.
3773 MODE is redundant except when X is a CONST_INT (since they don't
3774 carry mode info).
3775 SIZE is an rtx for the size of data to be copied (in bytes),
3776 needed only if X is BLKmode.
3777
3778 ALIGN (in bits) is maximum alignment we can assume.
3779
3780 If PARTIAL and REG are both nonzero, then copy that many of the first
3781 words of X into registers starting with REG, and push the rest of X.
3782 The amount of space pushed is decreased by PARTIAL words,
3783 rounded *down* to a multiple of PARM_BOUNDARY.
3784 REG must be a hard register in this case.
3785 If REG is zero but PARTIAL is not, take all other actions for an
3786 argument partially in registers, but do not actually load any
3787 registers.
3788
3789 EXTRA is the amount in bytes of extra space to leave next to this arg.
3790 This is ignored if an argument block has already been allocated.
3791
3792 On a machine that lacks real push insns, ARGS_ADDR is the address of
3793 the bottom of the argument block for this call. We use indexing off there
3794 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3795 argument block has not been preallocated.
3796
3797 ARGS_SO_FAR is the size of args previously pushed for this call.
3798
3799 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3800 for arguments passed in registers. If nonzero, it will be the number
3801 of bytes required. */
3802
3803 void
3804 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3805 args_addr, args_so_far, reg_parm_stack_space,
3806 alignment_pad)
3807 rtx x;
3808 enum machine_mode mode;
3809 tree type;
3810 rtx size;
3811 unsigned int align;
3812 int partial;
3813 rtx reg;
3814 int extra;
3815 rtx args_addr;
3816 rtx args_so_far;
3817 int reg_parm_stack_space;
3818 rtx alignment_pad;
3819 {
3820 rtx xinner;
3821 enum direction stack_direction
3822 #ifdef STACK_GROWS_DOWNWARD
3823 = downward;
3824 #else
3825 = upward;
3826 #endif
3827
3828 /* Decide where to pad the argument: `downward' for below,
3829 `upward' for above, or `none' for don't pad it.
3830 Default is below for small data on big-endian machines; else above. */
3831 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3832
3833 /* Invert direction if stack is post-decrement.
3834 FIXME: why? */
3835 if (STACK_PUSH_CODE == POST_DEC)
3836 if (where_pad != none)
3837 where_pad = (where_pad == downward ? upward : downward);
3838
3839 xinner = x = protect_from_queue (x, 0);
3840
3841 if (mode == BLKmode)
3842 {
3843 /* Copy a block into the stack, entirely or partially. */
3844
3845 rtx temp;
3846 int used = partial * UNITS_PER_WORD;
3847 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3848 int skip;
3849
3850 if (size == 0)
3851 abort ();
3852
3853 used -= offset;
3854
3855 /* USED is now the # of bytes we need not copy to the stack
3856 because registers will take care of them. */
3857
3858 if (partial != 0)
3859 xinner = adjust_address (xinner, BLKmode, used);
3860
3861 /* If the partial register-part of the arg counts in its stack size,
3862 skip the part of stack space corresponding to the registers.
3863 Otherwise, start copying to the beginning of the stack space,
3864 by setting SKIP to 0. */
3865 skip = (reg_parm_stack_space == 0) ? 0 : used;
3866
3867 #ifdef PUSH_ROUNDING
3868 /* Do it with several push insns if that doesn't take lots of insns
3869 and if there is no difficulty with push insns that skip bytes
3870 on the stack for alignment purposes. */
3871 if (args_addr == 0
3872 && PUSH_ARGS
3873 && GET_CODE (size) == CONST_INT
3874 && skip == 0
3875 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3876 /* Here we avoid the case of a structure whose weak alignment
3877 forces many pushes of a small amount of data,
3878 and such small pushes do rounding that causes trouble. */
3879 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3880 || align >= BIGGEST_ALIGNMENT
3881 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3882 == (align / BITS_PER_UNIT)))
3883 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3884 {
3885 /* Push padding now if padding above and stack grows down,
3886 or if padding below and stack grows up.
3887 But if space already allocated, this has already been done. */
3888 if (extra && args_addr == 0
3889 && where_pad != none && where_pad != stack_direction)
3890 anti_adjust_stack (GEN_INT (extra));
3891
3892 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3893 }
3894 else
3895 #endif /* PUSH_ROUNDING */
3896 {
3897 rtx target;
3898
3899 /* Otherwise make space on the stack and copy the data
3900 to the address of that space. */
3901
3902 /* Deduct words put into registers from the size we must copy. */
3903 if (partial != 0)
3904 {
3905 if (GET_CODE (size) == CONST_INT)
3906 size = GEN_INT (INTVAL (size) - used);
3907 else
3908 size = expand_binop (GET_MODE (size), sub_optab, size,
3909 GEN_INT (used), NULL_RTX, 0,
3910 OPTAB_LIB_WIDEN);
3911 }
3912
3913 /* Get the address of the stack space.
3914 In this case, we do not deal with EXTRA separately.
3915 A single stack adjust will do. */
3916 if (! args_addr)
3917 {
3918 temp = push_block (size, extra, where_pad == downward);
3919 extra = 0;
3920 }
3921 else if (GET_CODE (args_so_far) == CONST_INT)
3922 temp = memory_address (BLKmode,
3923 plus_constant (args_addr,
3924 skip + INTVAL (args_so_far)));
3925 else
3926 temp = memory_address (BLKmode,
3927 plus_constant (gen_rtx_PLUS (Pmode,
3928 args_addr,
3929 args_so_far),
3930 skip));
3931
3932 if (!ACCUMULATE_OUTGOING_ARGS)
3933 {
3934 /* If the source is referenced relative to the stack pointer,
3935 copy it to another register to stabilize it. We do not need
3936 to do this if we know that we won't be changing sp. */
3937
3938 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3939 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3940 temp = copy_to_reg (temp);
3941 }
3942
3943 target = gen_rtx_MEM (BLKmode, temp);
3944
3945 if (type != 0)
3946 {
3947 set_mem_attributes (target, type, 1);
3948 /* Function incoming arguments may overlap with sibling call
3949 outgoing arguments and we cannot allow reordering of reads
3950 from function arguments with stores to outgoing arguments
3951 of sibling calls. */
3952 set_mem_alias_set (target, 0);
3953 }
3954
3955 /* ALIGN may well be better aligned than TYPE, e.g. due to
3956 PARM_BOUNDARY. Assume the caller isn't lying. */
3957 set_mem_align (target, align);
3958
3959 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3960 }
3961 }
3962 else if (partial > 0)
3963 {
3964 /* Scalar partly in registers. */
3965
3966 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3967 int i;
3968 int not_stack;
3969 /* # words of start of argument
3970 that we must make space for but need not store. */
3971 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3972 int args_offset = INTVAL (args_so_far);
3973 int skip;
3974
3975 /* Push padding now if padding above and stack grows down,
3976 or if padding below and stack grows up.
3977 But if space already allocated, this has already been done. */
3978 if (extra && args_addr == 0
3979 && where_pad != none && where_pad != stack_direction)
3980 anti_adjust_stack (GEN_INT (extra));
3981
3982 /* If we make space by pushing it, we might as well push
3983 the real data. Otherwise, we can leave OFFSET nonzero
3984 and leave the space uninitialized. */
3985 if (args_addr == 0)
3986 offset = 0;
3987
3988 /* Now NOT_STACK gets the number of words that we don't need to
3989 allocate on the stack. */
3990 not_stack = partial - offset;
3991
3992 /* If the partial register-part of the arg counts in its stack size,
3993 skip the part of stack space corresponding to the registers.
3994 Otherwise, start copying to the beginning of the stack space,
3995 by setting SKIP to 0. */
3996 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3997
3998 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3999 x = validize_mem (force_const_mem (mode, x));
4000
4001 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4002 SUBREGs of such registers are not allowed. */
4003 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4004 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4005 x = copy_to_reg (x);
4006
4007 /* Loop over all the words allocated on the stack for this arg. */
4008 /* We can do it by words, because any scalar bigger than a word
4009 has a size that is a multiple of a word. */
4010 #ifndef PUSH_ARGS_REVERSED
4011 for (i = not_stack; i < size; i++)
4012 #else
4013 for (i = size - 1; i >= not_stack; i--)
4014 #endif
4015 if (i >= not_stack + offset)
4016 emit_push_insn (operand_subword_force (x, i, mode),
4017 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4018 0, args_addr,
4019 GEN_INT (args_offset + ((i - not_stack + skip)
4020 * UNITS_PER_WORD)),
4021 reg_parm_stack_space, alignment_pad);
4022 }
4023 else
4024 {
4025 rtx addr;
4026 rtx dest;
4027
4028 /* Push padding now if padding above and stack grows down,
4029 or if padding below and stack grows up.
4030 But if space already allocated, this has already been done. */
4031 if (extra && args_addr == 0
4032 && where_pad != none && where_pad != stack_direction)
4033 anti_adjust_stack (GEN_INT (extra));
4034
4035 #ifdef PUSH_ROUNDING
4036 if (args_addr == 0 && PUSH_ARGS)
4037 emit_single_push_insn (mode, x, type);
4038 else
4039 #endif
4040 {
4041 if (GET_CODE (args_so_far) == CONST_INT)
4042 addr
4043 = memory_address (mode,
4044 plus_constant (args_addr,
4045 INTVAL (args_so_far)));
4046 else
4047 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4048 args_so_far));
4049 dest = gen_rtx_MEM (mode, addr);
4050 if (type != 0)
4051 {
4052 set_mem_attributes (dest, type, 1);
4053 /* Function incoming arguments may overlap with sibling call
4054 outgoing arguments and we cannot allow reordering of reads
4055 from function arguments with stores to outgoing arguments
4056 of sibling calls. */
4057 set_mem_alias_set (dest, 0);
4058 }
4059
4060 emit_move_insn (dest, x);
4061 }
4062 }
4063
4064 /* If part should go in registers, copy that part
4065 into the appropriate registers. Do this now, at the end,
4066 since mem-to-mem copies above may do function calls. */
4067 if (partial > 0 && reg != 0)
4068 {
4069 /* Handle calls that pass values in multiple non-contiguous locations.
4070 The Irix 6 ABI has examples of this. */
4071 if (GET_CODE (reg) == PARALLEL)
4072 emit_group_load (reg, x, -1); /* ??? size? */
4073 else
4074 move_block_to_reg (REGNO (reg), x, partial, mode);
4075 }
4076
4077 if (extra && args_addr == 0 && where_pad == stack_direction)
4078 anti_adjust_stack (GEN_INT (extra));
4079
4080 if (alignment_pad && args_addr == 0)
4081 anti_adjust_stack (alignment_pad);
4082 }
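/* Usage sketch for emit_push_insn, kept under "#if 0" so it is never
   compiled.  The argument values are invented; the real callers live
   in calls.c.  On a hypothetical 32-bit target with push insns, a
   12-byte BLKmode argument whose first word travels in REG
   (PARTIAL == 1) has only its remaining 8 bytes pushed, and the first
   word is loaded into REG by move_block_to_reg at the end.  */
#if 0
static void
example_emit_push_insn_use (rtx x, rtx size, rtx reg)
{
  emit_push_insn (x, BLKmode, NULL_TREE, size,
		  PARM_BOUNDARY,	/* ALIGN, in bits.  */
		  1,			/* PARTIAL: one word already in REG.  */
		  reg,
		  0,			/* EXTRA padding.  */
		  NULL_RTX,		/* ARGS_ADDR: no preallocated block.  */
		  const0_rtx,		/* ARGS_SO_FAR.  */
		  0,			/* REG_PARM_STACK_SPACE.  */
		  NULL_RTX);		/* ALIGNMENT_PAD.  */
}
#endif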
4083 \f
4084 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4085 operations. */
4086
4087 static rtx
4088 get_subtarget (x)
4089 rtx x;
4090 {
4091 return ((x == 0
4092 /* Only registers can be subtargets. */
4093 || GET_CODE (x) != REG
4094 /* If the register is readonly, it can't be set more than once. */
4095 || RTX_UNCHANGING_P (x)
4096 /* Don't use hard regs to avoid extending their life. */
4097 || REGNO (x) < FIRST_PSEUDO_REGISTER
4098 /* Avoid subtargets inside loops,
4099 since they hide some invariant expressions. */
4100 || preserve_subexpressions_p ())
4101 ? 0 : x);
4102 }
4103
4104 /* Expand an assignment that stores the value of FROM into TO.
4105 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4106 (This may contain a QUEUED rtx;
4107 if the value is constant, this rtx is a constant.)
4108 Otherwise, the returned value is NULL_RTX.
4109
4110 SUGGEST_REG is no longer actually used.
4111 It used to mean, copy the value through a register
4112 and return that register, if that is possible.
4113 We now use WANT_VALUE to decide whether to do this. */
4114
4115 rtx
4116 expand_assignment (to, from, want_value, suggest_reg)
4117 tree to, from;
4118 int want_value;
4119 int suggest_reg ATTRIBUTE_UNUSED;
4120 {
4121 rtx to_rtx = 0;
4122 rtx result;
4123
4124 /* Don't crash if the lhs of the assignment was erroneous. */
4125
4126 if (TREE_CODE (to) == ERROR_MARK)
4127 {
4128 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4129 return want_value ? result : NULL_RTX;
4130 }
4131
4132 /* Assignment of a structure component needs special treatment
4133 if the structure component's rtx is not simply a MEM.
4134 Assignment of an array element at a constant index, and assignment of
4135 an array element in an unaligned packed structure field, has the same
4136 problem. */
4137
4138 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4139 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4140 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4141 {
4142 enum machine_mode mode1;
4143 HOST_WIDE_INT bitsize, bitpos;
4144 rtx orig_to_rtx;
4145 tree offset;
4146 int unsignedp;
4147 int volatilep = 0;
4148 tree tem;
4149
4150 push_temp_slots ();
4151 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4152 &unsignedp, &volatilep);
4153
4154 /* If we are going to use store_bit_field and extract_bit_field,
4155 make sure to_rtx will be safe for multiple use. */
4156
4157 if (mode1 == VOIDmode && want_value)
4158 tem = stabilize_reference (tem);
4159
4160 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4161
4162 if (offset != 0)
4163 {
4164 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4165
4166 if (GET_CODE (to_rtx) != MEM)
4167 abort ();
4168
4169 #ifdef POINTERS_EXTEND_UNSIGNED
4170 if (GET_MODE (offset_rtx) != Pmode)
4171 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4172 #else
4173 if (GET_MODE (offset_rtx) != ptr_mode)
4174 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4175 #endif
4176
4177 /* A constant address in TO_RTX can have VOIDmode; we must not try
4178 to call force_reg in that case, so avoid it. */
4179 if (GET_CODE (to_rtx) == MEM
4180 && GET_MODE (to_rtx) == BLKmode
4181 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4182 && bitsize > 0
4183 && (bitpos % bitsize) == 0
4184 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4185 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4186 {
4187 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4188 bitpos = 0;
4189 }
4190
4191 to_rtx = offset_address (to_rtx, offset_rtx,
4192 highest_pow2_factor_for_type (TREE_TYPE (to),
4193 offset));
4194 }
4195
4196 if (GET_CODE (to_rtx) == MEM)
4197 {
4198 /* If the field is at offset zero, we could have been given the
4199 DECL_RTX of the parent struct. Don't munge it. */
4200 to_rtx = shallow_copy_rtx (to_rtx);
4201
4202 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4203 }
4204
4205 /* Deal with volatile and readonly fields. The former is only done
4206 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4207 if (volatilep && GET_CODE (to_rtx) == MEM)
4208 {
4209 if (to_rtx == orig_to_rtx)
4210 to_rtx = copy_rtx (to_rtx);
4211 MEM_VOLATILE_P (to_rtx) = 1;
4212 }
4213
4214 if (TREE_CODE (to) == COMPONENT_REF
4215 && TREE_READONLY (TREE_OPERAND (to, 1)))
4216 {
4217 if (to_rtx == orig_to_rtx)
4218 to_rtx = copy_rtx (to_rtx);
4219 RTX_UNCHANGING_P (to_rtx) = 1;
4220 }
4221
4222 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4223 {
4224 if (to_rtx == orig_to_rtx)
4225 to_rtx = copy_rtx (to_rtx);
4226 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4227 }
4228
4229 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4230 (want_value
4231 /* Spurious cast for HPUX compiler. */
4232 ? ((enum machine_mode)
4233 TYPE_MODE (TREE_TYPE (to)))
4234 : VOIDmode),
4235 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4236
4237 preserve_temp_slots (result);
4238 free_temp_slots ();
4239 pop_temp_slots ();
4240
4241 /* If the value is meaningful, convert RESULT to the proper mode.
4242 Otherwise, return nothing. */
4243 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4244 TYPE_MODE (TREE_TYPE (from)),
4245 result,
4246 TREE_UNSIGNED (TREE_TYPE (to)))
4247 : NULL_RTX);
4248 }
4249
4250 /* If the rhs is a function call and its value is not an aggregate,
4251 call the function before we start to compute the lhs.
4252 This is needed for correct code for cases such as
4253 val = setjmp (buf) on machines where reference to val
4254 requires loading up part of an address in a separate insn.
4255
4256 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4257 since it might be a promoted variable where the zero- or sign- extension
4258 needs to be done. Handling this in the normal way is safe because no
4259 computation is done before the call. */
4260 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4261 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4262 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4263 && GET_CODE (DECL_RTL (to)) == REG))
4264 {
4265 rtx value;
4266
4267 push_temp_slots ();
4268 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4269 if (to_rtx == 0)
4270 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4271
4272 /* Handle calls that return values in multiple non-contiguous locations.
4273 The Irix 6 ABI has examples of this. */
4274 if (GET_CODE (to_rtx) == PARALLEL)
4275 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4276 else if (GET_MODE (to_rtx) == BLKmode)
4277 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4278 else
4279 {
4280 #ifdef POINTERS_EXTEND_UNSIGNED
4281 if (POINTER_TYPE_P (TREE_TYPE (to))
4282 && GET_MODE (to_rtx) != GET_MODE (value))
4283 value = convert_memory_address (GET_MODE (to_rtx), value);
4284 #endif
4285 emit_move_insn (to_rtx, value);
4286 }
4287 preserve_temp_slots (to_rtx);
4288 free_temp_slots ();
4289 pop_temp_slots ();
4290 return want_value ? to_rtx : NULL_RTX;
4291 }
4292
4293 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4294 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4295
4296 if (to_rtx == 0)
4297 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4298
4299 /* Don't move directly into a return register. */
4300 if (TREE_CODE (to) == RESULT_DECL
4301 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4302 {
4303 rtx temp;
4304
4305 push_temp_slots ();
4306 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4307
4308 if (GET_CODE (to_rtx) == PARALLEL)
4309 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4310 else
4311 emit_move_insn (to_rtx, temp);
4312
4313 preserve_temp_slots (to_rtx);
4314 free_temp_slots ();
4315 pop_temp_slots ();
4316 return want_value ? to_rtx : NULL_RTX;
4317 }
4318
4319 /* In case we are returning the contents of an object which overlaps
4320 the place the value is being stored, use a safe function when copying
4321 a value through a pointer into a structure value return block. */
4322 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4323 && current_function_returns_struct
4324 && !current_function_returns_pcc_struct)
4325 {
4326 rtx from_rtx, size;
4327
4328 push_temp_slots ();
4329 size = expr_size (from);
4330 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4331
4332 if (TARGET_MEM_FUNCTIONS)
4333 emit_library_call (memmove_libfunc, LCT_NORMAL,
4334 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4335 XEXP (from_rtx, 0), Pmode,
4336 convert_to_mode (TYPE_MODE (sizetype),
4337 size, TREE_UNSIGNED (sizetype)),
4338 TYPE_MODE (sizetype));
4339 else
4340 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4341 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4342 XEXP (to_rtx, 0), Pmode,
4343 convert_to_mode (TYPE_MODE (integer_type_node),
4344 size,
4345 TREE_UNSIGNED (integer_type_node)),
4346 TYPE_MODE (integer_type_node));
4347
4348 preserve_temp_slots (to_rtx);
4349 free_temp_slots ();
4350 pop_temp_slots ();
4351 return want_value ? to_rtx : NULL_RTX;
4352 }
4353
4354 /* Compute FROM and store the value in the rtx we got. */
4355
4356 push_temp_slots ();
4357 result = store_expr (from, to_rtx, want_value);
4358 preserve_temp_slots (result);
4359 free_temp_slots ();
4360 pop_temp_slots ();
4361 return want_value ? result : NULL_RTX;
4362 }
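/* Illustrative note on expand_assignment (not part of the compiler):
   for C source such as

       struct s { int hi : 3; int lo : 29; } v;
       v.hi = n;

   the lhs is a COMPONENT_REF, so the first branch above is taken:
   get_inner_reference reports bitsize == 3 together with the bit
   position of HI, and store_field emits the mask-and-insert sequence
   rather than a plain move.  A whole-object assignment "v = w;" has a
   VAR_DECL on the left and instead reaches the final store_expr call.  */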
4363
4364 /* Generate code for computing expression EXP,
4365 and storing the value into TARGET.
4366 TARGET may contain a QUEUED rtx.
4367
4368 If WANT_VALUE & 1 is nonzero, return a copy of the value
4369 not in TARGET, so that we can be sure to use the proper
4370 value in a containing expression even if TARGET has something
4371 else stored in it. If possible, we copy the value through a pseudo
4372 and return that pseudo. Or, if the value is constant, we try to
4373 return the constant. In some cases, we return a pseudo
4374 copied *from* TARGET.
4375
4376 If the mode is BLKmode then we may return TARGET itself.
4377 It turns out that in BLKmode this doesn't cause a problem,
4378 because C has no operators that could combine two different
4379 assignments into the same BLKmode object with different values
4380 and no intervening sequence point. Will other languages need this to
4381 be more thorough?
4382
4383 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4384 to catch quickly any cases where the caller uses the value
4385 and fails to set WANT_VALUE.
4386
4387 If WANT_VALUE & 2 is set, this is a store into a call param on the
4388 stack, and block moves may need to be treated specially. */
4389
4390 rtx
4391 store_expr (exp, target, want_value)
4392 tree exp;
4393 rtx target;
4394 int want_value;
4395 {
4396 rtx temp;
4397 int dont_return_target = 0;
4398 int dont_store_target = 0;
4399
4400 if (VOID_TYPE_P (TREE_TYPE (exp)))
4401 {
4402 /* C++ can generate ?: expressions with a throw expression in one
4403 branch and an rvalue in the other. Here, we resolve attempts to
4404 store the throw expression's nonexistent result. */
4405 if (want_value)
4406 abort ();
4407 expand_expr (exp, const0_rtx, VOIDmode, 0);
4408 return NULL_RTX;
4409 }
4410 if (TREE_CODE (exp) == COMPOUND_EXPR)
4411 {
4412 /* Perform first part of compound expression, then assign from second
4413 part. */
4414 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4415 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4416 emit_queue ();
4417 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4418 }
4419 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4420 {
4421 /* For conditional expression, get safe form of the target. Then
4422 test the condition, doing the appropriate assignment on either
4423 side. This avoids the creation of unnecessary temporaries.
4424 For non-BLKmode, it is more efficient not to do this. */
4425
4426 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4427
4428 emit_queue ();
4429 target = protect_from_queue (target, 1);
4430
4431 do_pending_stack_adjust ();
4432 NO_DEFER_POP;
4433 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4434 start_cleanup_deferral ();
4435 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4436 end_cleanup_deferral ();
4437 emit_queue ();
4438 emit_jump_insn (gen_jump (lab2));
4439 emit_barrier ();
4440 emit_label (lab1);
4441 start_cleanup_deferral ();
4442 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4443 end_cleanup_deferral ();
4444 emit_queue ();
4445 emit_label (lab2);
4446 OK_DEFER_POP;
4447
4448 return want_value & 1 ? target : NULL_RTX;
4449 }
4450 else if (queued_subexp_p (target))
4451 /* If target contains a postincrement, let's not risk
4452 using it as the place to generate the rhs. */
4453 {
4454 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4455 {
4456 /* Expand EXP into a new pseudo. */
4457 temp = gen_reg_rtx (GET_MODE (target));
4458 temp = expand_expr (exp, temp, GET_MODE (target),
4459 (want_value & 2
4460 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4461 }
4462 else
4463 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4464 (want_value & 2
4465 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4466
4467 /* If target is volatile, ANSI requires accessing the value
4468 *from* the target, if it is accessed. So make that happen.
4469 In no case return the target itself. */
4470 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4471 dont_return_target = 1;
4472 }
4473 else if ((want_value & 1) != 0
4474 && GET_CODE (target) == MEM
4475 && ! MEM_VOLATILE_P (target)
4476 && GET_MODE (target) != BLKmode)
4477 /* If target is in memory and caller wants value in a register instead,
4478 arrange that. Pass TARGET as target for expand_expr so that,
4479 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4480 We know expand_expr will not use the target in that case.
4481 Don't do this if TARGET is volatile because we are supposed
4482 to write it and then read it. */
4483 {
4484 temp = expand_expr (exp, target, GET_MODE (target),
4485 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4486 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4487 {
4488 /* If TEMP is already in the desired TARGET, only copy it from
4489 memory and don't store it there again. */
4490 if (temp == target
4491 || (rtx_equal_p (temp, target)
4492 && ! side_effects_p (temp) && ! side_effects_p (target)))
4493 dont_store_target = 1;
4494 temp = copy_to_reg (temp);
4495 }
4496 dont_return_target = 1;
4497 }
4498 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4499 /* If this is a scalar in a register that is stored in a wider mode
4500 than the declared mode, compute the result into its declared mode
4501 and then convert to the wider mode. Our value is the computed
4502 expression. */
4503 {
4504 rtx inner_target = 0;
4505
4506 /* If we don't want a value, we can do the conversion inside EXP,
4507 which will often result in some optimizations. Do the conversion
4508 in two steps: first change the signedness, if needed, then
4509 the extend. But don't do this if the type of EXP is a subtype
4510 of something else since then the conversion might involve
4511 more than just converting modes. */
4512 if ((want_value & 1) == 0
4513 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4514 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4515 {
4516 if (TREE_UNSIGNED (TREE_TYPE (exp))
4517 != SUBREG_PROMOTED_UNSIGNED_P (target))
4518 exp = convert
4519 ((*lang_hooks.types.signed_or_unsigned_type)
4520 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4521
4522 exp = convert ((*lang_hooks.types.type_for_mode)
4523 (GET_MODE (SUBREG_REG (target)),
4524 SUBREG_PROMOTED_UNSIGNED_P (target)),
4525 exp);
4526
4527 inner_target = SUBREG_REG (target);
4528 }
4529
4530 temp = expand_expr (exp, inner_target, VOIDmode,
4531 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4532
4533 /* If TEMP is a MEM and we want a result value, make the access
4534 now so it gets done only once. Strictly speaking, this is
4535 only necessary if the MEM is volatile, or if the address
4536 overlaps TARGET. But not performing the load twice also
4537 reduces the amount of rtl we generate and then have to CSE. */
4538 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4539 temp = copy_to_reg (temp);
4540
4541 /* If TEMP is a VOIDmode constant, use convert_modes to make
4542 sure that we properly convert it. */
4543 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4544 {
4545 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4546 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4547 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4548 GET_MODE (target), temp,
4549 SUBREG_PROMOTED_UNSIGNED_P (target));
4550 }
4551
4552 convert_move (SUBREG_REG (target), temp,
4553 SUBREG_PROMOTED_UNSIGNED_P (target));
4554
4555 /* If we promoted a constant, change the mode back down to match
4556 target. Otherwise, the caller might get confused by a result whose
4557 mode is larger than expected. */
4558
4559 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4560 {
4561 if (GET_MODE (temp) != VOIDmode)
4562 {
4563 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4564 SUBREG_PROMOTED_VAR_P (temp) = 1;
4565 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4566 SUBREG_PROMOTED_UNSIGNED_P (target));
4567 }
4568 else
4569 temp = convert_modes (GET_MODE (target),
4570 GET_MODE (SUBREG_REG (target)),
4571 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4572 }
4573
4574 return want_value & 1 ? temp : NULL_RTX;
4575 }
4576 else
4577 {
4578 temp = expand_expr (exp, target, GET_MODE (target),
4579 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4580 /* Return TARGET if it's a specified hardware register.
4581 If TARGET is a volatile mem ref, either return TARGET
4582 or return a reg copied *from* TARGET; ANSI requires this.
4583
4584 Otherwise, if TEMP is not TARGET, return TEMP
4585 if it is constant (for efficiency),
4586 or if we really want the correct value. */
4587 if (!(target && GET_CODE (target) == REG
4588 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4589 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4590 && ! rtx_equal_p (temp, target)
4591 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4592 dont_return_target = 1;
4593 }
4594
4595 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4596 the same as that of TARGET, adjust the constant. This is needed, for
4597 example, in case it is a CONST_DOUBLE and we want only a word-sized
4598 value. */
4599 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4600 && TREE_CODE (exp) != ERROR_MARK
4601 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4602 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4603 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4604
4605 /* If value was not generated in the target, store it there.
4606 Convert the value to TARGET's type first if necessary.
4607 If TEMP and TARGET compare equal according to rtx_equal_p, but
4608 one or both of them are volatile memory refs, we have to distinguish
4609 two cases:
4610 - expand_expr has used TARGET. In this case, we must not generate
4611 another copy. This can be detected because TEMP and TARGET are
4612 identical according to ==.
4613 - expand_expr has not used TARGET - that means that the source just
4614 happens to have the same RTX form. Since TEMP will have been created
4615 by expand_expr, it will compare unequal according to ==.
4616 We must generate a copy in this case, to reach the correct number
4617 of volatile memory references. */
4618
4619 if ((! rtx_equal_p (temp, target)
4620 || (temp != target && (side_effects_p (temp)
4621 || side_effects_p (target))))
4622 && TREE_CODE (exp) != ERROR_MARK
4623 && ! dont_store_target
4624 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4625 but TARGET is not a valid memory reference, TEMP will differ
4626 from TARGET although it is really the same location. */
4627 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4628 || target != DECL_RTL_IF_SET (exp))
4629 /* If there's nothing to copy, don't bother. Don't call expr_size
4630 unless necessary, because some front ends' (e.g. C++'s) expr_size
4631 hook aborts on objects that are not supposed to be bit-copied or
4632 bit-initialized. */
4633 && expr_size (exp) != const0_rtx)
4634 {
4635 target = protect_from_queue (target, 1);
4636 if (GET_MODE (temp) != GET_MODE (target)
4637 && GET_MODE (temp) != VOIDmode)
4638 {
4639 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4640 if (dont_return_target)
4641 {
4642 /* In this case, we will return TEMP,
4643 so make sure it has the proper mode.
4644 But don't forget to store the value into TARGET. */
4645 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4646 emit_move_insn (target, temp);
4647 }
4648 else
4649 convert_move (target, temp, unsignedp);
4650 }
4651
4652 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4653 {
4654 /* Handle copying a string constant into an array. The string
4655 constant may be shorter than the array. So copy just the string's
4656 actual length, and clear the rest. First get the size of the data
4657 type of the string, which is actually the size of the target. */
4658 rtx size = expr_size (exp);
4659
4660 if (GET_CODE (size) == CONST_INT
4661 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4662 emit_block_move (target, temp, size,
4663 (want_value & 2
4664 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4665 else
4666 {
4667 /* Compute the size of the data to copy from the string. */
4668 tree copy_size
4669 = size_binop (MIN_EXPR,
4670 make_tree (sizetype, size),
4671 size_int (TREE_STRING_LENGTH (exp)));
4672 rtx copy_size_rtx
4673 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4674 (want_value & 2
4675 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4676 rtx label = 0;
4677
4678 /* Copy that much. */
4679 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4680 TREE_UNSIGNED (sizetype));
4681 emit_block_move (target, temp, copy_size_rtx,
4682 (want_value & 2
4683 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4684
4685 /* Figure out how much is left in TARGET that we have to clear.
4686 Do all calculations in ptr_mode. */
4687 if (GET_CODE (copy_size_rtx) == CONST_INT)
4688 {
4689 size = plus_constant (size, -INTVAL (copy_size_rtx));
4690 target = adjust_address (target, BLKmode,
4691 INTVAL (copy_size_rtx));
4692 }
4693 else
4694 {
4695 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4696 copy_size_rtx, NULL_RTX, 0,
4697 OPTAB_LIB_WIDEN);
4698
4699 #ifdef POINTERS_EXTEND_UNSIGNED
4700 if (GET_MODE (copy_size_rtx) != Pmode)
4701 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4702 TREE_UNSIGNED (sizetype));
4703 #endif
4704
4705 target = offset_address (target, copy_size_rtx,
4706 highest_pow2_factor (copy_size));
4707 label = gen_label_rtx ();
4708 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4709 GET_MODE (size), 0, label);
4710 }
4711
4712 if (size != const0_rtx)
4713 clear_storage (target, size);
4714
4715 if (label)
4716 emit_label (label);
4717 }
4718 }
4719 /* Handle calls that return values in multiple non-contiguous locations.
4720 The Irix 6 ABI has examples of this. */
4721 else if (GET_CODE (target) == PARALLEL)
4722 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4723 else if (GET_MODE (temp) == BLKmode)
4724 emit_block_move (target, temp, expr_size (exp),
4725 (want_value & 2
4726 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4727 else
4728 emit_move_insn (target, temp);
4729 }
4730
4731 /* If we don't want a value, return NULL_RTX. */
4732 if ((want_value & 1) == 0)
4733 return NULL_RTX;
4734
4735 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4736 ??? The latter test doesn't seem to make sense. */
4737 else if (dont_return_target && GET_CODE (temp) != MEM)
4738 return temp;
4739
4740 /* Return TARGET itself if it is a hard register. */
4741 else if ((want_value & 1) != 0
4742 && GET_MODE (target) != BLKmode
4743 && ! (GET_CODE (target) == REG
4744 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4745 return copy_to_reg (target);
4746
4747 else
4748 return target;
4749 }
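/* Usage sketch for store_expr's WANT_VALUE bits, kept under "#if 0" so
   it is never compiled.  Bit 0 asks for the stored value back; bit 1
   marks a store into a call parameter on the stack, which makes block
   moves use BLOCK_OP_CALL_PARM.  The function and variable names are
   invented for the example.  */
#if 0
static void
example_store_expr_uses (tree exp, rtx target)
{
  rtx val;

  /* Statement-level store; no value wanted back.  */
  store_expr (exp, target, 0);

  /* Store whose value is also needed by the containing expression.  */
  val = store_expr (exp, target, 1);

  /* Store of an outgoing call argument already on the stack.  */
  store_expr (exp, target, 2);
}
#endif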
4750 \f
4751 /* Return 1 if EXP just contains zeros. */
4752
4753 static int
4754 is_zeros_p (exp)
4755 tree exp;
4756 {
4757 tree elt;
4758
4759 switch (TREE_CODE (exp))
4760 {
4761 case CONVERT_EXPR:
4762 case NOP_EXPR:
4763 case NON_LVALUE_EXPR:
4764 case VIEW_CONVERT_EXPR:
4765 return is_zeros_p (TREE_OPERAND (exp, 0));
4766
4767 case INTEGER_CST:
4768 return integer_zerop (exp);
4769
4770 case COMPLEX_CST:
4771 return
4772 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4773
4774 case REAL_CST:
4775 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4776
4777 case VECTOR_CST:
4778 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4779 elt = TREE_CHAIN (elt))
4780 if (!is_zeros_p (TREE_VALUE (elt)))
4781 return 0;
4782
4783 return 1;
4784
4785 case CONSTRUCTOR:
4786 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4787 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4788 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4789 if (! is_zeros_p (TREE_VALUE (elt)))
4790 return 0;
4791
4792 return 1;
4793
4794 default:
4795 return 0;
4796 }
4797 }
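/* For instance, the initializers 0, 0.0 and a CONSTRUCTOR all of whose
   elements are zero satisfy is_zeros_p, while -0.0 does not, because
   REAL_VALUES_IDENTICAL distinguishes the sign of zero and a negative
   zero must really be stored.  (Examples for illustration only.)  */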
4798
4799 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4800
4801 static int
4802 mostly_zeros_p (exp)
4803 tree exp;
4804 {
4805 if (TREE_CODE (exp) == CONSTRUCTOR)
4806 {
4807 int elts = 0, zeros = 0;
4808 tree elt = CONSTRUCTOR_ELTS (exp);
4809 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4810 {
4811 /* If there are no ranges of true bits, it is all zero. */
4812 return elt == NULL_TREE;
4813 }
4814 for (; elt; elt = TREE_CHAIN (elt))
4815 {
4816 /* We do not handle the case where the index is a RANGE_EXPR,
4817 so the statistic will be somewhat inaccurate.
4818 We do make a more accurate count in store_constructor itself,
4819 and since this function is only used for nested array elements,
4820 this should be close enough. */
4821 if (mostly_zeros_p (TREE_VALUE (elt)))
4822 zeros++;
4823 elts++;
4824 }
4825
4826 return 4 * zeros >= 3 * elts;
4827 }
4828
4829 return is_zeros_p (exp);
4830 }
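/* Worked example of the threshold above: a nested CONSTRUCTOR such as
   { 0, 0, 0, 5 } has elts == 4 and zeros == 3, and 4 * 3 >= 3 * 4, so
   mostly_zeros_p returns 1; { 0, 0, 5, 7 } gives 4 * 2 < 3 * 4 and so
   returns 0.  (Element values invented for illustration.)  */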
4831 \f
4832 /* Helper function for store_constructor.
4833 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4834 TYPE is the type of the CONSTRUCTOR, not the element type.
4835 CLEARED is as for store_constructor.
4836 ALIAS_SET is the alias set to use for any stores.
4837
4838 This provides a recursive shortcut back to store_constructor when it isn't
4839 necessary to go through store_field. This is so that we can pass through
4840 the cleared field to let store_constructor know that we may not have to
4841 clear a substructure if the outer structure has already been cleared. */
4842
4843 static void
4844 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4845 alias_set)
4846 rtx target;
4847 unsigned HOST_WIDE_INT bitsize;
4848 HOST_WIDE_INT bitpos;
4849 enum machine_mode mode;
4850 tree exp, type;
4851 int cleared;
4852 int alias_set;
4853 {
4854 if (TREE_CODE (exp) == CONSTRUCTOR
4855 && bitpos % BITS_PER_UNIT == 0
4856 /* If we have a nonzero bitpos for a register target, then we just
4857 let store_field do the bitfield handling. This is unlikely to
4858 generate unnecessary clear instructions anyway. */
4859 && (bitpos == 0 || GET_CODE (target) == MEM))
4860 {
4861 if (GET_CODE (target) == MEM)
4862 target
4863 = adjust_address (target,
4864 GET_MODE (target) == BLKmode
4865 || 0 != (bitpos
4866 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4867 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4868
4869
4870 /* Update the alias set, if required. */
4871 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4872 && MEM_ALIAS_SET (target) != 0)
4873 {
4874 target = copy_rtx (target);
4875 set_mem_alias_set (target, alias_set);
4876 }
4877
4878 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4879 }
4880 else
4881 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4882 alias_set);
4883 }
4884
4885 /* Store the value of constructor EXP into the rtx TARGET.
4886 TARGET is either a REG or a MEM; we know it cannot conflict, since
4887 safe_from_p has been called.
4888 CLEARED is true if TARGET is known to have been zero'd.
4889 SIZE is the number of bytes of TARGET we are allowed to modify: this
4890 may not be the same as the size of EXP if we are assigning to a field
4891 which has been packed to exclude padding bits. */
4892
4893 static void
4894 store_constructor (exp, target, cleared, size)
4895 tree exp;
4896 rtx target;
4897 int cleared;
4898 HOST_WIDE_INT size;
4899 {
4900 tree type = TREE_TYPE (exp);
4901 #ifdef WORD_REGISTER_OPERATIONS
4902 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4903 #endif
4904
4905 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4906 || TREE_CODE (type) == QUAL_UNION_TYPE)
4907 {
4908 tree elt;
4909
4910 /* We either clear the aggregate or indicate the value is dead. */
4911 if ((TREE_CODE (type) == UNION_TYPE
4912 || TREE_CODE (type) == QUAL_UNION_TYPE)
4913 && ! cleared
4914 && ! CONSTRUCTOR_ELTS (exp))
4915 /* If the constructor is empty, clear the union. */
4916 {
4917 clear_storage (target, expr_size (exp));
4918 cleared = 1;
4919 }
4920
4921 /* If we are building a static constructor into a register,
4922 set the initial value as zero so we can fold the value into
4923 a constant. But if more than one register is involved,
4924 this probably loses. */
4925 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4926 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4927 {
4928 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4929 cleared = 1;
4930 }
4931
4932 /* If the constructor has fewer fields than the structure
4933 or if we are initializing the structure to mostly zeros,
4934 clear the whole structure first. Don't do this if TARGET is a
4935 register whose mode size isn't equal to SIZE since clear_storage
4936 can't handle this case. */
4937 else if (! cleared && size > 0
4938 && ((list_length (CONSTRUCTOR_ELTS (exp))
4939 != fields_length (type))
4940 || mostly_zeros_p (exp))
4941 && (GET_CODE (target) != REG
4942 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4943 == size)))
4944 {
4945 rtx xtarget = target;
4946
4947 if (readonly_fields_p (type))
4948 {
4949 xtarget = copy_rtx (xtarget);
4950 RTX_UNCHANGING_P (xtarget) = 1;
4951 }
4952
4953 clear_storage (xtarget, GEN_INT (size));
4954 cleared = 1;
4955 }
4956
4957 if (! cleared)
4958 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4959
4960 /* Store each element of the constructor into
4961 the corresponding field of TARGET. */
4962
4963 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4964 {
4965 tree field = TREE_PURPOSE (elt);
4966 tree value = TREE_VALUE (elt);
4967 enum machine_mode mode;
4968 HOST_WIDE_INT bitsize;
4969 HOST_WIDE_INT bitpos = 0;
4970 tree offset;
4971 rtx to_rtx = target;
4972
4973 /* Just ignore missing fields.
4974 We cleared the whole structure, above,
4975 if any fields are missing. */
4976 if (field == 0)
4977 continue;
4978
4979 if (cleared && is_zeros_p (value))
4980 continue;
4981
4982 if (host_integerp (DECL_SIZE (field), 1))
4983 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4984 else
4985 bitsize = -1;
4986
4987 mode = DECL_MODE (field);
4988 if (DECL_BIT_FIELD (field))
4989 mode = VOIDmode;
4990
4991 offset = DECL_FIELD_OFFSET (field);
4992 if (host_integerp (offset, 0)
4993 && host_integerp (bit_position (field), 0))
4994 {
4995 bitpos = int_bit_position (field);
4996 offset = 0;
4997 }
4998 else
4999 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5000
5001 if (offset)
5002 {
5003 rtx offset_rtx;
5004
5005 if (contains_placeholder_p (offset))
5006 offset = build (WITH_RECORD_EXPR, sizetype,
5007 offset, make_tree (TREE_TYPE (exp), target));
5008
5009 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5010 if (GET_CODE (to_rtx) != MEM)
5011 abort ();
5012
5013 #ifdef POINTERS_EXTEND_UNSIGNED
5014 if (GET_MODE (offset_rtx) != Pmode)
5015 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5016 #else
5017 if (GET_MODE (offset_rtx) != ptr_mode)
5018 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5019 #endif
5020
5021 to_rtx = offset_address (to_rtx, offset_rtx,
5022 highest_pow2_factor (offset));
5023 }
5024
5025 if (TREE_READONLY (field))
5026 {
5027 if (GET_CODE (to_rtx) == MEM)
5028 to_rtx = copy_rtx (to_rtx);
5029
5030 RTX_UNCHANGING_P (to_rtx) = 1;
5031 }
5032
5033 #ifdef WORD_REGISTER_OPERATIONS
5034 /* If this initializes a field that is smaller than a word, at the
5035 start of a word, try to widen it to a full word.
5036 This special case allows us to output C++ member function
5037 initializations in a form that the optimizers can understand. */
5038 if (GET_CODE (target) == REG
5039 && bitsize < BITS_PER_WORD
5040 && bitpos % BITS_PER_WORD == 0
5041 && GET_MODE_CLASS (mode) == MODE_INT
5042 && TREE_CODE (value) == INTEGER_CST
5043 && exp_size >= 0
5044 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5045 {
5046 tree type = TREE_TYPE (value);
5047
5048 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5049 {
5050 type = (*lang_hooks.types.type_for_size)
5051 (BITS_PER_WORD, TREE_UNSIGNED (type));
5052 value = convert (type, value);
5053 }
5054
5055 if (BYTES_BIG_ENDIAN)
5056 value
5057 = fold (build (LSHIFT_EXPR, type, value,
5058 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5059 bitsize = BITS_PER_WORD;
5060 mode = word_mode;
5061 }
5062 #endif
5063
5064 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5065 && DECL_NONADDRESSABLE_P (field))
5066 {
5067 to_rtx = copy_rtx (to_rtx);
5068 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5069 }
5070
5071 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5072 value, type, cleared,
5073 get_alias_set (TREE_TYPE (field)));
5074 }
5075 }
5076 else if (TREE_CODE (type) == ARRAY_TYPE
5077 || TREE_CODE (type) == VECTOR_TYPE)
5078 {
5079 tree elt;
5080 int i;
5081 int need_to_clear;
5082 tree domain = TYPE_DOMAIN (type);
5083 tree elttype = TREE_TYPE (type);
5084 int const_bounds_p;
5085 HOST_WIDE_INT minelt = 0;
5086 HOST_WIDE_INT maxelt = 0;
5087
5088 /* Vectors are like arrays, but the domain is stored via an array
5089 type indirectly. */
5090 if (TREE_CODE (type) == VECTOR_TYPE)
5091 {
5092 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5093 the same field as TYPE_DOMAIN, we are not guaranteed that
5094 it always will. */
5095 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5096 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5097 }
5098
5099 const_bounds_p = (TYPE_MIN_VALUE (domain)
5100 && TYPE_MAX_VALUE (domain)
5101 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5102 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5103
5104 /* If we have constant bounds for the range of the type, get them. */
5105 if (const_bounds_p)
5106 {
5107 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5108 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5109 }
5110
5111 /* If the constructor has fewer elements than the array,
5112 clear the whole array first. Similarly if this is a
5113 static constructor of a non-BLKmode object. */
5114 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5115 need_to_clear = 1;
5116 else
5117 {
5118 HOST_WIDE_INT count = 0, zero_count = 0;
5119 need_to_clear = ! const_bounds_p;
5120
5121 /* This loop is a more accurate version of the loop in
5122 mostly_zeros_p (it handles RANGE_EXPR in an index).
5123 It is also needed to check for missing elements. */
5124 for (elt = CONSTRUCTOR_ELTS (exp);
5125 elt != NULL_TREE && ! need_to_clear;
5126 elt = TREE_CHAIN (elt))
5127 {
5128 tree index = TREE_PURPOSE (elt);
5129 HOST_WIDE_INT this_node_count;
5130
5131 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5132 {
5133 tree lo_index = TREE_OPERAND (index, 0);
5134 tree hi_index = TREE_OPERAND (index, 1);
5135
5136 if (! host_integerp (lo_index, 1)
5137 || ! host_integerp (hi_index, 1))
5138 {
5139 need_to_clear = 1;
5140 break;
5141 }
5142
5143 this_node_count = (tree_low_cst (hi_index, 1)
5144 - tree_low_cst (lo_index, 1) + 1);
5145 }
5146 else
5147 this_node_count = 1;
5148
5149 count += this_node_count;
5150 if (mostly_zeros_p (TREE_VALUE (elt)))
5151 zero_count += this_node_count;
5152 }
5153
5154 /* Clear the entire array first if there are any missing elements,
5155 or if the incidence of zero elements is >= 75%. */
5156 if (! need_to_clear
5157 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5158 need_to_clear = 1;
5159 }
5160
5161 if (need_to_clear && size > 0)
5162 {
5163 if (! cleared)
5164 {
5165 if (REG_P (target))
5166 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5167 else
5168 clear_storage (target, GEN_INT (size));
5169 }
5170 cleared = 1;
5171 }
5172 else if (REG_P (target))
5173 /* Inform later passes that the old value is dead. */
5174 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5175
5176 /* Store each element of the constructor into
5177 the corresponding element of TARGET, determined
5178 by counting the elements. */
5179 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5180 elt;
5181 elt = TREE_CHAIN (elt), i++)
5182 {
5183 enum machine_mode mode;
5184 HOST_WIDE_INT bitsize;
5185 HOST_WIDE_INT bitpos;
5186 int unsignedp;
5187 tree value = TREE_VALUE (elt);
5188 tree index = TREE_PURPOSE (elt);
5189 rtx xtarget = target;
5190
5191 if (cleared && is_zeros_p (value))
5192 continue;
5193
5194 unsignedp = TREE_UNSIGNED (elttype);
5195 mode = TYPE_MODE (elttype);
5196 if (mode == BLKmode)
5197 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5198 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5199 : -1);
5200 else
5201 bitsize = GET_MODE_BITSIZE (mode);
5202
5203 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5204 {
5205 tree lo_index = TREE_OPERAND (index, 0);
5206 tree hi_index = TREE_OPERAND (index, 1);
5207 rtx index_r, pos_rtx, loop_end;
5208 struct nesting *loop;
5209 HOST_WIDE_INT lo, hi, count;
5210 tree position;
5211
5212 /* If the range is constant and "small", unroll the loop. */
5213 if (const_bounds_p
5214 && host_integerp (lo_index, 0)
5215 && host_integerp (hi_index, 0)
5216 && (lo = tree_low_cst (lo_index, 0),
5217 hi = tree_low_cst (hi_index, 0),
5218 count = hi - lo + 1,
5219 (GET_CODE (target) != MEM
5220 || count <= 2
5221 || (host_integerp (TYPE_SIZE (elttype), 1)
5222 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5223 <= 40 * 8)))))
5224 {
5225 lo -= minelt; hi -= minelt;
5226 for (; lo <= hi; lo++)
5227 {
5228 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5229
5230 if (GET_CODE (target) == MEM
5231 && !MEM_KEEP_ALIAS_SET_P (target)
5232 && TREE_CODE (type) == ARRAY_TYPE
5233 && TYPE_NONALIASED_COMPONENT (type))
5234 {
5235 target = copy_rtx (target);
5236 MEM_KEEP_ALIAS_SET_P (target) = 1;
5237 }
5238
5239 store_constructor_field
5240 (target, bitsize, bitpos, mode, value, type, cleared,
5241 get_alias_set (elttype));
5242 }
5243 }
5244 else
5245 {
5246 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5247 loop_end = gen_label_rtx ();
5248
5249 unsignedp = TREE_UNSIGNED (domain);
5250
5251 index = build_decl (VAR_DECL, NULL_TREE, domain);
5252
5253 index_r
5254 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5255 &unsignedp, 0));
5256 SET_DECL_RTL (index, index_r);
5257 if (TREE_CODE (value) == SAVE_EXPR
5258 && SAVE_EXPR_RTL (value) == 0)
5259 {
5260 /* Make sure value gets expanded once before the
5261 loop. */
5262 expand_expr (value, const0_rtx, VOIDmode, 0);
5263 emit_queue ();
5264 }
5265 store_expr (lo_index, index_r, 0);
5266 loop = expand_start_loop (0);
5267
5268 /* Assign value to element index. */
5269 position
5270 = convert (ssizetype,
5271 fold (build (MINUS_EXPR, TREE_TYPE (index),
5272 index, TYPE_MIN_VALUE (domain))));
5273 position = size_binop (MULT_EXPR, position,
5274 convert (ssizetype,
5275 TYPE_SIZE_UNIT (elttype)));
5276
5277 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5278 xtarget = offset_address (target, pos_rtx,
5279 highest_pow2_factor (position));
5280 xtarget = adjust_address (xtarget, mode, 0);
5281 if (TREE_CODE (value) == CONSTRUCTOR)
5282 store_constructor (value, xtarget, cleared,
5283 bitsize / BITS_PER_UNIT);
5284 else
5285 store_expr (value, xtarget, 0);
5286
5287 expand_exit_loop_if_false (loop,
5288 build (LT_EXPR, integer_type_node,
5289 index, hi_index));
5290
5291 expand_increment (build (PREINCREMENT_EXPR,
5292 TREE_TYPE (index),
5293 index, integer_one_node), 0, 0);
5294 expand_end_loop ();
5295 emit_label (loop_end);
5296 }
5297 }
5298 else if ((index != 0 && ! host_integerp (index, 0))
5299 || ! host_integerp (TYPE_SIZE (elttype), 1))
5300 {
5301 tree position;
5302
5303 if (index == 0)
5304 index = ssize_int (1);
5305
5306 if (minelt)
5307 index = convert (ssizetype,
5308 fold (build (MINUS_EXPR, index,
5309 TYPE_MIN_VALUE (domain))));
5310
5311 position = size_binop (MULT_EXPR, index,
5312 convert (ssizetype,
5313 TYPE_SIZE_UNIT (elttype)));
5314 xtarget = offset_address (target,
5315 expand_expr (position, 0, VOIDmode, 0),
5316 highest_pow2_factor (position));
5317 xtarget = adjust_address (xtarget, mode, 0);
5318 store_expr (value, xtarget, 0);
5319 }
5320 else
5321 {
5322 if (index != 0)
5323 bitpos = ((tree_low_cst (index, 0) - minelt)
5324 * tree_low_cst (TYPE_SIZE (elttype), 1));
5325 else
5326 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5327
5328 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5329 && TREE_CODE (type) == ARRAY_TYPE
5330 && TYPE_NONALIASED_COMPONENT (type))
5331 {
5332 target = copy_rtx (target);
5333 MEM_KEEP_ALIAS_SET_P (target) = 1;
5334 }
5335
5336 store_constructor_field (target, bitsize, bitpos, mode, value,
5337 type, cleared, get_alias_set (elttype));
5338
5339 }
5340 }
5341 }
5342
5343 /* Set constructor assignments. */
5344 else if (TREE_CODE (type) == SET_TYPE)
5345 {
5346 tree elt = CONSTRUCTOR_ELTS (exp);
5347 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5348 tree domain = TYPE_DOMAIN (type);
5349 tree domain_min, domain_max, bitlength;
5350
5351 /* The default implementation strategy is to extract the constant
5352 parts of the constructor, use that to initialize the target,
5353 and then "or" in whatever non-constant ranges we need in addition.
5354
5355 If a large set is all zero or all ones, it is
5356 probably better to set it using memset (if available) or bzero.
5357 Also, if a large set has just a single range, it may be
5358 better to first clear the whole set (using
5359 bzero/memset) and then set the bits we want. */
5360
5361 /* Check for all zeros. */
5362 if (elt == NULL_TREE && size > 0)
5363 {
5364 if (!cleared)
5365 clear_storage (target, GEN_INT (size));
5366 return;
5367 }
5368
5369 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5370 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5371 bitlength = size_binop (PLUS_EXPR,
5372 size_diffop (domain_max, domain_min),
5373 ssize_int (1));
5374
5375 nbits = tree_low_cst (bitlength, 1);
5376
5377 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5378 are "complicated" (more than one range), initialize (the
5379 constant parts) by copying from a constant. */
5380 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5381 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5382 {
5383 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5384 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5385 char *bit_buffer = (char *) alloca (nbits);
5386 HOST_WIDE_INT word = 0;
5387 unsigned int bit_pos = 0;
5388 unsigned int ibit = 0;
5389 unsigned int offset = 0; /* In bytes from beginning of set. */
5390
5391 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5392 for (;;)
5393 {
5394 if (bit_buffer[ibit])
5395 {
5396 if (BYTES_BIG_ENDIAN)
5397 word |= (1 << (set_word_size - 1 - bit_pos));
5398 else
5399 word |= 1 << bit_pos;
5400 }
5401
5402 bit_pos++; ibit++;
5403 if (bit_pos >= set_word_size || ibit == nbits)
5404 {
5405 if (word != 0 || ! cleared)
5406 {
5407 rtx datum = GEN_INT (word);
5408 rtx to_rtx;
5409
5410 /* The assumption here is that it is safe to use
5411 XEXP if the set is multi-word, but not if
5412 it's single-word. */
5413 if (GET_CODE (target) == MEM)
5414 to_rtx = adjust_address (target, mode, offset);
5415 else if (offset == 0)
5416 to_rtx = target;
5417 else
5418 abort ();
5419 emit_move_insn (to_rtx, datum);
5420 }
5421
5422 if (ibit == nbits)
5423 break;
5424 word = 0;
5425 bit_pos = 0;
5426 offset += set_word_size / BITS_PER_UNIT;
5427 }
5428 }
5429 }
5430 else if (!cleared)
5431 /* Don't bother clearing storage if the set is all ones. */
5432 if (TREE_CHAIN (elt) != NULL_TREE
5433 || (TREE_PURPOSE (elt) == NULL_TREE
5434 ? nbits != 1
5435 : ( ! host_integerp (TREE_VALUE (elt), 0)
5436 || ! host_integerp (TREE_PURPOSE (elt), 0)
5437 || (tree_low_cst (TREE_VALUE (elt), 0)
5438 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5439 != (HOST_WIDE_INT) nbits))))
5440 clear_storage (target, expr_size (exp));
5441
5442 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5443 {
5444 /* Start of range of element or NULL. */
5445 tree startbit = TREE_PURPOSE (elt);
5446 /* End of range of element, or element value. */
5447 tree endbit = TREE_VALUE (elt);
5448 HOST_WIDE_INT startb, endb;
5449 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5450
5451 bitlength_rtx = expand_expr (bitlength,
5452 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5453
5454 /* Handle non-range tuple element like [ expr ]. */
5455 if (startbit == NULL_TREE)
5456 {
5457 startbit = save_expr (endbit);
5458 endbit = startbit;
5459 }
5460
5461 startbit = convert (sizetype, startbit);
5462 endbit = convert (sizetype, endbit);
5463 if (! integer_zerop (domain_min))
5464 {
5465 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5466 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5467 }
5468 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5469 EXPAND_CONST_ADDRESS);
5470 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5471 EXPAND_CONST_ADDRESS);
5472
5473 if (REG_P (target))
5474 {
5475 targetx
5476 = assign_temp
5477 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5478 (GET_MODE (target), 0),
5479 TYPE_QUAL_CONST)),
5480 0, 1, 1);
5481 emit_move_insn (targetx, target);
5482 }
5483
5484 else if (GET_CODE (target) == MEM)
5485 targetx = target;
5486 else
5487 abort ();
5488
5489 /* Optimization: If startbit and endbit are constants divisible
5490 by BITS_PER_UNIT, call memset instead. */
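	  /* For example, assuming BITS_PER_UNIT is 8, a range covering
	     bits 8 through 23 becomes a memset of 2 all-ones bytes
	     starting one byte into TARGETX.  */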
5491 if (TARGET_MEM_FUNCTIONS
5492 && TREE_CODE (startbit) == INTEGER_CST
5493 && TREE_CODE (endbit) == INTEGER_CST
5494 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5495 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5496 {
5497 emit_library_call (memset_libfunc, LCT_NORMAL,
5498 VOIDmode, 3,
5499 plus_constant (XEXP (targetx, 0),
5500 startb / BITS_PER_UNIT),
5501 Pmode,
5502 constm1_rtx, TYPE_MODE (integer_type_node),
5503 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5504 TYPE_MODE (sizetype));
5505 }
5506 else
5507 emit_library_call (setbits_libfunc, LCT_NORMAL,
5508 VOIDmode, 4, XEXP (targetx, 0),
5509 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5510 startbit_rtx, TYPE_MODE (sizetype),
5511 endbit_rtx, TYPE_MODE (sizetype));
5512
5513 if (REG_P (target))
5514 emit_move_insn (target, targetx);
5515 }
5516 }
5517
5518 else
5519 abort ();
5520 }
5521
5522 /* Store the value of EXP (an expression tree)
5523 into a subfield of TARGET which has mode MODE and occupies
5524 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5525 If MODE is VOIDmode, it means that we are storing into a bit-field.
5526
5527 If VALUE_MODE is VOIDmode, return nothing in particular.
5528 UNSIGNEDP is not used in this case.
5529
5530 Otherwise, return an rtx for the value stored. This rtx
5531 has mode VALUE_MODE if that is convenient to do.
5532 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5533
5534 TYPE is the type of the underlying object,
5535
5536 ALIAS_SET is the alias set for the destination. This value will
5537 (in general) be different from that for TARGET, since TARGET is a
5538 reference to the containing structure. */
5539
5540 static rtx
5541 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5542 alias_set)
5543 rtx target;
5544 HOST_WIDE_INT bitsize;
5545 HOST_WIDE_INT bitpos;
5546 enum machine_mode mode;
5547 tree exp;
5548 enum machine_mode value_mode;
5549 int unsignedp;
5550 tree type;
5551 int alias_set;
5552 {
5553 HOST_WIDE_INT width_mask = 0;
5554
5555 if (TREE_CODE (exp) == ERROR_MARK)
5556 return const0_rtx;
5557
5558 /* If we have nothing to store, do nothing unless the expression has
5559 side-effects. */
5560 if (bitsize == 0)
5561 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5562 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5563 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5564
5565 /* If we are storing into an unaligned field of an aligned union that is
5566 in a register, we may have the mode of TARGET being an integer mode but
5567 MODE == BLKmode. In that case, get an aligned object whose size and
5568 alignment are the same as TARGET and store TARGET into it (we can avoid
5569 the store if the field being stored is the entire width of TARGET). Then
5570 call ourselves recursively to store the field into a BLKmode version of
5571 that object. Finally, load from the object into TARGET. This is not
5572 very efficient in general, but should only be slightly more expensive
5573 than the otherwise-required unaligned accesses. Perhaps this can be
5574 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5575 twice, once with emit_move_insn and once via store_field. */
5576
5577 if (mode == BLKmode
5578 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5579 {
5580 rtx object = assign_temp (type, 0, 1, 1);
5581 rtx blk_object = adjust_address (object, BLKmode, 0);
5582
5583 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5584 emit_move_insn (object, target);
5585
5586 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5587 alias_set);
5588
5589 emit_move_insn (target, object);
5590
5591 /* We want to return the BLKmode version of the data. */
5592 return blk_object;
5593 }
5594
5595 if (GET_CODE (target) == CONCAT)
5596 {
5597 /* We're storing into a struct containing a single __complex. */
5598
5599 if (bitpos != 0)
5600 abort ();
5601 return store_expr (exp, target, 0);
5602 }
5603
5604 /* If the structure is in a register or if the component
5605 is a bit field, we cannot use addressing to access it.
5606 Use bit-field techniques or SUBREG to store in it. */
5607
5608 if (mode == VOIDmode
5609 || (mode != BLKmode && ! direct_store[(int) mode]
5610 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5611 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5612 || GET_CODE (target) == REG
5613 || GET_CODE (target) == SUBREG
5614 /* If the field isn't aligned enough to store as an ordinary memref,
5615 store it as a bit field. */
5616 || (mode != BLKmode
5617 && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5618 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
5619 || bitpos % GET_MODE_ALIGNMENT (mode)))
5620 /* If the RHS and field are a constant size and the size of the
5621 RHS isn't the same size as the bitfield, we must use bitfield
5622 operations. */
5623 || (bitsize >= 0
5624 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5625 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5626 {
5627 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5628
5629 /* If BITSIZE is narrower than the size of the type of EXP
5630 we will be narrowing TEMP. Normally, what's wanted are the
5631 low-order bits. However, if EXP's type is a record and this is
5632 	 a big-endian machine, we want the upper BITSIZE bits.  */
5633 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5634 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5635 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5636 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5637 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5638 - bitsize),
5639 temp, 1);
5640
5641 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5642 MODE. */
5643 if (mode != VOIDmode && mode != BLKmode
5644 && mode != TYPE_MODE (TREE_TYPE (exp)))
5645 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5646
5647 /* If the modes of TARGET and TEMP are both BLKmode, both
5648 must be in memory and BITPOS must be aligned on a byte
5649 boundary. If so, we simply do a block copy. */
5650 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5651 {
5652 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5653 || bitpos % BITS_PER_UNIT != 0)
5654 abort ();
5655
5656 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5657 emit_block_move (target, temp,
5658 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5659 / BITS_PER_UNIT),
5660 BLOCK_OP_NORMAL);
5661
5662 return value_mode == VOIDmode ? const0_rtx : target;
5663 }
5664
5665 /* Store the value in the bitfield. */
5666 store_bit_field (target, bitsize, bitpos, mode, temp,
5667 int_size_in_bytes (type));
5668
5669 if (value_mode != VOIDmode)
5670 {
5671 /* The caller wants an rtx for the value.
5672 If possible, avoid refetching from the bitfield itself. */
5673 if (width_mask != 0
5674 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5675 {
5676 tree count;
5677 enum machine_mode tmode;
5678
5679 tmode = GET_MODE (temp);
5680 if (tmode == VOIDmode)
5681 tmode = value_mode;
5682
5683 if (unsignedp)
5684 return expand_and (tmode, temp,
5685 gen_int_mode (width_mask, tmode),
5686 NULL_RTX);
5687
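	      /* For a signed value, sign-extend from BITSIZE bits by
		 shifting the field up to the top of the word and then
		 arithmetically back down.  */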
5688 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5689 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5690 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5691 }
5692
5693 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5694 NULL_RTX, value_mode, VOIDmode,
5695 int_size_in_bytes (type));
5696 }
5697 return const0_rtx;
5698 }
5699 else
5700 {
5701 rtx addr = XEXP (target, 0);
5702 rtx to_rtx = target;
5703
5704 /* If a value is wanted, it must be the lhs;
5705 so make the address stable for multiple use. */
5706
5707 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5708 && ! CONSTANT_ADDRESS_P (addr)
5709 /* A frame-pointer reference is already stable. */
5710 && ! (GET_CODE (addr) == PLUS
5711 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5712 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5713 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5714 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5715
5716 /* Now build a reference to just the desired component. */
5717
5718 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5719
5720 if (to_rtx == target)
5721 to_rtx = copy_rtx (to_rtx);
5722
5723 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5724 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5725 set_mem_alias_set (to_rtx, alias_set);
5726
5727 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5728 }
5729 }
5730 \f
5731 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5732 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5733 codes and find the ultimate containing object, which we return.
5734
5735 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5736 bit position, and *PUNSIGNEDP to the signedness of the field.
5737 If the position of the field is variable, we store a tree
5738 giving the variable offset (in units) in *POFFSET.
5739 This offset is in addition to the bit position.
5740 If the position is not variable, we store 0 in *POFFSET.
5741
5742 If any of the extraction expressions is volatile,
5743 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5744
5745 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5746 is a mode that can be used to access the field. In that case, *PBITSIZE
5747 is redundant.
5748
5749 If the field describes a variable-sized object, *PMODE is set to
5750 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5751 this case, but the address of the object can be found. */
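/* For example, assuming 8-bit bytes and a 32-bit "int" whose mode is SImode,
   a COMPONENT_REF of record R and a non-bit-field "int" field placed at byte
   offset 4 yields *PBITSIZE = 32, *PBITPOS = 32, *POFFSET = 0 and
   *PMODE = SImode, and the tree for R is returned.  */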
5752
5753 tree
5754 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5755 punsignedp, pvolatilep)
5756 tree exp;
5757 HOST_WIDE_INT *pbitsize;
5758 HOST_WIDE_INT *pbitpos;
5759 tree *poffset;
5760 enum machine_mode *pmode;
5761 int *punsignedp;
5762 int *pvolatilep;
5763 {
5764 tree size_tree = 0;
5765 enum machine_mode mode = VOIDmode;
5766 tree offset = size_zero_node;
5767 tree bit_offset = bitsize_zero_node;
5768 tree placeholder_ptr = 0;
5769 tree tem;
5770
5771 /* First get the mode, signedness, and size. We do this from just the
5772 outermost expression. */
5773 if (TREE_CODE (exp) == COMPONENT_REF)
5774 {
5775 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5776 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5777 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5778
5779 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5780 }
5781 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5782 {
5783 size_tree = TREE_OPERAND (exp, 1);
5784 *punsignedp = TREE_UNSIGNED (exp);
5785 }
5786 else
5787 {
5788 mode = TYPE_MODE (TREE_TYPE (exp));
5789 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5790
5791 if (mode == BLKmode)
5792 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5793 else
5794 *pbitsize = GET_MODE_BITSIZE (mode);
5795 }
5796
5797 if (size_tree != 0)
5798 {
5799 if (! host_integerp (size_tree, 1))
5800 mode = BLKmode, *pbitsize = -1;
5801 else
5802 *pbitsize = tree_low_cst (size_tree, 1);
5803 }
5804
5805 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5806 and find the ultimate containing object. */
5807 while (1)
5808 {
5809 if (TREE_CODE (exp) == BIT_FIELD_REF)
5810 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5811 else if (TREE_CODE (exp) == COMPONENT_REF)
5812 {
5813 tree field = TREE_OPERAND (exp, 1);
5814 tree this_offset = DECL_FIELD_OFFSET (field);
5815
5816 /* If this field hasn't been filled in yet, don't go
5817 past it. This should only happen when folding expressions
5818 made during type construction. */
5819 if (this_offset == 0)
5820 break;
5821 else if (! TREE_CONSTANT (this_offset)
5822 && contains_placeholder_p (this_offset))
5823 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5824
5825 offset = size_binop (PLUS_EXPR, offset, this_offset);
5826 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5827 DECL_FIELD_BIT_OFFSET (field));
5828
5829 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5830 }
5831
5832 else if (TREE_CODE (exp) == ARRAY_REF
5833 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5834 {
5835 tree index = TREE_OPERAND (exp, 1);
5836 tree array = TREE_OPERAND (exp, 0);
5837 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5838 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5839 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5840
5841 /* We assume all arrays have sizes that are a multiple of a byte.
5842 First subtract the lower bound, if any, in the type of the
5843 index, then convert to sizetype and multiply by the size of the
5844 array element. */
5845 if (low_bound != 0 && ! integer_zerop (low_bound))
5846 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5847 index, low_bound));
5848
5849 /* If the index has a self-referential type, pass it to a
5850 	     WITH_RECORD_EXPR; if the component size is self-referential,
5851 	     pass the containing array to one.  */
5852 if (! TREE_CONSTANT (index)
5853 && contains_placeholder_p (index))
5854 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5855 if (! TREE_CONSTANT (unit_size)
5856 && contains_placeholder_p (unit_size))
5857 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5858
5859 offset = size_binop (PLUS_EXPR, offset,
5860 size_binop (MULT_EXPR,
5861 convert (sizetype, index),
5862 unit_size));
5863 }
5864
5865 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5866 {
5867 tree new = find_placeholder (exp, &placeholder_ptr);
5868
5869 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5870 We might have been called from tree optimization where we
5871 haven't set up an object yet. */
5872 if (new == 0)
5873 break;
5874 else
5875 exp = new;
5876
5877 continue;
5878 }
5879 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5880 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5881 && ! ((TREE_CODE (exp) == NOP_EXPR
5882 || TREE_CODE (exp) == CONVERT_EXPR)
5883 && (TYPE_MODE (TREE_TYPE (exp))
5884 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5885 break;
5886
5887 /* If any reference in the chain is volatile, the effect is volatile. */
5888 if (TREE_THIS_VOLATILE (exp))
5889 *pvolatilep = 1;
5890
5891 exp = TREE_OPERAND (exp, 0);
5892 }
5893
5894 /* If OFFSET is constant, see if we can return the whole thing as a
5895 constant bit position. Otherwise, split it up. */
5896 if (host_integerp (offset, 0)
5897 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5898 bitsize_unit_node))
5899 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5900 && host_integerp (tem, 0))
5901 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5902 else
5903 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5904
5905 *pmode = mode;
5906 return exp;
5907 }
5908
5909 /* Return 1 if T is an expression that get_inner_reference handles. */
5910
5911 int
5912 handled_component_p (t)
5913 tree t;
5914 {
5915 switch (TREE_CODE (t))
5916 {
5917 case BIT_FIELD_REF:
5918 case COMPONENT_REF:
5919 case ARRAY_REF:
5920 case ARRAY_RANGE_REF:
5921 case NON_LVALUE_EXPR:
5922 case VIEW_CONVERT_EXPR:
5923 return 1;
5924
5925 case NOP_EXPR:
5926 case CONVERT_EXPR:
5927 return (TYPE_MODE (TREE_TYPE (t))
5928 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5929
5930 default:
5931 return 0;
5932 }
5933 }
5934 \f
5935 /* Given an rtx VALUE that may contain additions and multiplications, return
5936 an equivalent value that just refers to a register, memory, or constant.
5937 This is done by generating instructions to perform the arithmetic and
5938 returning a pseudo-register containing the value.
5939
5940 The returned value may be a REG, SUBREG, MEM or constant. */
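/* For example, (plus (reg R) (const_int 4)) is expanded into an add
   instruction and the register holding the sum is returned.  */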
5941
5942 rtx
5943 force_operand (value, target)
5944 rtx value, target;
5945 {
5946 rtx op1, op2;
5947 /* Use subtarget as the target for operand 0 of a binary operation. */
5948 rtx subtarget = get_subtarget (target);
5949 enum rtx_code code = GET_CODE (value);
5950
5951 /* Check for a PIC address load. */
5952 if ((code == PLUS || code == MINUS)
5953 && XEXP (value, 0) == pic_offset_table_rtx
5954 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5955 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5956 || GET_CODE (XEXP (value, 1)) == CONST))
5957 {
5958 if (!subtarget)
5959 subtarget = gen_reg_rtx (GET_MODE (value));
5960 emit_move_insn (subtarget, value);
5961 return subtarget;
5962 }
5963
5964 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5965 {
5966 if (!target)
5967 target = gen_reg_rtx (GET_MODE (value));
5968 convert_move (target, force_operand (XEXP (value, 0), NULL),
5969 code == ZERO_EXTEND);
5970 return target;
5971 }
5972
5973 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5974 {
5975 op2 = XEXP (value, 1);
5976 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5977 subtarget = 0;
5978 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5979 {
5980 code = PLUS;
5981 op2 = negate_rtx (GET_MODE (value), op2);
5982 }
5983
5984 /* Check for an addition with OP2 a constant integer and our first
5985 operand a PLUS of a virtual register and something else. In that
5986 case, we want to emit the sum of the virtual register and the
5987 constant first and then add the other value. This allows virtual
5988 register instantiation to simply modify the constant rather than
5989 creating another one around this addition. */
5990 if (code == PLUS && GET_CODE (op2) == CONST_INT
5991 && GET_CODE (XEXP (value, 0)) == PLUS
5992 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5993 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5994 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5995 {
5996 rtx temp = expand_simple_binop (GET_MODE (value), code,
5997 XEXP (XEXP (value, 0), 0), op2,
5998 subtarget, 0, OPTAB_LIB_WIDEN);
5999 return expand_simple_binop (GET_MODE (value), code, temp,
6000 force_operand (XEXP (XEXP (value,
6001 0), 1), 0),
6002 target, 0, OPTAB_LIB_WIDEN);
6003 }
6004
6005 op1 = force_operand (XEXP (value, 0), subtarget);
6006 op2 = force_operand (op2, NULL_RTX);
6007 switch (code)
6008 {
6009 case MULT:
6010 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6011 case DIV:
6012 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6013 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6014 target, 1, OPTAB_LIB_WIDEN);
6015 else
6016 return expand_divmod (0,
6017 FLOAT_MODE_P (GET_MODE (value))
6018 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6019 GET_MODE (value), op1, op2, target, 0);
6020 break;
6021 case MOD:
6022 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6023 target, 0);
6024 break;
6025 case UDIV:
6026 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6027 target, 1);
6028 break;
6029 case UMOD:
6030 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6031 target, 1);
6032 break;
6033 case ASHIFTRT:
6034 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6035 target, 0, OPTAB_LIB_WIDEN);
6036 break;
6037 default:
6038 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6039 target, 1, OPTAB_LIB_WIDEN);
6040 }
6041 }
6042 if (GET_RTX_CLASS (code) == '1')
6043 {
6044 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6045 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6046 }
6047
6048 #ifdef INSN_SCHEDULING
6049 /* On machines that have insn scheduling, we want all memory references to be
6050 explicit, so we need to deal with such paradoxical SUBREGs. */
6051 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6052 && (GET_MODE_SIZE (GET_MODE (value))
6053 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6054 value
6055 = simplify_gen_subreg (GET_MODE (value),
6056 force_reg (GET_MODE (SUBREG_REG (value)),
6057 force_operand (SUBREG_REG (value),
6058 NULL_RTX)),
6059 GET_MODE (SUBREG_REG (value)),
6060 SUBREG_BYTE (value));
6061 #endif
6062
6063 return value;
6064 }
6065 \f
6066 /* Subroutine of expand_expr: return nonzero iff there is no way that
6067 EXP can reference X, which is being modified. TOP_P is nonzero if this
6068 call is going to be used to determine whether we need a temporary
6069 for EXP, as opposed to a recursive call to this function.
6070
6071 It is always safe for this routine to return zero since it merely
6072 searches for optimization opportunities. */
6073
6074 int
6075 safe_from_p (x, exp, top_p)
6076 rtx x;
6077 tree exp;
6078 int top_p;
6079 {
6080 rtx exp_rtl = 0;
6081 int i, nops;
6082 static tree save_expr_list;
6083
6084 if (x == 0
6085 /* If EXP has varying size, we MUST use a target since we currently
6086 have no way of allocating temporaries of variable size
6087 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6088 So we assume here that something at a higher level has prevented a
6089 clash. This is somewhat bogus, but the best we can do. Only
6090 do this when X is BLKmode and when we are at the top level. */
6091 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6092 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6093 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6094 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6095 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6096 != INTEGER_CST)
6097 && GET_MODE (x) == BLKmode)
6098 /* If X is in the outgoing argument area, it is always safe. */
6099 || (GET_CODE (x) == MEM
6100 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6101 || (GET_CODE (XEXP (x, 0)) == PLUS
6102 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6103 return 1;
6104
6105 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6106 find the underlying pseudo. */
6107 if (GET_CODE (x) == SUBREG)
6108 {
6109 x = SUBREG_REG (x);
6110 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6111 return 0;
6112 }
6113
6114 /* A SAVE_EXPR might appear many times in the expression passed to the
6115 top-level safe_from_p call, and if it has a complex subexpression,
6116 examining it multiple times could result in a combinatorial explosion.
6117 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6118 with optimization took about 28 minutes to compile -- even though it was
6119 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6120 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6121 we have processed. Note that the only test of top_p was above. */
6122
6123 if (top_p)
6124 {
6125 int rtn;
6126 tree t;
6127
6128 save_expr_list = 0;
6129
6130 rtn = safe_from_p (x, exp, 0);
6131
6132 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6133 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6134
6135 return rtn;
6136 }
6137
6138 /* Now look at our tree code and possibly recurse. */
6139 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6140 {
6141 case 'd':
6142 exp_rtl = DECL_RTL_IF_SET (exp);
6143 break;
6144
6145 case 'c':
6146 return 1;
6147
6148 case 'x':
6149 if (TREE_CODE (exp) == TREE_LIST)
6150 {
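	  /* Check every value on the list; the chain may end in a
	     non-TREE_LIST node, which is then checked directly.  */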
6151 while (1)
6152 {
6153 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6154 return 0;
6155 exp = TREE_CHAIN (exp);
6156 if (!exp)
6157 return 1;
6158 if (TREE_CODE (exp) != TREE_LIST)
6159 return safe_from_p (x, exp, 0);
6160 }
6161 }
6162 else if (TREE_CODE (exp) == ERROR_MARK)
6163 return 1; /* An already-visited SAVE_EXPR? */
6164 else
6165 return 0;
6166
6167 case '2':
6168 case '<':
6169 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6170 return 0;
6171 /* FALLTHRU */
6172
6173 case '1':
6174 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6175
6176 case 'e':
6177 case 'r':
6178 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6179 the expression. If it is set, we conflict iff we are that rtx or
6180 both are in memory. Otherwise, we check all operands of the
6181 expression recursively. */
6182
6183 switch (TREE_CODE (exp))
6184 {
6185 case ADDR_EXPR:
6186 /* If the operand is static or we are static, we can't conflict.
6187 Likewise if we don't conflict with the operand at all. */
6188 if (staticp (TREE_OPERAND (exp, 0))
6189 || TREE_STATIC (exp)
6190 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6191 return 1;
6192
6193 /* Otherwise, the only way this can conflict is if we are taking
6194 	 the address of a DECL whose address is part of X, which is
6195 very rare. */
6196 exp = TREE_OPERAND (exp, 0);
6197 if (DECL_P (exp))
6198 {
6199 if (!DECL_RTL_SET_P (exp)
6200 || GET_CODE (DECL_RTL (exp)) != MEM)
6201 return 0;
6202 else
6203 exp_rtl = XEXP (DECL_RTL (exp), 0);
6204 }
6205 break;
6206
6207 case INDIRECT_REF:
6208 if (GET_CODE (x) == MEM
6209 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6210 get_alias_set (exp)))
6211 return 0;
6212 break;
6213
6214 case CALL_EXPR:
6215 /* Assume that the call will clobber all hard registers and
6216 all of memory. */
6217 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6218 || GET_CODE (x) == MEM)
6219 return 0;
6220 break;
6221
6222 case RTL_EXPR:
6223 /* If a sequence exists, we would have to scan every instruction
6224 in the sequence to see if it was safe. This is probably not
6225 worthwhile. */
6226 if (RTL_EXPR_SEQUENCE (exp))
6227 return 0;
6228
6229 exp_rtl = RTL_EXPR_RTL (exp);
6230 break;
6231
6232 case WITH_CLEANUP_EXPR:
6233 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6234 break;
6235
6236 case CLEANUP_POINT_EXPR:
6237 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6238
6239 case SAVE_EXPR:
6240 exp_rtl = SAVE_EXPR_RTL (exp);
6241 if (exp_rtl)
6242 break;
6243
6244 /* If we've already scanned this, don't do it again. Otherwise,
6245 show we've scanned it and record for clearing the flag if we're
6246 going on. */
6247 if (TREE_PRIVATE (exp))
6248 return 1;
6249
6250 TREE_PRIVATE (exp) = 1;
6251 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6252 {
6253 TREE_PRIVATE (exp) = 0;
6254 return 0;
6255 }
6256
6257 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6258 return 1;
6259
6260 case BIND_EXPR:
6261 /* The only operand we look at is operand 1. The rest aren't
6262 part of the expression. */
6263 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6264
6265 case METHOD_CALL_EXPR:
6266 /* This takes an rtx argument, but shouldn't appear here. */
6267 abort ();
6268
6269 default:
6270 break;
6271 }
6272
6273 /* If we have an rtx, we do not need to scan our operands. */
6274 if (exp_rtl)
6275 break;
6276
6277 nops = first_rtl_op (TREE_CODE (exp));
6278 for (i = 0; i < nops; i++)
6279 if (TREE_OPERAND (exp, i) != 0
6280 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6281 return 0;
6282
6283 /* If this is a language-specific tree code, it may require
6284 special handling. */
6285 if ((unsigned int) TREE_CODE (exp)
6286 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6287 && !(*lang_hooks.safe_from_p) (x, exp))
6288 return 0;
6289 }
6290
6291 /* If we have an rtl, find any enclosed object. Then see if we conflict
6292 with it. */
6293 if (exp_rtl)
6294 {
6295 if (GET_CODE (exp_rtl) == SUBREG)
6296 {
6297 exp_rtl = SUBREG_REG (exp_rtl);
6298 if (GET_CODE (exp_rtl) == REG
6299 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6300 return 0;
6301 }
6302
6303 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6304 are memory and they conflict. */
6305 return ! (rtx_equal_p (x, exp_rtl)
6306 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6307 && true_dependence (exp_rtl, VOIDmode, x,
6308 rtx_addr_varies_p)));
6309 }
6310
6311 /* If we reach here, it is safe. */
6312 return 1;
6313 }
6314
6315 /* Subroutine of expand_expr: return rtx if EXP is a
6316 variable or parameter; else return 0. */
6317
6318 static rtx
6319 var_rtx (exp)
6320 tree exp;
6321 {
6322 STRIP_NOPS (exp);
6323 switch (TREE_CODE (exp))
6324 {
6325 case PARM_DECL:
6326 case VAR_DECL:
6327 return DECL_RTL (exp);
6328 default:
6329 return 0;
6330 }
6331 }
6332
6333 #ifdef MAX_INTEGER_COMPUTATION_MODE
6334
6335 void
6336 check_max_integer_computation_mode (exp)
6337 tree exp;
6338 {
6339 enum tree_code code;
6340 enum machine_mode mode;
6341
6342 /* Strip any NOPs that don't change the mode. */
6343 STRIP_NOPS (exp);
6344 code = TREE_CODE (exp);
6345
6346 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6347 if (code == NOP_EXPR
6348 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6349 return;
6350
6351 /* First check the type of the overall operation. We need only look at
6352 unary, binary and relational operations. */
6353 if (TREE_CODE_CLASS (code) == '1'
6354 || TREE_CODE_CLASS (code) == '2'
6355 || TREE_CODE_CLASS (code) == '<')
6356 {
6357 mode = TYPE_MODE (TREE_TYPE (exp));
6358 if (GET_MODE_CLASS (mode) == MODE_INT
6359 && mode > MAX_INTEGER_COMPUTATION_MODE)
6360 internal_error ("unsupported wide integer operation");
6361 }
6362
6363 /* Check operand of a unary op. */
6364 if (TREE_CODE_CLASS (code) == '1')
6365 {
6366 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6367 if (GET_MODE_CLASS (mode) == MODE_INT
6368 && mode > MAX_INTEGER_COMPUTATION_MODE)
6369 internal_error ("unsupported wide integer operation");
6370 }
6371
6372 /* Check operands of a binary/comparison op. */
6373 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6374 {
6375 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6376 if (GET_MODE_CLASS (mode) == MODE_INT
6377 && mode > MAX_INTEGER_COMPUTATION_MODE)
6378 internal_error ("unsupported wide integer operation");
6379
6380 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6381 if (GET_MODE_CLASS (mode) == MODE_INT
6382 && mode > MAX_INTEGER_COMPUTATION_MODE)
6383 internal_error ("unsupported wide integer operation");
6384 }
6385 }
6386 #endif
6387 \f
6388 /* Return the highest power of two that EXP is known to be a multiple of.
6389 This is used in updating alignment of MEMs in array references. */
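/* For example, for (N * 4) + 8, where nothing is known about N, the result
   is 4: N contributes a factor of 1, the multiplication gives 4, and the
   addition takes the minimum of 4 and 8.  */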
6390
6391 static unsigned HOST_WIDE_INT
6392 highest_pow2_factor (exp)
6393 tree exp;
6394 {
6395 unsigned HOST_WIDE_INT c0, c1;
6396
6397 switch (TREE_CODE (exp))
6398 {
6399 case INTEGER_CST:
6400 /* We can find the lowest bit that's a one. If the low
6401 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6402 We need to handle this case since we can find it in a COND_EXPR,
6403 	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6404 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6405 later ICE. */
6406 if (TREE_CONSTANT_OVERFLOW (exp))
6407 return BIGGEST_ALIGNMENT;
6408 else
6409 {
6410 	  /* Note: tree_low_cst is intentionally not used here;
6411 	     we don't care about the upper bits.  */
6412 c0 = TREE_INT_CST_LOW (exp);
6413 c0 &= -c0;
6414 return c0 ? c0 : BIGGEST_ALIGNMENT;
6415 }
6416 break;
6417
6418 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6419 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6420 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6421 return MIN (c0, c1);
6422
6423 case MULT_EXPR:
6424 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6425 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6426 return c0 * c1;
6427
6428 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6429 case CEIL_DIV_EXPR:
6430 if (integer_pow2p (TREE_OPERAND (exp, 1))
6431 && host_integerp (TREE_OPERAND (exp, 1), 1))
6432 {
6433 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6434 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6435 return MAX (1, c0 / c1);
6436 }
6437 break;
6438
6439 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6440 case SAVE_EXPR: case WITH_RECORD_EXPR:
6441 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6442
6443 case COMPOUND_EXPR:
6444 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6445
6446 case COND_EXPR:
6447 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6448 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6449 return MIN (c0, c1);
6450
6451 default:
6452 break;
6453 }
6454
6455 return 1;
6456 }
6457
6458 /* Similar, except that it is known that the expression must be a multiple
6459 of the alignment of TYPE. */
6460
6461 static unsigned HOST_WIDE_INT
6462 highest_pow2_factor_for_type (type, exp)
6463 tree type;
6464 tree exp;
6465 {
6466 unsigned HOST_WIDE_INT type_align, factor;
6467
6468 factor = highest_pow2_factor (exp);
6469 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6470 return MAX (factor, type_align);
6471 }
6472 \f
6473 /* Return an object on the placeholder list that matches EXP, a
6474 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6475 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6476 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6477 is a location which initially points to a starting location in the
6478 placeholder list (zero means start of the list) and where a pointer into
6479 the placeholder list at which the object is found is placed. */
6480
6481 tree
6482 find_placeholder (exp, plist)
6483 tree exp;
6484 tree *plist;
6485 {
6486 tree type = TREE_TYPE (exp);
6487 tree placeholder_expr;
6488
6489 for (placeholder_expr
6490 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6491 placeholder_expr != 0;
6492 placeholder_expr = TREE_CHAIN (placeholder_expr))
6493 {
6494 tree need_type = TYPE_MAIN_VARIANT (type);
6495 tree elt;
6496
6497 /* Find the outermost reference that is of the type we want. If none,
6498 see if any object has a type that is a pointer to the type we
6499 want. */
6500 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6501 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6502 || TREE_CODE (elt) == COND_EXPR)
6503 ? TREE_OPERAND (elt, 1)
6504 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6505 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6506 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6507 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6508 ? TREE_OPERAND (elt, 0) : 0))
6509 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6510 {
6511 if (plist)
6512 *plist = placeholder_expr;
6513 return elt;
6514 }
6515
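      /* No object of the wanted type was found directly; now look for one
	 whose type is a pointer to it and dereference that.  */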
6516 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6517 elt
6518 = ((TREE_CODE (elt) == COMPOUND_EXPR
6519 || TREE_CODE (elt) == COND_EXPR)
6520 ? TREE_OPERAND (elt, 1)
6521 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6522 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6523 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6524 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6525 ? TREE_OPERAND (elt, 0) : 0))
6526 if (POINTER_TYPE_P (TREE_TYPE (elt))
6527 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6528 == need_type))
6529 {
6530 if (plist)
6531 *plist = placeholder_expr;
6532 return build1 (INDIRECT_REF, need_type, elt);
6533 }
6534 }
6535
6536 return 0;
6537 }
6538 \f
6539 /* expand_expr: generate code for computing expression EXP.
6540 An rtx for the computed value is returned. The value is never null.
6541 In the case of a void EXP, const0_rtx is returned.
6542
6543 The value may be stored in TARGET if TARGET is nonzero.
6544 TARGET is just a suggestion; callers must assume that
6545 the rtx returned may not be the same as TARGET.
6546
6547 If TARGET is CONST0_RTX, it means that the value will be ignored.
6548
6549 If TMODE is not VOIDmode, it suggests generating the
6550 result in mode TMODE. But this is done only when convenient.
6551    Otherwise, TMODE is ignored and the value is generated in its natural mode.
6552 TMODE is just a suggestion; callers must assume that
6553 the rtx returned may not have mode TMODE.
6554
6555 Note that TARGET may have neither TMODE nor MODE. In that case, it
6556 probably will not be used.
6557
6558 If MODIFIER is EXPAND_SUM then when EXP is an addition
6559 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6560 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6561 products as above, or REG or MEM, or constant.
6562 Ordinarily in such cases we would output mul or add instructions
6563 and then return a pseudo reg containing the sum.
6564
6565 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6566 it also marks a label as absolutely required (it can't be dead).
6567 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6568 This is used for outputting expressions used in initializers.
6569
6570 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6571 with a constant address even if that address is not normally legitimate.
6572 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6573
6574 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6575 a call parameter. Such targets require special care as we haven't yet
6576 marked TARGET so that it's safe from being trashed by libcalls. We
6577 don't want to use TARGET for anything but the final result;
6578    intermediate values must go elsewhere.  Additionally, calls to
6579 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6580
6581 rtx
6582 expand_expr (exp, target, tmode, modifier)
6583 tree exp;
6584 rtx target;
6585 enum machine_mode tmode;
6586 enum expand_modifier modifier;
6587 {
6588 rtx op0, op1, temp;
6589 tree type = TREE_TYPE (exp);
6590 int unsignedp = TREE_UNSIGNED (type);
6591 enum machine_mode mode;
6592 enum tree_code code = TREE_CODE (exp);
6593 optab this_optab;
6594 rtx subtarget, original_target;
6595 int ignore;
6596 tree context;
6597
6598 /* Handle ERROR_MARK before anybody tries to access its type. */
6599 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6600 {
6601 op0 = CONST0_RTX (tmode);
6602 if (op0 != 0)
6603 return op0;
6604 return const0_rtx;
6605 }
6606
6607 mode = TYPE_MODE (type);
6608 /* Use subtarget as the target for operand 0 of a binary operation. */
6609 subtarget = get_subtarget (target);
6610 original_target = target;
6611 ignore = (target == const0_rtx
6612 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6613 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6614 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6615 && TREE_CODE (type) == VOID_TYPE));
6616
6617 /* If we are going to ignore this result, we need only do something
6618 if there is a side-effect somewhere in the expression. If there
6619 is, short-circuit the most common cases here. Note that we must
6620 not call expand_expr with anything but const0_rtx in case this
6621 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6622
6623 if (ignore)
6624 {
6625 if (! TREE_SIDE_EFFECTS (exp))
6626 return const0_rtx;
6627
6628 /* Ensure we reference a volatile object even if value is ignored, but
6629 don't do this if all we are doing is taking its address. */
6630 if (TREE_THIS_VOLATILE (exp)
6631 && TREE_CODE (exp) != FUNCTION_DECL
6632 && mode != VOIDmode && mode != BLKmode
6633 && modifier != EXPAND_CONST_ADDRESS)
6634 {
6635 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6636 if (GET_CODE (temp) == MEM)
6637 temp = copy_to_reg (temp);
6638 return const0_rtx;
6639 }
6640
6641 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6642 || code == INDIRECT_REF || code == BUFFER_REF)
6643 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6644 modifier);
6645
6646 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6647 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6648 {
6649 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6650 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6651 return const0_rtx;
6652 }
6653 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6654 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6655 /* If the second operand has no side effects, just evaluate
6656 the first. */
6657 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6658 modifier);
6659 else if (code == BIT_FIELD_REF)
6660 {
6661 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6662 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6663 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6664 return const0_rtx;
6665 }
6666
6667 target = 0;
6668 }
6669
6670 #ifdef MAX_INTEGER_COMPUTATION_MODE
6671 /* Only check stuff here if the mode we want is different from the mode
6672 of the expression; if it's the same, check_max_integer_computation_mode
6673 will handle it. Do we really need to check this stuff at all? */
6674
6675 if (target
6676 && GET_MODE (target) != mode
6677 && TREE_CODE (exp) != INTEGER_CST
6678 && TREE_CODE (exp) != PARM_DECL
6679 && TREE_CODE (exp) != ARRAY_REF
6680 && TREE_CODE (exp) != ARRAY_RANGE_REF
6681 && TREE_CODE (exp) != COMPONENT_REF
6682 && TREE_CODE (exp) != BIT_FIELD_REF
6683 && TREE_CODE (exp) != INDIRECT_REF
6684 && TREE_CODE (exp) != CALL_EXPR
6685 && TREE_CODE (exp) != VAR_DECL
6686 && TREE_CODE (exp) != RTL_EXPR)
6687 {
6688 enum machine_mode mode = GET_MODE (target);
6689
6690 if (GET_MODE_CLASS (mode) == MODE_INT
6691 && mode > MAX_INTEGER_COMPUTATION_MODE)
6692 internal_error ("unsupported wide integer operation");
6693 }
6694
6695 if (tmode != mode
6696 && TREE_CODE (exp) != INTEGER_CST
6697 && TREE_CODE (exp) != PARM_DECL
6698 && TREE_CODE (exp) != ARRAY_REF
6699 && TREE_CODE (exp) != ARRAY_RANGE_REF
6700 && TREE_CODE (exp) != COMPONENT_REF
6701 && TREE_CODE (exp) != BIT_FIELD_REF
6702 && TREE_CODE (exp) != INDIRECT_REF
6703 && TREE_CODE (exp) != VAR_DECL
6704 && TREE_CODE (exp) != CALL_EXPR
6705 && TREE_CODE (exp) != RTL_EXPR
6706 && GET_MODE_CLASS (tmode) == MODE_INT
6707 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6708 internal_error ("unsupported wide integer operation");
6709
6710 check_max_integer_computation_mode (exp);
6711 #endif
6712
6713   /* If we will do cse, generate all results into pseudo registers
6714 since 1) that allows cse to find more things
6715 and 2) otherwise cse could produce an insn the machine
6716 cannot support. An exception is a CONSTRUCTOR into a multi-word
6717 MEM: that's much more likely to be most efficient into the MEM.
6718 Another is a CALL_EXPR which must return in memory. */
6719
6720 if (! cse_not_expected && mode != BLKmode && target
6721 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6722 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6723 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6724 target = 0;
6725
6726 switch (code)
6727 {
6728 case LABEL_DECL:
6729 {
6730 tree function = decl_function_context (exp);
6731 /* Handle using a label in a containing function. */
6732 if (function != current_function_decl
6733 && function != inline_function_decl && function != 0)
6734 {
6735 struct function *p = find_function_data (function);
6736 p->expr->x_forced_labels
6737 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6738 p->expr->x_forced_labels);
6739 }
6740 else
6741 {
6742 if (modifier == EXPAND_INITIALIZER)
6743 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6744 label_rtx (exp),
6745 forced_labels);
6746 }
6747
6748 temp = gen_rtx_MEM (FUNCTION_MODE,
6749 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6750 if (function != current_function_decl
6751 && function != inline_function_decl && function != 0)
6752 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6753 return temp;
6754 }
6755
6756 case PARM_DECL:
6757 if (!DECL_RTL_SET_P (exp))
6758 {
6759 error_with_decl (exp, "prior parameter's size depends on `%s'");
6760 return CONST0_RTX (mode);
6761 }
6762
6763 /* ... fall through ... */
6764
6765 case VAR_DECL:
6766 /* If a static var's type was incomplete when the decl was written,
6767 but the type is complete now, lay out the decl now. */
6768 if (DECL_SIZE (exp) == 0
6769 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6770 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6771 layout_decl (exp, 0);
6772
6773 /* ... fall through ... */
6774
6775 case FUNCTION_DECL:
6776 case RESULT_DECL:
6777 if (DECL_RTL (exp) == 0)
6778 abort ();
6779
6780       /* Ensure the variable is marked as used even if it doesn't go through
6781 	 a parser.  If it hasn't been used yet, write out an external
6782 definition. */
6783 if (! TREE_USED (exp))
6784 {
6785 assemble_external (exp);
6786 TREE_USED (exp) = 1;
6787 }
6788
6789 /* Show we haven't gotten RTL for this yet. */
6790 temp = 0;
6791
6792 /* Handle variables inherited from containing functions. */
6793 context = decl_function_context (exp);
6794
6795 /* We treat inline_function_decl as an alias for the current function
6796 because that is the inline function whose vars, types, etc.
6797 are being merged into the current function.
6798 See expand_inline_function. */
6799
6800 if (context != 0 && context != current_function_decl
6801 && context != inline_function_decl
6802 /* If var is static, we don't need a static chain to access it. */
6803 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6804 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6805 {
6806 rtx addr;
6807
6808 /* Mark as non-local and addressable. */
6809 DECL_NONLOCAL (exp) = 1;
6810 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6811 abort ();
6812 (*lang_hooks.mark_addressable) (exp);
6813 if (GET_CODE (DECL_RTL (exp)) != MEM)
6814 abort ();
6815 addr = XEXP (DECL_RTL (exp), 0);
6816 if (GET_CODE (addr) == MEM)
6817 addr
6818 = replace_equiv_address (addr,
6819 fix_lexical_addr (XEXP (addr, 0), exp));
6820 else
6821 addr = fix_lexical_addr (addr, exp);
6822
6823 temp = replace_equiv_address (DECL_RTL (exp), addr);
6824 }
6825
6826 /* This is the case of an array whose size is to be determined
6827 from its initializer, while the initializer is still being parsed.
6828 See expand_decl. */
6829
6830 else if (GET_CODE (DECL_RTL (exp)) == MEM
6831 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6832 temp = validize_mem (DECL_RTL (exp));
6833
6834       /* If DECL_RTL is memory, we are in the normal case.  If the address
6835 	 is not valid, or it is not a register and -fforce-addr is
6836 	 specified, get the address into a register.  */
6837
6838 else if (GET_CODE (DECL_RTL (exp)) == MEM
6839 && modifier != EXPAND_CONST_ADDRESS
6840 && modifier != EXPAND_SUM
6841 && modifier != EXPAND_INITIALIZER
6842 && (! memory_address_p (DECL_MODE (exp),
6843 XEXP (DECL_RTL (exp), 0))
6844 || (flag_force_addr
6845 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6846 temp = replace_equiv_address (DECL_RTL (exp),
6847 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6848
6849 /* If we got something, return it. But first, set the alignment
6850 if the address is a register. */
6851 if (temp != 0)
6852 {
6853 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6854 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6855
6856 return temp;
6857 }
6858
6859 /* If the mode of DECL_RTL does not match that of the decl, it
6860 must be a promoted value. We return a SUBREG of the wanted mode,
6861 but mark it so that we know that it was already extended. */
6862
6863 if (GET_CODE (DECL_RTL (exp)) == REG
6864 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6865 {
6866 /* Get the signedness used for this variable. Ensure we get the
6867 same mode we got when the variable was declared. */
6868 if (GET_MODE (DECL_RTL (exp))
6869 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6870 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6871 abort ();
6872
6873 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6874 SUBREG_PROMOTED_VAR_P (temp) = 1;
6875 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6876 return temp;
6877 }
6878
6879 return DECL_RTL (exp);
6880
6881 case INTEGER_CST:
6882 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6883 TREE_INT_CST_HIGH (exp), mode);
6884
6885 /* ??? If overflow is set, fold will have done an incomplete job,
6886 which can result in (plus xx (const_int 0)), which can get
6887 simplified by validate_replace_rtx during virtual register
6888 instantiation, which can result in unrecognizable insns.
6889 Avoid this by forcing all overflows into registers. */
6890 if (TREE_CONSTANT_OVERFLOW (exp)
6891 && modifier != EXPAND_INITIALIZER)
6892 temp = force_reg (mode, temp);
6893
6894 return temp;
6895
6896 case VECTOR_CST:
6897 return const_vector_from_tree (exp);
6898
6899 case CONST_DECL:
6900 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6901
6902 case REAL_CST:
6903 /* If optimized, generate immediate CONST_DOUBLE
6904 which will be turned into memory by reload if necessary.
6905
6906 We used to force a register so that loop.c could see it. But
6907 this does not allow gen_* patterns to perform optimizations with
6908 the constants. It also produces two insns in cases like "x = 1.0;".
6909 On most machines, floating-point constants are not permitted in
6910 many insns, so we'd end up copying it to a register in any case.
6911
6912 Now, we do the copying in expand_binop, if appropriate. */
6913 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6914 TYPE_MODE (TREE_TYPE (exp)));
6915
6916 case COMPLEX_CST:
6917 case STRING_CST:
6918 temp = output_constant_def (exp, 1);
6919
6920 /* temp contains a constant address.
6921 On RISC machines where a constant address isn't valid,
6922 make some insns to get that address into a register. */
6923 if (modifier != EXPAND_CONST_ADDRESS
6924 && modifier != EXPAND_INITIALIZER
6925 && modifier != EXPAND_SUM
6926 && (! memory_address_p (mode, XEXP (temp, 0))
6927 || flag_force_addr))
6928 return replace_equiv_address (temp,
6929 copy_rtx (XEXP (temp, 0)));
6930 return temp;
6931
6932 case EXPR_WITH_FILE_LOCATION:
6933 {
6934 rtx to_return;
6935 const char *saved_input_filename = input_filename;
6936 int saved_lineno = lineno;
6937 input_filename = EXPR_WFL_FILENAME (exp);
6938 lineno = EXPR_WFL_LINENO (exp);
6939 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6940 emit_line_note (input_filename, lineno);
6941 /* Possibly avoid switching back and forth here. */
6942 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6943 input_filename = saved_input_filename;
6944 lineno = saved_lineno;
6945 return to_return;
6946 }
6947
6948 case SAVE_EXPR:
6949 context = decl_function_context (exp);
6950
6951 /* If this SAVE_EXPR was at global context, assume we are an
6952 initialization function and move it into our context. */
6953 if (context == 0)
6954 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6955
6956 /* We treat inline_function_decl as an alias for the current function
6957 because that is the inline function whose vars, types, etc.
6958 are being merged into the current function.
6959 See expand_inline_function. */
6960 if (context == current_function_decl || context == inline_function_decl)
6961 context = 0;
6962
6963 /* If this is non-local, handle it. */
6964 if (context)
6965 {
6966 /* The following call just exists to abort if the context is
6967 not of a containing function. */
6968 find_function_data (context);
6969
6970 temp = SAVE_EXPR_RTL (exp);
6971 if (temp && GET_CODE (temp) == REG)
6972 {
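	      /* A value living in a register of the containing function
		 cannot be addressed from here; force it into memory
		 first.  */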
6973 put_var_into_stack (exp, /*rescan=*/true);
6974 temp = SAVE_EXPR_RTL (exp);
6975 }
6976 if (temp == 0 || GET_CODE (temp) != MEM)
6977 abort ();
6978 return
6979 replace_equiv_address (temp,
6980 fix_lexical_addr (XEXP (temp, 0), exp));
6981 }
6982 if (SAVE_EXPR_RTL (exp) == 0)
6983 {
6984 if (mode == VOIDmode)
6985 temp = const0_rtx;
6986 else
6987 temp = assign_temp (build_qualified_type (type,
6988 (TYPE_QUALS (type)
6989 | TYPE_QUAL_CONST)),
6990 3, 0, 0);
6991
6992 SAVE_EXPR_RTL (exp) = temp;
6993 if (!optimize && GET_CODE (temp) == REG)
6994 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6995 save_expr_regs);
6996
6997 /* If the mode of TEMP does not match that of the expression, it
6998 must be a promoted value. We pass store_expr a SUBREG of the
6999 wanted mode but mark it so that we know that it was already
7000 extended. */
7001
7002 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7003 {
7004 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7005 promote_mode (type, mode, &unsignedp, 0);
7006 SUBREG_PROMOTED_VAR_P (temp) = 1;
7007 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7008 }
7009
7010 if (temp == const0_rtx)
7011 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7012 else
7013 store_expr (TREE_OPERAND (exp, 0), temp,
7014 modifier == EXPAND_STACK_PARM ? 2 : 0);
7015
7016 TREE_USED (exp) = 1;
7017 }
7018
7019 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7020 must be a promoted value. We return a SUBREG of the wanted mode,
7021 but mark it so that we know that it was already extended. */
7022
7023 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7024 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7025 {
7026 /* Compute the signedness and make the proper SUBREG. */
7027 promote_mode (type, mode, &unsignedp, 0);
7028 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7029 SUBREG_PROMOTED_VAR_P (temp) = 1;
7030 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7031 return temp;
7032 }
7033
7034 return SAVE_EXPR_RTL (exp);
7035
7036 case UNSAVE_EXPR:
7037 {
7038 rtx temp;
7039 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7040 TREE_OPERAND (exp, 0)
7041 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7042 return temp;
7043 }
7044
7045 case PLACEHOLDER_EXPR:
7046 {
7047 tree old_list = placeholder_list;
7048 tree placeholder_expr = 0;
7049
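	/* Find the object this PLACEHOLDER_EXPR stands for and expand that
	   instead, with the placeholder list advanced past the entry we
	   used so nested placeholders do not resolve to it again.  */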
7050 exp = find_placeholder (exp, &placeholder_expr);
7051 if (exp == 0)
7052 abort ();
7053
7054 placeholder_list = TREE_CHAIN (placeholder_expr);
7055 temp = expand_expr (exp, original_target, tmode, modifier);
7056 placeholder_list = old_list;
7057 return temp;
7058 }
7059
7060 case WITH_RECORD_EXPR:
7061 /* Put the object on the placeholder list, expand our first operand,
7062 and pop the list. */
7063 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7064 placeholder_list);
7065 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7066 modifier);
7067 placeholder_list = TREE_CHAIN (placeholder_list);
7068 return target;
7069
7070 case GOTO_EXPR:
7071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7072 expand_goto (TREE_OPERAND (exp, 0));
7073 else
7074 expand_computed_goto (TREE_OPERAND (exp, 0));
7075 return const0_rtx;
7076
7077 case EXIT_EXPR:
7078 expand_exit_loop_if_false (NULL,
7079 invert_truthvalue (TREE_OPERAND (exp, 0)));
7080 return const0_rtx;
7081
7082 case LABELED_BLOCK_EXPR:
7083 if (LABELED_BLOCK_BODY (exp))
7084 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7085 /* Should perhaps use expand_label, but this is simpler and safer. */
7086 do_pending_stack_adjust ();
7087 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7088 return const0_rtx;
7089
7090 case EXIT_BLOCK_EXPR:
7091 if (EXIT_BLOCK_RETURN (exp))
7092 sorry ("returned value in block_exit_expr");
7093 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7094 return const0_rtx;
7095
7096 case LOOP_EXPR:
7097 push_temp_slots ();
7098 expand_start_loop (1);
7099 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7100 expand_end_loop ();
7101 pop_temp_slots ();
7102
7103 return const0_rtx;
7104
7105 case BIND_EXPR:
7106 {
7107 tree vars = TREE_OPERAND (exp, 0);
7108
7109 /* Need to open a binding contour here because
7110 if there are any cleanups they must be contained here. */
7111 expand_start_bindings (2);
7112
7113 /* Mark the corresponding BLOCK for output in its proper place. */
7114 if (TREE_OPERAND (exp, 2) != 0
7115 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7116 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7117
7118 /* If VARS have not yet been expanded, expand them now. */
7119 while (vars)
7120 {
7121 if (!DECL_RTL_SET_P (vars))
7122 expand_decl (vars);
7123 expand_decl_init (vars);
7124 vars = TREE_CHAIN (vars);
7125 }
7126
7127 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7128
7129 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7130
7131 return temp;
7132 }
7133
7134 case RTL_EXPR:
7135 if (RTL_EXPR_SEQUENCE (exp))
7136 {
7137 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7138 abort ();
7139 emit_insn (RTL_EXPR_SEQUENCE (exp));
7140 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7141 }
7142 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7143 free_temps_for_rtl_expr (exp);
7144 return RTL_EXPR_RTL (exp);
7145
7146 case CONSTRUCTOR:
7147 /* If we don't need the result, just ensure we evaluate any
7148 subexpressions. */
7149 if (ignore)
7150 {
7151 tree elt;
7152
7153 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7154 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7155
7156 return const0_rtx;
7157 }
7158
7159 /* All elts simple constants => refer to a constant in memory. But
7160 if this is a non-BLKmode mode, let it store a field at a time
7161 since that should make a CONST_INT or CONST_DOUBLE when we
7162 fold. Likewise, if we have a target we can use, it is best to
7163 store directly into the target unless the type is large enough
7164 that memcpy will be used. If we are making an initializer and
7165 all operands are constant, put it in memory as well.
7166
7167 FIXME: Avoid trying to fill vector constructors piecemeal.
7168 Output them with output_constant_def below unless we're sure
7169 they're zeros. This should go away when vector initializers
7170 are treated like VECTOR_CST instead of arrays.
7171 */
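/* Roughly, then (a hedged illustration, not taken from the sources): a
   CONSTRUCTOR such as { 1, 2 } that is static and entirely constant is
   emitted once as initialized data by output_constant_def and simply
   referenced from memory, whereas one such as { a, 2 } is expanded by
   storing each field into TARGET via store_constructor below.  */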
7172 else if ((TREE_STATIC (exp)
7173 && ((mode == BLKmode
7174 && ! (target != 0 && safe_from_p (target, exp, 1)))
7175 || TREE_ADDRESSABLE (exp)
7176 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7177 && (! MOVE_BY_PIECES_P
7178 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7179 TYPE_ALIGN (type)))
7180 && ((TREE_CODE (type) == VECTOR_TYPE
7181 && !is_zeros_p (exp))
7182 || ! mostly_zeros_p (exp)))))
7183 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7184 {
7185 rtx constructor = output_constant_def (exp, 1);
7186
7187 if (modifier != EXPAND_CONST_ADDRESS
7188 && modifier != EXPAND_INITIALIZER
7189 && modifier != EXPAND_SUM)
7190 constructor = validize_mem (constructor);
7191
7192 return constructor;
7193 }
7194 else
7195 {
7196 /* Handle calls that pass values in multiple non-contiguous
7197 locations. The Irix 6 ABI has examples of this. */
7198 if (target == 0 || ! safe_from_p (target, exp, 1)
7199 || GET_CODE (target) == PARALLEL
7200 || modifier == EXPAND_STACK_PARM)
7201 target
7202 = assign_temp (build_qualified_type (type,
7203 (TYPE_QUALS (type)
7204 | (TREE_READONLY (exp)
7205 * TYPE_QUAL_CONST))),
7206 0, TREE_ADDRESSABLE (exp), 1);
7207
7208 store_constructor (exp, target, 0, int_expr_size (exp));
7209 return target;
7210 }
7211
7212 case INDIRECT_REF:
7213 {
7214 tree exp1 = TREE_OPERAND (exp, 0);
7215 tree index;
7216 tree string = string_constant (exp1, &index);
7217
7218 /* Try to optimize reads from const strings. */
7219 if (string
7220 && TREE_CODE (string) == STRING_CST
7221 && TREE_CODE (index) == INTEGER_CST
7222 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7223 && GET_MODE_CLASS (mode) == MODE_INT
7224 && GET_MODE_SIZE (mode) == 1
7225 && modifier != EXPAND_WRITE)
7226 return gen_int_mode (TREE_STRING_POINTER (string)
7227 [TREE_INT_CST_LOW (index)], mode);
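/* For example (a hypothetical illustration), a read such as
   *("abc" + 1) passes all of the checks above: the operand is a
   STRING_CST, the index 1 is a constant below the string length, and
   the access is a one-byte integer read, so it folds directly to the
   CONST_INT for 'b' instead of emitting a load.  */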
7228
7229 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7230 op0 = memory_address (mode, op0);
7231 temp = gen_rtx_MEM (mode, op0);
7232 set_mem_attributes (temp, exp, 0);
7233
7234 /* If we are writing to this object and its type is a record with
7235 readonly fields, we must mark it as readonly so it will
7236 conflict with readonly references to those fields. */
7237 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7238 RTX_UNCHANGING_P (temp) = 1;
7239
7240 return temp;
7241 }
7242
7243 case ARRAY_REF:
7244 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7245 abort ();
7246
7247 {
7248 tree array = TREE_OPERAND (exp, 0);
7249 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7250 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7251 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7252 HOST_WIDE_INT i;
7253
7254 /* Optimize the special case of a zero lower bound.
7255
7256 We convert the low_bound to sizetype to avoid some problems
7257 with constant folding. (E.g. suppose the lower bound is 1,
7258 and its mode is QI. Without the conversion, (ARRAY
7259 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7260 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7261
7262 if (! integer_zerop (low_bound))
7263 index = size_diffop (index, convert (sizetype, low_bound));
7264
7265 /* Fold an expression like: "foo"[2].
7266 This is not done in fold so it won't happen inside &.
7267 Don't fold if this is for wide characters since it's too
7268 difficult to do correctly and this is a very rare case. */
7269
7270 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7271 && TREE_CODE (array) == STRING_CST
7272 && TREE_CODE (index) == INTEGER_CST
7273 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7274 && GET_MODE_CLASS (mode) == MODE_INT
7275 && GET_MODE_SIZE (mode) == 1)
7276 return gen_int_mode (TREE_STRING_POINTER (array)
7277 [TREE_INT_CST_LOW (index)], mode);
7278
7279 /* If this is a constant index into a constant array,
7280 just get the value from the array. Handle both the cases when
7281 we have an explicit constructor and when our operand is a variable
7282 that was declared const. */
7283
7284 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7285 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7286 && TREE_CODE (index) == INTEGER_CST
7287 && 0 > compare_tree_int (index,
7288 list_length (CONSTRUCTOR_ELTS
7289 (TREE_OPERAND (exp, 0)))))
7290 {
7291 tree elem;
7292
7293 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7294 i = TREE_INT_CST_LOW (index);
7295 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7296 ;
7297
7298 if (elem)
7299 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7300 modifier);
7301 }
7302
7303 else if (optimize >= 1
7304 && modifier != EXPAND_CONST_ADDRESS
7305 && modifier != EXPAND_INITIALIZER
7306 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7307 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7308 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7309 {
7310 if (TREE_CODE (index) == INTEGER_CST)
7311 {
7312 tree init = DECL_INITIAL (array);
7313
7314 if (TREE_CODE (init) == CONSTRUCTOR)
7315 {
7316 tree elem;
7317
7318 for (elem = CONSTRUCTOR_ELTS (init);
7319 (elem
7320 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7321 elem = TREE_CHAIN (elem))
7322 ;
7323
7324 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7325 return expand_expr (fold (TREE_VALUE (elem)), target,
7326 tmode, modifier);
7327 }
7328 else if (TREE_CODE (init) == STRING_CST
7329 && 0 > compare_tree_int (index,
7330 TREE_STRING_LENGTH (init)))
7331 {
7332 tree type = TREE_TYPE (TREE_TYPE (init));
7333 enum machine_mode mode = TYPE_MODE (type);
7334
7335 if (GET_MODE_CLASS (mode) == MODE_INT
7336 && GET_MODE_SIZE (mode) == 1)
7337 return gen_int_mode (TREE_STRING_POINTER (init)
7338 [TREE_INT_CST_LOW (index)], mode);
7339 }
7340 }
7341 }
7342 }
7343 goto normal_inner_ref;
7344
7345 case COMPONENT_REF:
7346 /* If the operand is a CONSTRUCTOR, we can just extract the
7347 appropriate field if it is present. */
7348 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7349 {
7350 tree elt;
7351
7352 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7353 elt = TREE_CHAIN (elt))
7354 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7355 /* We can normally use the value of the field in the
7356 CONSTRUCTOR. However, if this is a bitfield in
7357 an integral mode that we can fit in a HOST_WIDE_INT,
7358 we must mask only the number of bits in the bitfield,
7359 since this is done implicitly by the constructor. If
7360 the bitfield does not meet either of those conditions,
7361 we can't do this optimization. */
7362 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7363 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7364 == MODE_INT)
7365 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7366 <= HOST_BITS_PER_WIDE_INT))))
7367 {
7368 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7369 && modifier == EXPAND_STACK_PARM)
7370 target = 0;
7371 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7372 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7373 {
7374 HOST_WIDE_INT bitsize
7375 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7376 enum machine_mode imode
7377 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7378
7379 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7380 {
7381 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7382 op0 = expand_and (imode, op0, op1, target);
7383 }
7384 else
7385 {
7386 tree count
7387 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7388 0);
7389
7390 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7391 target, 0);
7392 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7393 target, 0);
7394 }
7395 }
7396
7397 return op0;
7398 }
7399 }
7400 goto normal_inner_ref;
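/* A hedged illustration of the case above (hypothetical source): for a
   reference like ((struct { signed int f : 3; }) { v }).f, the value V
   is taken straight from the CONSTRUCTOR.  Because F is a 3-bit
   bitfield, an unsigned field is masked with (1 << 3) - 1, while a
   signed field is shifted left and then arithmetically right by
   32 - 3 = 29 bits (assuming a 32-bit int), reproducing the truncation
   that storing and re-reading the field would have performed.  */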
7401
7402 case BIT_FIELD_REF:
7403 case ARRAY_RANGE_REF:
7404 normal_inner_ref:
7405 {
7406 enum machine_mode mode1;
7407 HOST_WIDE_INT bitsize, bitpos;
7408 tree offset;
7409 int volatilep = 0;
7410 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7411 &mode1, &unsignedp, &volatilep);
7412 rtx orig_op0;
7413
7414 /* If we got back the original object, something is wrong. Perhaps
7415 we are evaluating an expression too early. In any event, don't
7416 infinitely recurse. */
7417 if (tem == exp)
7418 abort ();
7419
7420 /* If TEM's type is a union of variable size, pass TARGET to the inner
7421 computation, since it will need a temporary and TARGET is known
7422 to suffice. This occurs in unchecked conversion in Ada. */
7423
7424 orig_op0 = op0
7425 = expand_expr (tem,
7426 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7427 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7428 != INTEGER_CST)
7429 && modifier != EXPAND_STACK_PARM
7430 ? target : NULL_RTX),
7431 VOIDmode,
7432 (modifier == EXPAND_INITIALIZER
7433 || modifier == EXPAND_CONST_ADDRESS
7434 || modifier == EXPAND_STACK_PARM)
7435 ? modifier : EXPAND_NORMAL);
7436
7437 /* If this is a constant, put it into a register if it is a
7438 legitimate constant and OFFSET is 0 and memory if it isn't. */
7439 if (CONSTANT_P (op0))
7440 {
7441 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7442 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7443 && offset == 0)
7444 op0 = force_reg (mode, op0);
7445 else
7446 op0 = validize_mem (force_const_mem (mode, op0));
7447 }
7448
7449 if (offset != 0)
7450 {
7451 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7452 EXPAND_SUM);
7453
7454 /* If this object is in a register, put it into memory.
7455 This case can't occur in C, but can in Ada if we have
7456 unchecked conversion of an expression from a scalar type to
7457 an array or record type. */
7458 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7459 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7460 {
7461 /* If the operand is a SAVE_EXPR, we can deal with this by
7462 forcing the SAVE_EXPR into memory. */
7463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7464 {
7465 put_var_into_stack (TREE_OPERAND (exp, 0),
7466 /*rescan=*/true);
7467 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7468 }
7469 else
7470 {
7471 tree nt
7472 = build_qualified_type (TREE_TYPE (tem),
7473 (TYPE_QUALS (TREE_TYPE (tem))
7474 | TYPE_QUAL_CONST));
7475 rtx memloc = assign_temp (nt, 1, 1, 1);
7476
7477 emit_move_insn (memloc, op0);
7478 op0 = memloc;
7479 }
7480 }
7481
7482 if (GET_CODE (op0) != MEM)
7483 abort ();
7484
7485 #ifdef POINTERS_EXTEND_UNSIGNED
7486 if (GET_MODE (offset_rtx) != Pmode)
7487 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7488 #else
7489 if (GET_MODE (offset_rtx) != ptr_mode)
7490 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7491 #endif
7492
7493 /* A constant address in OP0 can have VOIDmode; we must not try
7494 to call force_reg in that case, so avoid it here. */
7495 if (GET_CODE (op0) == MEM
7496 && GET_MODE (op0) == BLKmode
7497 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7498 && bitsize != 0
7499 && (bitpos % bitsize) == 0
7500 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7501 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7502 {
7503 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7504 bitpos = 0;
7505 }
7506
7507 op0 = offset_address (op0, offset_rtx,
7508 highest_pow2_factor (offset));
7509 }
7510
7511 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7512 record its alignment as BIGGEST_ALIGNMENT. */
7513 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7514 && is_aligning_offset (offset, tem))
7515 set_mem_align (op0, BIGGEST_ALIGNMENT);
7516
7517 /* Don't forget about volatility even if this is a bitfield. */
7518 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7519 {
7520 if (op0 == orig_op0)
7521 op0 = copy_rtx (op0);
7522
7523 MEM_VOLATILE_P (op0) = 1;
7524 }
7525
7526 /* The following code doesn't handle CONCAT.
7527 Assume only bitpos == 0 can be used for CONCAT, due to
7528 one-element arrays having the same mode as their element. */
7529 if (GET_CODE (op0) == CONCAT)
7530 {
7531 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7532 abort ();
7533 return op0;
7534 }
7535
7536 /* In cases where an aligned union has an unaligned object
7537 as a field, we might be extracting a BLKmode value from
7538 an integer-mode (e.g., SImode) object. Handle this case
7539 by doing the extract into an object as wide as the field
7540 (which we know to be the width of a basic mode), then
7541 storing into memory, and changing the mode to BLKmode. */
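/* For example (an assumed layout, purely illustrative): a 4-byte
   BLKmode struct member sitting at an odd offset inside a union whose
   own mode is DImode.  The 32 bits are extracted into an SImode
   temporary, spilled to a stack slot, and the slot's mode is then
   changed to BLKmode so the caller sees an ordinary BLKmode MEM.  */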
7542 if (mode1 == VOIDmode
7543 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7544 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7545 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7546 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7547 && modifier != EXPAND_CONST_ADDRESS
7548 && modifier != EXPAND_INITIALIZER)
7549 /* If the field isn't aligned enough to fetch as a memref,
7550 fetch it as a bit field. */
7551 || (mode1 != BLKmode
7552 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7553 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7554 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7555 /* If the type and the field are a constant size and the
7556 size of the type isn't the same size as the bitfield,
7557 we must use bitfield operations. */
7558 || (bitsize >= 0
7559 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7560 == INTEGER_CST)
7561 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7562 bitsize)))
7563 {
7564 enum machine_mode ext_mode = mode;
7565
7566 if (ext_mode == BLKmode
7567 && ! (target != 0 && GET_CODE (op0) == MEM
7568 && GET_CODE (target) == MEM
7569 && bitpos % BITS_PER_UNIT == 0))
7570 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7571
7572 if (ext_mode == BLKmode)
7573 {
7574 /* In this case, BITPOS must start at a byte boundary and
7575 TARGET, if specified, must be a MEM. */
7576 if (GET_CODE (op0) != MEM
7577 || (target != 0 && GET_CODE (target) != MEM)
7578 || bitpos % BITS_PER_UNIT != 0)
7579 abort ();
7580
7581 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7582 if (target == 0)
7583 target = assign_temp (type, 0, 1, 1);
7584
7585 emit_block_move (target, op0,
7586 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7587 / BITS_PER_UNIT),
7588 (modifier == EXPAND_STACK_PARM
7589 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7590
7591 return target;
7592 }
7593
7594 op0 = validize_mem (op0);
7595
7596 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7597 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7598
7599 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7600 (modifier == EXPAND_STACK_PARM
7601 ? NULL_RTX : target),
7602 ext_mode, ext_mode,
7603 int_size_in_bytes (TREE_TYPE (tem)));
7604
7605 /* If the result is a record type and BITSIZE is narrower than
7606 the mode of OP0, an integral mode, and this is a big endian
7607 machine, we must put the field into the high-order bits. */
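/* E.g. (illustrative only) a 24-bit record extracted into an SImode
   value on a big-endian target must occupy the most-significant bits,
   so it is shifted left by 32 - 24 = 8 bits here.  */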
7608 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7609 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7610 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7611 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7612 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7613 - bitsize),
7614 op0, 1);
7615
7616 if (mode == BLKmode)
7617 {
7618 rtx new = assign_temp (build_qualified_type
7619 ((*lang_hooks.types.type_for_mode)
7620 (ext_mode, 0),
7621 TYPE_QUAL_CONST), 0, 1, 1);
7622
7623 emit_move_insn (new, op0);
7624 op0 = copy_rtx (new);
7625 PUT_MODE (op0, BLKmode);
7626 set_mem_attributes (op0, exp, 1);
7627 }
7628
7629 return op0;
7630 }
7631
7632 /* If the result is BLKmode, use that to access the object
7633 now as well. */
7634 if (mode == BLKmode)
7635 mode1 = BLKmode;
7636
7637 /* Get a reference to just this component. */
7638 if (modifier == EXPAND_CONST_ADDRESS
7639 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7640 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7641 else
7642 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7643
7644 if (op0 == orig_op0)
7645 op0 = copy_rtx (op0);
7646
7647 set_mem_attributes (op0, exp, 0);
7648 if (GET_CODE (XEXP (op0, 0)) == REG)
7649 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7650
7651 MEM_VOLATILE_P (op0) |= volatilep;
7652 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7653 || modifier == EXPAND_CONST_ADDRESS
7654 || modifier == EXPAND_INITIALIZER)
7655 return op0;
7656 else if (target == 0)
7657 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7658
7659 convert_move (target, op0, unsignedp);
7660 return target;
7661 }
7662
7663 case VTABLE_REF:
7664 {
7665 rtx insn, before = get_last_insn (), vtbl_ref;
7666
7667 /* Evaluate the interior expression. */
7668 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7669 tmode, modifier);
7670
7671 /* Get or create an instruction off which to hang a note. */
7672 if (REG_P (subtarget))
7673 {
7674 target = subtarget;
7675 insn = get_last_insn ();
7676 if (insn == before)
7677 abort ();
7678 if (! INSN_P (insn))
7679 insn = prev_nonnote_insn (insn);
7680 }
7681 else
7682 {
7683 target = gen_reg_rtx (GET_MODE (subtarget));
7684 insn = emit_move_insn (target, subtarget);
7685 }
7686
7687 /* Collect the data for the note. */
7688 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7689 vtbl_ref = plus_constant (vtbl_ref,
7690 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7691 /* Discard the initial CONST that was added. */
7692 vtbl_ref = XEXP (vtbl_ref, 0);
7693
7694 REG_NOTES (insn)
7695 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7696
7697 return target;
7698 }
7699
7700 /* Intended for a reference to a buffer of a file-object in Pascal.
7701 But it's not certain that a special tree code will really be
7702 necessary for these. INDIRECT_REF might work for them. */
7703 case BUFFER_REF:
7704 abort ();
7705
7706 case IN_EXPR:
7707 {
7708 /* Pascal set IN expression.
7709
7710 Algorithm:
7711 rlo = set_low - (set_low%bits_per_word);
7712 the_word = set [ (index - rlo)/bits_per_word ];
7713 bit_index = index % bits_per_word;
7714 bitmask = 1 << bit_index;
7715 return !!(the_word & bitmask); */
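/* As a worked instance of the algorithm above (taking bits_per_word as
   8 and set_low as 0, an assumed layout): membership of index 11
   inspects bit 11 % 8 = 3 of element 11 / 8 = 1, i.e.
   !!(set[1] & (1 << 3)); an index outside [set_low, set_high] yields 0
   via the range checks emitted below.  */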
7716
7717 tree set = TREE_OPERAND (exp, 0);
7718 tree index = TREE_OPERAND (exp, 1);
7719 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7720 tree set_type = TREE_TYPE (set);
7721 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7722 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7723 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7724 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7725 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7726 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7727 rtx setaddr = XEXP (setval, 0);
7728 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7729 rtx rlow;
7730 rtx diff, quo, rem, addr, bit, result;
7731
7732 /* If domain is empty, answer is no. Likewise if index is constant
7733 and out of bounds. */
7734 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7735 && TREE_CODE (set_low_bound) == INTEGER_CST
7736 && tree_int_cst_lt (set_high_bound, set_low_bound))
7737 || (TREE_CODE (index) == INTEGER_CST
7738 && TREE_CODE (set_low_bound) == INTEGER_CST
7739 && tree_int_cst_lt (index, set_low_bound))
7740 || (TREE_CODE (set_high_bound) == INTEGER_CST
7741 && TREE_CODE (index) == INTEGER_CST
7742 && tree_int_cst_lt (set_high_bound, index))))
7743 return const0_rtx;
7744
7745 if (target == 0)
7746 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7747
7748 /* If we get here, we have to generate the code for both cases
7749 (in range and out of range). */
7750
7751 op0 = gen_label_rtx ();
7752 op1 = gen_label_rtx ();
7753
7754 if (! (GET_CODE (index_val) == CONST_INT
7755 && GET_CODE (lo_r) == CONST_INT))
7756 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7757 GET_MODE (index_val), iunsignedp, op1);
7758
7759 if (! (GET_CODE (index_val) == CONST_INT
7760 && GET_CODE (hi_r) == CONST_INT))
7761 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7762 GET_MODE (index_val), iunsignedp, op1);
7763
7764 /* Calculate the element number of bit zero in the first word
7765 of the set. */
7766 if (GET_CODE (lo_r) == CONST_INT)
7767 rlow = GEN_INT (INTVAL (lo_r)
7768 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7769 else
7770 rlow = expand_binop (index_mode, and_optab, lo_r,
7771 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7772 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7773
7774 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7775 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7776
7777 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7778 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7779 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7780 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7781
7782 addr = memory_address (byte_mode,
7783 expand_binop (index_mode, add_optab, diff,
7784 setaddr, NULL_RTX, iunsignedp,
7785 OPTAB_LIB_WIDEN));
7786
7787 /* Extract the bit we want to examine. */
7788 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7789 gen_rtx_MEM (byte_mode, addr),
7790 make_tree (TREE_TYPE (index), rem),
7791 NULL_RTX, 1);
7792 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7793 GET_MODE (target) == byte_mode ? target : 0,
7794 1, OPTAB_LIB_WIDEN);
7795
7796 if (result != target)
7797 convert_move (target, result, 1);
7798
7799 /* Output the code to handle the out-of-range case. */
7800 emit_jump (op0);
7801 emit_label (op1);
7802 emit_move_insn (target, const0_rtx);
7803 emit_label (op0);
7804 return target;
7805 }
7806
7807 case WITH_CLEANUP_EXPR:
7808 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7809 {
7810 WITH_CLEANUP_EXPR_RTL (exp)
7811 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7812 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7813 CLEANUP_EH_ONLY (exp));
7814
7815 /* That's it for this cleanup. */
7816 TREE_OPERAND (exp, 1) = 0;
7817 }
7818 return WITH_CLEANUP_EXPR_RTL (exp);
7819
7820 case CLEANUP_POINT_EXPR:
7821 {
7822 /* Start a new binding layer that will keep track of all cleanup
7823 actions to be performed. */
7824 expand_start_bindings (2);
7825
7826 target_temp_slot_level = temp_slot_level;
7827
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7829 /* If we're going to use this value, load it up now. */
7830 if (! ignore)
7831 op0 = force_not_mem (op0);
7832 preserve_temp_slots (op0);
7833 expand_end_bindings (NULL_TREE, 0, 0);
7834 }
7835 return op0;
7836
7837 case CALL_EXPR:
7838 /* Check for a built-in function. */
7839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7840 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7841 == FUNCTION_DECL)
7842 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7843 {
7844 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7845 == BUILT_IN_FRONTEND)
7846 return (*lang_hooks.expand_expr) (exp, original_target,
7847 tmode, modifier);
7848 else
7849 return expand_builtin (exp, target, subtarget, tmode, ignore);
7850 }
7851
7852 return expand_call (exp, target, ignore);
7853
7854 case NON_LVALUE_EXPR:
7855 case NOP_EXPR:
7856 case CONVERT_EXPR:
7857 case REFERENCE_EXPR:
7858 if (TREE_OPERAND (exp, 0) == error_mark_node)
7859 return const0_rtx;
7860
7861 if (TREE_CODE (type) == UNION_TYPE)
7862 {
7863 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7864
7865 /* If both input and output are BLKmode, this conversion isn't doing
7866 anything except possibly changing memory attributes. */
7867 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7868 {
7869 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7870 modifier);
7871
7872 result = copy_rtx (result);
7873 set_mem_attributes (result, exp, 0);
7874 return result;
7875 }
7876
7877 if (target == 0)
7878 target = assign_temp (type, 0, 1, 1);
7879
7880 if (GET_CODE (target) == MEM)
7881 /* Store data into beginning of memory target. */
7882 store_expr (TREE_OPERAND (exp, 0),
7883 adjust_address (target, TYPE_MODE (valtype), 0),
7884 modifier == EXPAND_STACK_PARM ? 2 : 0);
7885
7886 else if (GET_CODE (target) == REG)
7887 /* Store this field into a union of the proper type. */
7888 store_field (target,
7889 MIN ((int_size_in_bytes (TREE_TYPE
7890 (TREE_OPERAND (exp, 0)))
7891 * BITS_PER_UNIT),
7892 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7893 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7894 VOIDmode, 0, type, 0);
7895 else
7896 abort ();
7897
7898 /* Return the entire union. */
7899 return target;
7900 }
7901
7902 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7903 {
7904 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7905 modifier);
7906
7907 /* If the signedness of the conversion differs and OP0 is
7908 a promoted SUBREG, clear that indication since we now
7909 have to do the proper extension. */
7910 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7911 && GET_CODE (op0) == SUBREG)
7912 SUBREG_PROMOTED_VAR_P (op0) = 0;
7913
7914 return op0;
7915 }
7916
7917 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7918 if (GET_MODE (op0) == mode)
7919 return op0;
7920
7921 /* If OP0 is a constant, just convert it into the proper mode. */
7922 if (CONSTANT_P (op0))
7923 {
7924 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7925 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7926
7927 if (modifier == EXPAND_INITIALIZER)
7928 return simplify_gen_subreg (mode, op0, inner_mode,
7929 subreg_lowpart_offset (mode,
7930 inner_mode));
7931 else
7932 return convert_modes (mode, inner_mode, op0,
7933 TREE_UNSIGNED (inner_type));
7934 }
7935
7936 if (modifier == EXPAND_INITIALIZER)
7937 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7938
7939 if (target == 0)
7940 return
7941 convert_to_mode (mode, op0,
7942 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7943 else
7944 convert_move (target, op0,
7945 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7946 return target;
7947
7948 case VIEW_CONVERT_EXPR:
7949 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7950
7951 /* If the input and output modes are both the same, we are done.
7952 Otherwise, if neither mode is BLKmode and both are within a word, we
7953 can use gen_lowpart. If neither is true, make sure the operand is
7954 in memory and convert the MEM to the new mode. */
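/* For instance (illustrative only), viewing a 32-bit float as a 32-bit
   int: both modes are non-BLKmode and fit in a word, so the bits are
   reinterpreted in place with gen_lowpart and no conversion insn is
   emitted.  Only when neither shortcut applies is the value forced to
   memory and re-read in the new mode.  */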
7955 if (TYPE_MODE (type) == GET_MODE (op0))
7956 ;
7957 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7958 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7959 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7960 op0 = gen_lowpart (TYPE_MODE (type), op0);
7961 else if (GET_CODE (op0) != MEM)
7962 {
7963 /* If the operand is not a MEM, force it into memory. Since we
7964 are going to be changing the mode of the MEM, don't call
7965 force_const_mem for constants because we don't allow pool
7966 constants to change mode. */
7967 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7968
7969 if (TREE_ADDRESSABLE (exp))
7970 abort ();
7971
7972 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7973 target
7974 = assign_stack_temp_for_type
7975 (TYPE_MODE (inner_type),
7976 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7977
7978 emit_move_insn (target, op0);
7979 op0 = target;
7980 }
7981
7982 /* At this point, OP0 is in the correct mode. If the output type is such
7983 that the operand is known to be aligned, indicate that it is.
7984 Otherwise, we need only be concerned about alignment for non-BLKmode
7985 results. */
7986 if (GET_CODE (op0) == MEM)
7987 {
7988 op0 = copy_rtx (op0);
7989
7990 if (TYPE_ALIGN_OK (type))
7991 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7992 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7993 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7994 {
7995 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7996 HOST_WIDE_INT temp_size
7997 = MAX (int_size_in_bytes (inner_type),
7998 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7999 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8000 temp_size, 0, type);
8001 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8002
8003 if (TREE_ADDRESSABLE (exp))
8004 abort ();
8005
8006 if (GET_MODE (op0) == BLKmode)
8007 emit_block_move (new_with_op0_mode, op0,
8008 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8009 (modifier == EXPAND_STACK_PARM
8010 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8011 else
8012 emit_move_insn (new_with_op0_mode, op0);
8013
8014 op0 = new;
8015 }
8016
8017 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8018 }
8019
8020 return op0;
8021
8022 case PLUS_EXPR:
8023 this_optab = ! unsignedp && flag_trapv
8024 && (GET_MODE_CLASS (mode) == MODE_INT)
8025 ? addv_optab : add_optab;
8026
8027 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8028 something else, make sure we add the register to the constant and
8029 then to the other thing. This case can occur during strength
8030 reduction and doing it this way will produce better code if the
8031 frame pointer or argument pointer is eliminated.
8032
8033 fold-const.c will ensure that the constant is always in the inner
8034 PLUS_EXPR, so the only case we need to do anything about is if
8035 sp, ap, or fp is our second argument, in which case we must swap
8036 the innermost first argument and our second argument. */
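/* Illustration: given (X + C) + FP, where FP is the frame-pointer
   RTL_EXPR and C a constant, the operands are rearranged to
   (FP + C) + X so that the register-plus-constant part can be folded
   into an address.  */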
8037
8038 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8039 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8040 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8041 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8042 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8043 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8044 {
8045 tree t = TREE_OPERAND (exp, 1);
8046
8047 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8048 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8049 }
8050
8051 /* If the result is to be ptr_mode and we are adding an integer to
8052 something, we might be forming a constant. So try to use
8053 plus_constant. If it produces a sum and we can't accept it,
8054 use force_operand. This allows P = &ARR[const] to generate
8055 efficient code on machines where a SYMBOL_REF is not a valid
8056 address.
8057
8058 If this is an EXPAND_SUM call, always return the sum. */
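/* Illustration (assuming 4-byte ints): for P = &ARR[5] the address is
   ARR plus 20, and plus_constant folds the whole sum into a single
   constant address expression; if that form is not a valid address for
   the target and the caller cannot accept a bare sum, force_operand
   materializes it into a register below.  */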
8059 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8060 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8061 {
8062 if (modifier == EXPAND_STACK_PARM)
8063 target = 0;
8064 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8065 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8066 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8067 {
8068 rtx constant_part;
8069
8070 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8071 EXPAND_SUM);
8072 /* Use immed_double_const to ensure that the constant is
8073 truncated according to the mode of OP1, then sign extended
8074 to a HOST_WIDE_INT. Using the constant directly can result
8075 in non-canonical RTL in a 64x32 cross compile. */
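/* Worked example (assuming a 64-bit HOST_WIDE_INT and a 32-bit
   operand mode): the low word 0x80000000 of a tree constant must
   enter the sum as the CONST_INT -2147483648, i.e. sign-extended
   to 0xffffffff80000000 as a HOST_WIDE_INT, since CONST_INTs are
   kept sign-extended from their mode; immed_double_const performs
   exactly that truncation and extension, where using
   TREE_INT_CST_LOW directly would not.  */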
8076 constant_part
8077 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8078 (HOST_WIDE_INT) 0,
8079 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8080 op1 = plus_constant (op1, INTVAL (constant_part));
8081 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8082 op1 = force_operand (op1, target);
8083 return op1;
8084 }
8085
8086 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8087 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8088 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8089 {
8090 rtx constant_part;
8091
8092 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8093 (modifier == EXPAND_INITIALIZER
8094 ? EXPAND_INITIALIZER : EXPAND_SUM));
8095 if (! CONSTANT_P (op0))
8096 {
8097 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8098 VOIDmode, modifier);
8099 /* Don't go to both_summands if modifier
8100 says it's not right to return a PLUS. */
8101 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8102 goto binop2;
8103 goto both_summands;
8104 }
8105 /* Use immed_double_const to ensure that the constant is
8106 truncated according to the mode of OP1, then sign extended
8107 to a HOST_WIDE_INT. Using the constant directly can result
8108 in non-canonical RTL in a 64x32 cross compile. */
8109 constant_part
8110 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8111 (HOST_WIDE_INT) 0,
8112 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8113 op0 = plus_constant (op0, INTVAL (constant_part));
8114 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8115 op0 = force_operand (op0, target);
8116 return op0;
8117 }
8118 }
8119
8120 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8121 subtarget = 0;
8122
8123 /* No sense saving up arithmetic to be done
8124 if it's all in the wrong mode to form part of an address.
8125 And force_operand won't know whether to sign-extend or
8126 zero-extend. */
8127 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8128 || mode != ptr_mode)
8129 {
8130 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8131 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8132 if (op0 == const0_rtx)
8133 return op1;
8134 if (op1 == const0_rtx)
8135 return op0;
8136 goto binop2;
8137 }
8138
8139 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8140 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8141
8142 /* We come here from MINUS_EXPR when the second operand is a
8143 constant. */
8144 both_summands:
8145 /* Make sure any term that's a sum with a constant comes last. */
8146 if (GET_CODE (op0) == PLUS
8147 && CONSTANT_P (XEXP (op0, 1)))
8148 {
8149 temp = op0;
8150 op0 = op1;
8151 op1 = temp;
8152 }
8153 /* If adding to a sum including a constant,
8154 associate it to put the constant outside. */
8155 if (GET_CODE (op1) == PLUS
8156 && CONSTANT_P (XEXP (op1, 1)))
8157 {
8158 rtx constant_term = const0_rtx;
8159
8160 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8161 if (temp != 0)
8162 op0 = temp;
8163 /* Ensure that MULT comes first if there is one. */
8164 else if (GET_CODE (op0) == MULT)
8165 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8166 else
8167 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8168
8169 /* Let's also eliminate constants from op0 if possible. */
8170 op0 = eliminate_constant_term (op0, &constant_term);
8171
8172 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8173 their sum should be a constant. Form it into OP1, since the
8174 result we want will then be OP0 + OP1. */
8175
8176 temp = simplify_binary_operation (PLUS, mode, constant_term,
8177 XEXP (op1, 1));
8178 if (temp != 0)
8179 op1 = temp;
8180 else
8181 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8182 }
8183
8184 /* Put a constant term last and put a multiplication first. */
8185 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8186 temp = op1, op1 = op0, op0 = temp;
8187
8188 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8189 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8190
8191 case MINUS_EXPR:
8192 /* For initializers, we are allowed to return a MINUS of two
8193 symbolic constants. Here we handle all cases when both operands
8194 are constant. */
8195 /* Handle difference of two symbolic constants,
8196 for the sake of an initializer. */
8197 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8198 && really_constant_p (TREE_OPERAND (exp, 0))
8199 && really_constant_p (TREE_OPERAND (exp, 1)))
8200 {
8201 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8202 modifier);
8203 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8204 modifier);
8205
8206 /* If the last operand is a CONST_INT, use plus_constant of
8207 the negated constant. Else make the MINUS. */
8208 if (GET_CODE (op1) == CONST_INT)
8209 return plus_constant (op0, - INTVAL (op1));
8210 else
8211 return gen_rtx_MINUS (mode, op0, op1);
8212 }
8213
8214 this_optab = ! unsignedp && flag_trapv
8215 && (GET_MODE_CLASS(mode) == MODE_INT)
8216 ? subv_optab : sub_optab;
8217
8218 /* No sense saving up arithmetic to be done
8219 if it's all in the wrong mode to form part of an address.
8220 And force_operand won't know whether to sign-extend or
8221 zero-extend. */
8222 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8223 || mode != ptr_mode)
8224 goto binop;
8225
8226 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8227 subtarget = 0;
8228
8229 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8230 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8231
8232 /* Convert A - const to A + (-const). */
8233 if (GET_CODE (op1) == CONST_INT)
8234 {
8235 op1 = negate_rtx (mode, op1);
8236 goto both_summands;
8237 }
8238
8239 goto binop2;
8240
8241 case MULT_EXPR:
8242 /* If first operand is constant, swap them.
8243 Thus the following special case checks need only
8244 check the second operand. */
8245 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8246 {
8247 tree t1 = TREE_OPERAND (exp, 0);
8248 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8249 TREE_OPERAND (exp, 1) = t1;
8250 }
8251
8252 /* Attempt to return something suitable for generating an
8253 indexed address, for machines that support that. */
8254
8255 if (modifier == EXPAND_SUM && mode == ptr_mode
8256 && host_integerp (TREE_OPERAND (exp, 1), 0))
8257 {
8258 tree exp1 = TREE_OPERAND (exp, 1);
8259
8260 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8261 EXPAND_SUM);
8262
8263 /* If we knew for certain that this is arithmetic for an array
8264 reference, and we knew the bounds of the array, then we could
8265 apply the distributive law across (PLUS X C) for constant C.
8266 Without such knowledge, we risk overflowing the computation
8267 when both X and C are large, but X+C isn't. */
8268 /* ??? Could perhaps special-case EXP being unsigned and C being
8269 positive. In that case we are certain that X+C is no smaller
8270 than X and so the transformed expression will overflow iff the
8271 original would have. */
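/* A numeric sketch of the risk (hypothetical 32-bit values): with
   X = 0x30000000 and C = -0x2fffffff, X + C is 1, so (X + C) * 4 is
   just 4; distributing to X * 4 + C * 4 would overflow the
   intermediate products even though the true result is tiny.  */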
8272
8273 if (GET_CODE (op0) != REG)
8274 op0 = force_operand (op0, NULL_RTX);
8275 if (GET_CODE (op0) != REG)
8276 op0 = copy_to_mode_reg (mode, op0);
8277
8278 return gen_rtx_MULT (mode, op0,
8279 gen_int_mode (tree_low_cst (exp1, 0),
8280 TYPE_MODE (TREE_TYPE (exp1))));
8281 }
8282
8283 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8284 subtarget = 0;
8285
8286 if (modifier == EXPAND_STACK_PARM)
8287 target = 0;
8288
8289 /* Check for multiplying things that have been extended
8290 from a narrower type. If this machine supports multiplying
8291 in that narrower type with a result in the desired type,
8292 do it that way, and avoid the explicit type-conversion. */
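/* Illustration (assuming 16-bit short and 32-bit int): for
   (int) a * (int) b with A and B both short, the operands were widened
   from HImode, and if the target provides a widening
   HImode x HImode -> SImode multiply, it is used directly instead of
   extending both operands and doing a full SImode multiply.  */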
8293 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8294 && TREE_CODE (type) == INTEGER_TYPE
8295 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8296 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8297 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8298 && int_fits_type_p (TREE_OPERAND (exp, 1),
8299 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8300 /* Don't use a widening multiply if a shift will do. */
8301 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8302 > HOST_BITS_PER_WIDE_INT)
8303 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8304 ||
8305 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8306 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8307 ==
8308 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8309 /* If both operands are extended, they must either both
8310 be zero-extended or both be sign-extended. */
8311 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8312 ==
8313 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8314 {
8315 enum machine_mode innermode
8316 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8317 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8318 ? smul_widen_optab : umul_widen_optab);
8319 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8320 ? umul_widen_optab : smul_widen_optab);
8321 if (mode == GET_MODE_WIDER_MODE (innermode))
8322 {
8323 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8324 {
8325 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8326 NULL_RTX, VOIDmode, 0);
8327 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8328 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8329 VOIDmode, 0);
8330 else
8331 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8332 NULL_RTX, VOIDmode, 0);
8333 goto binop2;
8334 }
8335 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8336 && innermode == word_mode)
8337 {
8338 rtx htem;
8339 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8340 NULL_RTX, VOIDmode, 0);
8341 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8342 op1 = convert_modes (innermode, mode,
8343 expand_expr (TREE_OPERAND (exp, 1),
8344 NULL_RTX, VOIDmode, 0),
8345 unsignedp);
8346 else
8347 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8348 NULL_RTX, VOIDmode, 0);
8349 temp = expand_binop (mode, other_optab, op0, op1, target,
8350 unsignedp, OPTAB_LIB_WIDEN);
8351 htem = expand_mult_highpart_adjust (innermode,
8352 gen_highpart (innermode, temp),
8353 op0, op1,
8354 gen_highpart (innermode, temp),
8355 unsignedp);
8356 emit_move_insn (gen_highpart (innermode, temp), htem);
8357 return temp;
8358 }
8359 }
8360 }
8361 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8362 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8363 return expand_mult (mode, op0, op1, target, unsignedp);
8364
8365 case TRUNC_DIV_EXPR:
8366 case FLOOR_DIV_EXPR:
8367 case CEIL_DIV_EXPR:
8368 case ROUND_DIV_EXPR:
8369 case EXACT_DIV_EXPR:
8370 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8371 subtarget = 0;
8372 if (modifier == EXPAND_STACK_PARM)
8373 target = 0;
8374 /* Possible optimization: compute the dividend with EXPAND_SUM
8375 then if the divisor is constant can optimize the case
8376 where some terms of the dividend have coeffs divisible by it. */
8377 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8378 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8379 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8380
8381 case RDIV_EXPR:
8382 /* Emit a/b as a*(1/b). Later, CSE may be able to reuse the reciprocal,
8383 saving an expensive divide. If not, combine will rebuild the original
8384 computation. */
8385 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8386 && TREE_CODE (type) == REAL_TYPE
8387 && !real_onep (TREE_OPERAND (exp, 0)))
8388 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8389 build (RDIV_EXPR, type,
8390 build_real (type, dconst1),
8391 TREE_OPERAND (exp, 1))),
8392 target, tmode, modifier);
8393 this_optab = sdiv_optab;
8394 goto binop;
8395
8396 case TRUNC_MOD_EXPR:
8397 case FLOOR_MOD_EXPR:
8398 case CEIL_MOD_EXPR:
8399 case ROUND_MOD_EXPR:
8400 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8401 subtarget = 0;
8402 if (modifier == EXPAND_STACK_PARM)
8403 target = 0;
8404 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8405 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8406 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8407
8408 case FIX_ROUND_EXPR:
8409 case FIX_FLOOR_EXPR:
8410 case FIX_CEIL_EXPR:
8411 abort (); /* Not used for C. */
8412
8413 case FIX_TRUNC_EXPR:
8414 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8415 if (target == 0 || modifier == EXPAND_STACK_PARM)
8416 target = gen_reg_rtx (mode);
8417 expand_fix (target, op0, unsignedp);
8418 return target;
8419
8420 case FLOAT_EXPR:
8421 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8422 if (target == 0 || modifier == EXPAND_STACK_PARM)
8423 target = gen_reg_rtx (mode);
8424 /* expand_float can't figure out what to do if FROM has VOIDmode.
8425 So give it the correct mode. With -O, cse will optimize this. */
8426 if (GET_MODE (op0) == VOIDmode)
8427 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8428 op0);
8429 expand_float (target, op0,
8430 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8431 return target;
8432
8433 case NEGATE_EXPR:
8434 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8435 if (modifier == EXPAND_STACK_PARM)
8436 target = 0;
8437 temp = expand_unop (mode,
8438 ! unsignedp && flag_trapv
8439 && (GET_MODE_CLASS(mode) == MODE_INT)
8440 ? negv_optab : neg_optab, op0, target, 0);
8441 if (temp == 0)
8442 abort ();
8443 return temp;
8444
8445 case ABS_EXPR:
8446 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8447 if (modifier == EXPAND_STACK_PARM)
8448 target = 0;
8449
8450 /* Handle complex values specially. */
8451 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8452 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8453 return expand_complex_abs (mode, op0, target, unsignedp);
8454
8455 /* Unsigned abs is simply the operand. Testing here means we don't
8456 risk generating incorrect code below. */
8457 if (TREE_UNSIGNED (type))
8458 return op0;
8459
8460 return expand_abs (mode, op0, target, unsignedp,
8461 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8462
8463 case MAX_EXPR:
8464 case MIN_EXPR:
8465 target = original_target;
8466 if (target == 0
8467 || modifier == EXPAND_STACK_PARM
8468 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8469 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8470 || GET_MODE (target) != mode
8471 || (GET_CODE (target) == REG
8472 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8473 target = gen_reg_rtx (mode);
8474 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8475 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8476
8477 /* First try to do it with a special MIN or MAX instruction.
8478 If that does not win, use a conditional jump to select the proper
8479 value. */
8480 this_optab = (TREE_UNSIGNED (type)
8481 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8482 : (code == MIN_EXPR ? smin_optab : smax_optab));
8483
8484 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8485 OPTAB_WIDEN);
8486 if (temp != 0)
8487 return temp;
8488
8489 /* At this point, a MEM target is no longer useful; we will get better
8490 code without it. */
8491
8492 if (GET_CODE (target) == MEM)
8493 target = gen_reg_rtx (mode);
8494
8495 if (target != op0)
8496 emit_move_insn (target, op0);
8497
8498 op0 = gen_label_rtx ();
8499
8500 /* If this mode is an integer too wide to compare properly,
8501 compare word by word. Rely on cse to optimize constant cases. */
8502 if (GET_MODE_CLASS (mode) == MODE_INT
8503 && ! can_compare_p (GE, mode, ccp_jump))
8504 {
8505 if (code == MAX_EXPR)
8506 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8507 target, op1, NULL_RTX, op0);
8508 else
8509 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8510 op1, target, NULL_RTX, op0);
8511 }
8512 else
8513 {
8514 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8515 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8516 unsignedp, mode, NULL_RTX, NULL_RTX,
8517 op0);
8518 }
8519 emit_move_insn (target, op1);
8520 emit_label (op0);
8521 return target;
8522
8523 case BIT_NOT_EXPR:
8524 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8525 if (modifier == EXPAND_STACK_PARM)
8526 target = 0;
8527 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8528 if (temp == 0)
8529 abort ();
8530 return temp;
8531
8532 case FFS_EXPR:
8533 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8534 if (modifier == EXPAND_STACK_PARM)
8535 target = 0;
8536 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8537 if (temp == 0)
8538 abort ();
8539 return temp;
8540
8541 case CLZ_EXPR:
8542 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8543 temp = expand_unop (mode, clz_optab, op0, target, 1);
8544 if (temp == 0)
8545 abort ();
8546 return temp;
8547
8548 case CTZ_EXPR:
8549 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8550 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8551 if (temp == 0)
8552 abort ();
8553 return temp;
8554
8555 case POPCOUNT_EXPR:
8556 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8557 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8558 if (temp == 0)
8559 abort ();
8560 return temp;
8561
8562 case PARITY_EXPR:
8563 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8564 temp = expand_unop (mode, parity_optab, op0, target, 1);
8565 if (temp == 0)
8566 abort ();
8567 return temp;
8568
8569 /* ??? Can optimize bitwise operations with one arg constant.
8570 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8571 and (a bitwise1 b) bitwise2 b (etc)
8572 but that is probably not worthwhile. */
8573
8574 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8575 boolean values when we want in all cases to compute both of them. In
8576 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8577 as actual zero-or-1 values and then bitwise anding. In cases where
8578 there cannot be any side effects, better code would be made by
8579 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8580 how to recognize those cases. */
8581
8582 case TRUTH_AND_EXPR:
8583 case BIT_AND_EXPR:
8584 this_optab = and_optab;
8585 goto binop;
8586
8587 case TRUTH_OR_EXPR:
8588 case BIT_IOR_EXPR:
8589 this_optab = ior_optab;
8590 goto binop;
8591
8592 case TRUTH_XOR_EXPR:
8593 case BIT_XOR_EXPR:
8594 this_optab = xor_optab;
8595 goto binop;
8596
8597 case LSHIFT_EXPR:
8598 case RSHIFT_EXPR:
8599 case LROTATE_EXPR:
8600 case RROTATE_EXPR:
8601 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8602 subtarget = 0;
8603 if (modifier == EXPAND_STACK_PARM)
8604 target = 0;
8605 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8606 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8607 unsignedp);
8608
8609 /* Could determine the answer when only additive constants differ. Also,
8610 the addition of one can be handled by changing the condition. */
8611 case LT_EXPR:
8612 case LE_EXPR:
8613 case GT_EXPR:
8614 case GE_EXPR:
8615 case EQ_EXPR:
8616 case NE_EXPR:
8617 case UNORDERED_EXPR:
8618 case ORDERED_EXPR:
8619 case UNLT_EXPR:
8620 case UNLE_EXPR:
8621 case UNGT_EXPR:
8622 case UNGE_EXPR:
8623 case UNEQ_EXPR:
8624 temp = do_store_flag (exp,
8625 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8626 tmode != VOIDmode ? tmode : mode, 0);
8627 if (temp != 0)
8628 return temp;
8629
8630 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8631 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8632 && original_target
8633 && GET_CODE (original_target) == REG
8634 && (GET_MODE (original_target)
8635 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8636 {
8637 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8638 VOIDmode, 0);
8639
8640 /* If temp is constant, we can just compute the result. */
8641 if (GET_CODE (temp) == CONST_INT)
8642 {
8643 if (INTVAL (temp) != 0)
8644 emit_move_insn (target, const1_rtx);
8645 else
8646 emit_move_insn (target, const0_rtx);
8647
8648 return target;
8649 }
8650
8651 if (temp != original_target)
8652 {
8653 enum machine_mode mode1 = GET_MODE (temp);
8654 if (mode1 == VOIDmode)
8655 mode1 = tmode != VOIDmode ? tmode : mode;
8656
8657 temp = copy_to_mode_reg (mode1, temp);
8658 }
8659
8660 op1 = gen_label_rtx ();
8661 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8662 GET_MODE (temp), unsignedp, op1);
8663 emit_move_insn (temp, const1_rtx);
8664 emit_label (op1);
8665 return temp;
8666 }
8667
8668 /* If no set-flag instruction, must generate a conditional
8669 store into a temporary variable. Drop through
8670 and handle this like && and ||. */
8671
8672 case TRUTH_ANDIF_EXPR:
8673 case TRUTH_ORIF_EXPR:
8674 if (! ignore
8675 && (target == 0
8676 || modifier == EXPAND_STACK_PARM
8677 || ! safe_from_p (target, exp, 1)
8678 /* Make sure we don't have a hard reg (such as function's return
8679 value) live across basic blocks, if not optimizing. */
8680 || (!optimize && GET_CODE (target) == REG
8681 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8682 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8683
8684 if (target)
8685 emit_clr_insn (target);
8686
8687 op1 = gen_label_rtx ();
8688 jumpifnot (exp, op1);
8689
8690 if (target)
8691 emit_0_to_1_insn (target);
8692
8693 emit_label (op1);
8694 return ignore ? const0_rtx : target;
8695
8696 case TRUTH_NOT_EXPR:
8697 if (modifier == EXPAND_STACK_PARM)
8698 target = 0;
8699 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8700 /* The parser is careful to generate TRUTH_NOT_EXPR
8701 only with operands that are always zero or one. */
8702 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8703 target, 1, OPTAB_LIB_WIDEN);
8704 if (temp == 0)
8705 abort ();
8706 return temp;
8707
8708 case COMPOUND_EXPR:
8709 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8710 emit_queue ();
8711 return expand_expr (TREE_OPERAND (exp, 1),
8712 (ignore ? const0_rtx : target),
8713 VOIDmode, modifier);
8714
8715 case COND_EXPR:
8716 /* If we would have a "singleton" (see below) were it not for a
8717 conversion in each arm, bring that conversion back out. */
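/* E.g. (illustrative only) X ? (long) (A + B) : (long) A becomes
   (long) (X ? A + B : A), exposing the "A op B : A" singleton form
   handled below.  */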
8718 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8719 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8720 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8721 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8722 {
8723 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8724 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8725
8726 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8727 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8728 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8729 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8730 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8731 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8732 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8733 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8734 return expand_expr (build1 (NOP_EXPR, type,
8735 build (COND_EXPR, TREE_TYPE (iftrue),
8736 TREE_OPERAND (exp, 0),
8737 iftrue, iffalse)),
8738 target, tmode, modifier);
8739 }
8740
8741 {
8742 /* Note that COND_EXPRs whose type is a structure or union
8743 are required to be constructed to contain assignments of
8744 a temporary variable, so that we can evaluate them here
8745 for side effect only. If type is void, we must do likewise. */
8746
8747 /* If an arm of the branch requires a cleanup,
8748 only that cleanup is performed. */
8749
8750 tree singleton = 0;
8751 tree binary_op = 0, unary_op = 0;
8752
8753 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8754 convert it to our mode, if necessary. */
8755 if (integer_onep (TREE_OPERAND (exp, 1))
8756 && integer_zerop (TREE_OPERAND (exp, 2))
8757 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8758 {
8759 if (ignore)
8760 {
8761 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8762 modifier);
8763 return const0_rtx;
8764 }
8765
8766 if (modifier == EXPAND_STACK_PARM)
8767 target = 0;
8768 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8769 if (GET_MODE (op0) == mode)
8770 return op0;
8771
8772 if (target == 0)
8773 target = gen_reg_rtx (mode);
8774 convert_move (target, op0, unsignedp);
8775 return target;
8776 }
8777
8778 /* Check for X ? A + B : A. If we have this, we can copy A to the
8779 output and conditionally add B. Similarly for unary operations.
8780 Don't do this if X has side-effects because those side effects
8781 might affect A or B and the "?" operation is a sequence point in
8782 ANSI. (operand_equal_p tests for side effects.) */
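/* A hedged example (not from the original source): for

     y = x ? a + b : a;

   the singleton is A and the binary op is A + B, so A can be stored into
   the result unconditionally and B added only when X is true, instead of
   branching around two complete stores.  */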
8783
8784 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8785 && operand_equal_p (TREE_OPERAND (exp, 2),
8786 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8787 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8788 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8789 && operand_equal_p (TREE_OPERAND (exp, 1),
8790 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8791 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8792 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8793 && operand_equal_p (TREE_OPERAND (exp, 2),
8794 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8795 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8796 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8797 && operand_equal_p (TREE_OPERAND (exp, 1),
8798 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8799 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8800
8801 /* If we are not to produce a result, we have no target. Otherwise,
8802 if a target was specified use it; it will not be used as an
8803 intermediate target unless it is safe. If no target, use a
8804 temporary. */
8805
8806 if (ignore)
8807 temp = 0;
8808 else if (modifier == EXPAND_STACK_PARM)
8809 temp = assign_temp (type, 0, 0, 1);
8810 else if (original_target
8811 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8812 || (singleton && GET_CODE (original_target) == REG
8813 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8814 && original_target == var_rtx (singleton)))
8815 && GET_MODE (original_target) == mode
8816 #ifdef HAVE_conditional_move
8817 && (! can_conditionally_move_p (mode)
8818 || GET_CODE (original_target) == REG
8819 || TREE_ADDRESSABLE (type))
8820 #endif
8821 && (GET_CODE (original_target) != MEM
8822 || TREE_ADDRESSABLE (type)))
8823 temp = original_target;
8824 else if (TREE_ADDRESSABLE (type))
8825 abort ();
8826 else
8827 temp = assign_temp (type, 0, 0, 1);
8828
8829 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8830 do the test of X as a store-flag operation, do this as
8831 A + ((X != 0) << log C). Similarly for other simple binary
8832 operators. Only do for C == 1 if BRANCH_COST is low. */
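/* Illustration only (not from the original source): with C == 4,

     y = x ? a + 4 : a;

   can be emitted without a branch as

     y = a + ((x != 0) << 2);

   since (x != 0) is 0 or 1 and shifting it by log2 (4) == 2 yields
   either 0 or 4.  */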
8833 if (temp && singleton && binary_op
8834 && (TREE_CODE (binary_op) == PLUS_EXPR
8835 || TREE_CODE (binary_op) == MINUS_EXPR
8836 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8837 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8838 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8839 : integer_onep (TREE_OPERAND (binary_op, 1)))
8840 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8841 {
8842 rtx result;
8843 tree cond;
8844 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8845 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8846 ? addv_optab : add_optab)
8847 : TREE_CODE (binary_op) == MINUS_EXPR
8848 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8849 ? subv_optab : sub_optab)
8850 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8851 : xor_optab);
8852
8853 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8854 if (singleton == TREE_OPERAND (exp, 1))
8855 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8856 else
8857 cond = TREE_OPERAND (exp, 0);
8858
8859 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8860 ? temp : NULL_RTX),
8861 mode, BRANCH_COST <= 1);
8862
8863 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8864 result = expand_shift (LSHIFT_EXPR, mode, result,
8865 build_int_2 (tree_log2
8866 (TREE_OPERAND
8867 (binary_op, 1)),
8868 0),
8869 (safe_from_p (temp, singleton, 1)
8870 ? temp : NULL_RTX), 0);
8871
8872 if (result)
8873 {
8874 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8875 return expand_binop (mode, boptab, op1, result, temp,
8876 unsignedp, OPTAB_LIB_WIDEN);
8877 }
8878 }
8879
8880 do_pending_stack_adjust ();
8881 NO_DEFER_POP;
8882 op0 = gen_label_rtx ();
8883
8884 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8885 {
8886 if (temp != 0)
8887 {
8888 /* If the target conflicts with the other operand of the
8889 binary op, we can't use it. Also, we can't use the target
8890 if it is a hard register, because evaluating the condition
8891 might clobber it. */
8892 if ((binary_op
8893 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8894 || (GET_CODE (temp) == REG
8895 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8896 temp = gen_reg_rtx (mode);
8897 store_expr (singleton, temp,
8898 modifier == EXPAND_STACK_PARM ? 2 : 0);
8899 }
8900 else
8901 expand_expr (singleton,
8902 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8903 if (singleton == TREE_OPERAND (exp, 1))
8904 jumpif (TREE_OPERAND (exp, 0), op0);
8905 else
8906 jumpifnot (TREE_OPERAND (exp, 0), op0);
8907
8908 start_cleanup_deferral ();
8909 if (binary_op && temp == 0)
8910 /* Just touch the other operand. */
8911 expand_expr (TREE_OPERAND (binary_op, 1),
8912 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8913 else if (binary_op)
8914 store_expr (build (TREE_CODE (binary_op), type,
8915 make_tree (type, temp),
8916 TREE_OPERAND (binary_op, 1)),
8917 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8918 else
8919 store_expr (build1 (TREE_CODE (unary_op), type,
8920 make_tree (type, temp)),
8921 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8922 op1 = op0;
8923 }
8924 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8925 comparison operator. If we have one of these cases, set the
8926 output to A, branch on A (cse will merge these two references),
8927 then set the output to FOO. */
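/* A hedged illustration (not in the original source): for

     y = (a != 0) ? a : foo;

   we emit roughly

     y = a;  if (a != 0) goto done;  y = foo;  done:;

   so the comparison and the first store both reference A, and CSE can
   merge those two uses.  */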
8928 else if (temp
8929 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8930 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8931 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8932 TREE_OPERAND (exp, 1), 0)
8933 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8934 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8935 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8936 {
8937 if (GET_CODE (temp) == REG
8938 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8939 temp = gen_reg_rtx (mode);
8940 store_expr (TREE_OPERAND (exp, 1), temp,
8941 modifier == EXPAND_STACK_PARM ? 2 : 0);
8942 jumpif (TREE_OPERAND (exp, 0), op0);
8943
8944 start_cleanup_deferral ();
8945 store_expr (TREE_OPERAND (exp, 2), temp,
8946 modifier == EXPAND_STACK_PARM ? 2 : 0);
8947 op1 = op0;
8948 }
8949 else if (temp
8950 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8951 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8952 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8953 TREE_OPERAND (exp, 2), 0)
8954 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8955 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8956 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8957 {
8958 if (GET_CODE (temp) == REG
8959 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8960 temp = gen_reg_rtx (mode);
8961 store_expr (TREE_OPERAND (exp, 2), temp,
8962 modifier == EXPAND_STACK_PARM ? 2 : 0);
8963 jumpifnot (TREE_OPERAND (exp, 0), op0);
8964
8965 start_cleanup_deferral ();
8966 store_expr (TREE_OPERAND (exp, 1), temp,
8967 modifier == EXPAND_STACK_PARM ? 2 : 0);
8968 op1 = op0;
8969 }
8970 else
8971 {
8972 op1 = gen_label_rtx ();
8973 jumpifnot (TREE_OPERAND (exp, 0), op0);
8974
8975 start_cleanup_deferral ();
8976
8977 /* One branch of the cond can be void, if it never returns. For
8978 example A ? throw : E */
8979 if (temp != 0
8980 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8981 store_expr (TREE_OPERAND (exp, 1), temp,
8982 modifier == EXPAND_STACK_PARM ? 2 : 0);
8983 else
8984 expand_expr (TREE_OPERAND (exp, 1),
8985 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8986 end_cleanup_deferral ();
8987 emit_queue ();
8988 emit_jump_insn (gen_jump (op1));
8989 emit_barrier ();
8990 emit_label (op0);
8991 start_cleanup_deferral ();
8992 if (temp != 0
8993 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8994 store_expr (TREE_OPERAND (exp, 2), temp,
8995 modifier == EXPAND_STACK_PARM ? 2 : 0);
8996 else
8997 expand_expr (TREE_OPERAND (exp, 2),
8998 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8999 }
9000
9001 end_cleanup_deferral ();
9002
9003 emit_queue ();
9004 emit_label (op1);
9005 OK_DEFER_POP;
9006
9007 return temp;
9008 }
9009
9010 case TARGET_EXPR:
9011 {
9012 /* Something needs to be initialized, but we didn't know
9013 where that thing was when building the tree. For example,
9014 it could be the return value of a function, or a parameter
9015 to a function which is laid out on the stack, or a temporary
9016 variable which must be passed by reference.
9017
9018 We guarantee that the expression will either be constructed
9019 or copied into our original target. */
9020
9021 tree slot = TREE_OPERAND (exp, 0);
9022 tree cleanups = NULL_TREE;
9023 tree exp1;
9024
9025 if (TREE_CODE (slot) != VAR_DECL)
9026 abort ();
9027
9028 if (! ignore)
9029 target = original_target;
9030
9031 /* Set this here so that if we get a target that refers to a
9032 register variable that's already been used, put_reg_into_stack
9033 knows that it should fix up those uses. */
9034 TREE_USED (slot) = 1;
9035
9036 if (target == 0)
9037 {
9038 if (DECL_RTL_SET_P (slot))
9039 {
9040 target = DECL_RTL (slot);
9041 /* If we have already expanded the slot, don't do
9042 it again. (mrs) */
9043 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9044 return target;
9045 }
9046 else
9047 {
9048 target = assign_temp (type, 2, 0, 1);
9049 /* All temp slots at this level must not conflict. */
9050 preserve_temp_slots (target);
9051 SET_DECL_RTL (slot, target);
9052 if (TREE_ADDRESSABLE (slot))
9053 put_var_into_stack (slot, /*rescan=*/false);
9054
9055 /* Since SLOT is not known to the called function
9056 to belong to its stack frame, we must build an explicit
9057 cleanup. This case occurs when we must build up a reference
9058 to pass the reference as an argument. In this case,
9059 it is very likely that such a reference need not be
9060 built here. */
9061
9062 if (TREE_OPERAND (exp, 2) == 0)
9063 TREE_OPERAND (exp, 2)
9064 = (*lang_hooks.maybe_build_cleanup) (slot);
9065 cleanups = TREE_OPERAND (exp, 2);
9066 }
9067 }
9068 else
9069 {
9070 /* This case does occur, when expanding a parameter which
9071 needs to be constructed on the stack. The target
9072 is the actual stack address that we want to initialize.
9073 The function we call will perform the cleanup in this case. */
9074
9075 /* If we have already assigned it space, use that space,
9076 not the target that we were passed in, as our target
9077 parameter is only a hint. */
9078 if (DECL_RTL_SET_P (slot))
9079 {
9080 target = DECL_RTL (slot);
9081 /* If we have already expanded the slot, don't do
9082 it again. (mrs) */
9083 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9084 return target;
9085 }
9086 else
9087 {
9088 SET_DECL_RTL (slot, target);
9089 /* If we must have an addressable slot, then make sure that
9090 the RTL that we just stored in slot is OK. */
9091 if (TREE_ADDRESSABLE (slot))
9092 put_var_into_stack (slot, /*rescan=*/true);
9093 }
9094 }
9095
9096 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9097 /* Mark it as expanded. */
9098 TREE_OPERAND (exp, 1) = NULL_TREE;
9099
9100 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9101
9102 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9103
9104 return target;
9105 }
9106
9107 case INIT_EXPR:
9108 {
9109 tree lhs = TREE_OPERAND (exp, 0);
9110 tree rhs = TREE_OPERAND (exp, 1);
9111
9112 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9113 return temp;
9114 }
9115
9116 case MODIFY_EXPR:
9117 {
9118 /* If lhs is complex, expand calls in rhs before computing it.
9119 That's so we don't compute a pointer and save it over a
9120 call. If lhs is simple, compute it first so we can give it
9121 as a target if the rhs is just a call. This avoids an
9122 extra temp and copy and that prevents a partial-subsumption
9123 which makes bad code. Actually we could treat
9124 component_ref's of vars like vars. */
9125
9126 tree lhs = TREE_OPERAND (exp, 0);
9127 tree rhs = TREE_OPERAND (exp, 1);
9128
9129 temp = 0;
9130
9131 /* Check for |= or &= of a bitfield of size one into another bitfield
9132 of size 1. In this case, (unless we need the result of the
9133 assignment) we can do this more efficiently with a
9134 test followed by an assignment, if necessary.
9135
9136 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9137 things change so we do, this code should be enhanced to
9138 support it. */
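/* Illustrative sketch (not part of the original source): with one-bit
   bitfields F and G,

     s.f |= t.g;

   can be emitted, when the result of the assignment is not needed, as

     if (t.g) s.f = 1;

   i.e. a test followed by a conditional store of a constant, instead of
   a read-modify-write of S.F.  */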
9139 if (ignore
9140 && TREE_CODE (lhs) == COMPONENT_REF
9141 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9142 || TREE_CODE (rhs) == BIT_AND_EXPR)
9143 && TREE_OPERAND (rhs, 0) == lhs
9144 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9145 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9146 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9147 {
9148 rtx label = gen_label_rtx ();
9149
9150 do_jump (TREE_OPERAND (rhs, 1),
9151 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9152 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9153 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9154 (TREE_CODE (rhs) == BIT_IOR_EXPR
9155 ? integer_one_node
9156 : integer_zero_node)),
9157 0, 0);
9158 do_pending_stack_adjust ();
9159 emit_label (label);
9160 return const0_rtx;
9161 }
9162
9163 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9164
9165 return temp;
9166 }
9167
9168 case RETURN_EXPR:
9169 if (!TREE_OPERAND (exp, 0))
9170 expand_null_return ();
9171 else
9172 expand_return (TREE_OPERAND (exp, 0));
9173 return const0_rtx;
9174
9175 case PREINCREMENT_EXPR:
9176 case PREDECREMENT_EXPR:
9177 return expand_increment (exp, 0, ignore);
9178
9179 case POSTINCREMENT_EXPR:
9180 case POSTDECREMENT_EXPR:
9181 /* Faster to treat as pre-increment if result is not used. */
9182 return expand_increment (exp, ! ignore, ignore);
9183
9184 case ADDR_EXPR:
9185 if (modifier == EXPAND_STACK_PARM)
9186 target = 0;
9187 /* Are we taking the address of a nested function? */
9188 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9189 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9190 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9191 && ! TREE_STATIC (exp))
9192 {
9193 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9194 op0 = force_operand (op0, target);
9195 }
9196 /* If we are taking the address of something erroneous, just
9197 return a zero. */
9198 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9199 return const0_rtx;
9200 /* If we are taking the address of a constant and are at the
9201 top level, we have to use output_constant_def since we can't
9202 call force_const_mem at top level. */
9203 else if (cfun == 0
9204 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9205 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9206 == 'c')))
9207 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9208 else
9209 {
9210 /* We make sure to pass const0_rtx down if we came in with
9211 ignore set, to avoid running the cleanups twice. */
9212 op0 = expand_expr (TREE_OPERAND (exp, 0),
9213 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9214 (modifier == EXPAND_INITIALIZER
9215 ? modifier : EXPAND_CONST_ADDRESS));
9216
9217 /* If we are going to ignore the result, OP0 will have been set
9218 to const0_rtx, so just return it. Don't get confused and
9219 think we are taking the address of the constant. */
9220 if (ignore)
9221 return op0;
9222
9223 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9224 clever and returns a REG when given a MEM. */
9225 op0 = protect_from_queue (op0, 1);
9226
9227 /* We would like the object in memory. If it is a constant, we can
9228 have it be statically allocated into memory. For a non-constant,
9229 we need to allocate some memory and store the value into it. */
9230
9231 if (CONSTANT_P (op0))
9232 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9233 op0);
9234 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9235 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9236 || GET_CODE (op0) == PARALLEL)
9237 {
9238 /* If the operand is a SAVE_EXPR, we can deal with this by
9239 forcing the SAVE_EXPR into memory. */
9240 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9241 {
9242 put_var_into_stack (TREE_OPERAND (exp, 0),
9243 /*rescan=*/true);
9244 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9245 }
9246 else
9247 {
9248 /* If this object is in a register, it can't be BLKmode. */
9249 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9250 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9251
9252 if (GET_CODE (op0) == PARALLEL)
9253 /* Handle calls that pass values in multiple
9254 non-contiguous locations. The Irix 6 ABI has examples
9255 of this. */
9256 emit_group_store (memloc, op0,
9257 int_size_in_bytes (inner_type));
9258 else
9259 emit_move_insn (memloc, op0);
9260
9261 op0 = memloc;
9262 }
9263 }
9264
9265 if (GET_CODE (op0) != MEM)
9266 abort ();
9267
9268 mark_temp_addr_taken (op0);
9269 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9270 {
9271 op0 = XEXP (op0, 0);
9272 #ifdef POINTERS_EXTEND_UNSIGNED
9273 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9274 && mode == ptr_mode)
9275 op0 = convert_memory_address (ptr_mode, op0);
9276 #endif
9277 return op0;
9278 }
9279
9280 /* If OP0 is not aligned at least as much as the type requires, we
9281 need to make a temporary, copy OP0 to it, and take the address of
9282 the temporary. We want to use the alignment of the type, not of
9283 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9284 the test for BLKmode means that can't happen. The test for
9285 BLKmode is because we never make mis-aligned MEMs with
9286 non-BLKmode.
9287
9288 We don't need to do this at all if the machine doesn't have
9289 strict alignment. */
9290 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9291 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9292 > MEM_ALIGN (op0))
9293 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9294 {
9295 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9296 rtx new;
9297
9298 if (TYPE_ALIGN_OK (inner_type))
9299 abort ();
9300
9301 if (TREE_ADDRESSABLE (inner_type))
9302 {
9303 /* We can't make a bitwise copy of this object, so fail. */
9304 error ("cannot take the address of an unaligned member");
9305 return const0_rtx;
9306 }
9307
9308 new = assign_stack_temp_for_type
9309 (TYPE_MODE (inner_type),
9310 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9311 : int_size_in_bytes (inner_type),
9312 1, build_qualified_type (inner_type,
9313 (TYPE_QUALS (inner_type)
9314 | TYPE_QUAL_CONST)));
9315
9316 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9317 (modifier == EXPAND_STACK_PARM
9318 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9319
9320 op0 = new;
9321 }
9322
9323 op0 = force_operand (XEXP (op0, 0), target);
9324 }
9325
9326 if (flag_force_addr
9327 && GET_CODE (op0) != REG
9328 && modifier != EXPAND_CONST_ADDRESS
9329 && modifier != EXPAND_INITIALIZER
9330 && modifier != EXPAND_SUM)
9331 op0 = force_reg (Pmode, op0);
9332
9333 if (GET_CODE (op0) == REG
9334 && ! REG_USERVAR_P (op0))
9335 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9336
9337 #ifdef POINTERS_EXTEND_UNSIGNED
9338 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9339 && mode == ptr_mode)
9340 op0 = convert_memory_address (ptr_mode, op0);
9341 #endif
9342
9343 return op0;
9344
9345 case ENTRY_VALUE_EXPR:
9346 abort ();
9347
9348 /* COMPLEX type for Extended Pascal & Fortran */
9349 case COMPLEX_EXPR:
9350 {
9351 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9352 rtx insns;
9353
9354 /* Get the rtx code of the operands. */
9355 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9356 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9357
9358 if (! target)
9359 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9360
9361 start_sequence ();
9362
9363 /* Move the real (op0) and imaginary (op1) parts to their location. */
9364 emit_move_insn (gen_realpart (mode, target), op0);
9365 emit_move_insn (gen_imagpart (mode, target), op1);
9366
9367 insns = get_insns ();
9368 end_sequence ();
9369
9370 /* Complex construction should appear as a single unit. */
9371 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9372 each with a separate pseudo as destination.
9373 It's not correct for flow to treat them as a unit. */
9374 if (GET_CODE (target) != CONCAT)
9375 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9376 else
9377 emit_insn (insns);
9378
9379 return target;
9380 }
9381
9382 case REALPART_EXPR:
9383 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9384 return gen_realpart (mode, op0);
9385
9386 case IMAGPART_EXPR:
9387 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9388 return gen_imagpart (mode, op0);
9389
9390 case CONJ_EXPR:
9391 {
9392 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9393 rtx imag_t;
9394 rtx insns;
9395
9396 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9397
9398 if (! target)
9399 target = gen_reg_rtx (mode);
9400
9401 start_sequence ();
9402
9403 /* Store the realpart and the negated imagpart to target. */
9404 emit_move_insn (gen_realpart (partmode, target),
9405 gen_realpart (partmode, op0));
9406
9407 imag_t = gen_imagpart (partmode, target);
9408 temp = expand_unop (partmode,
9409 ! unsignedp && flag_trapv
9410 && (GET_MODE_CLASS(partmode) == MODE_INT)
9411 ? negv_optab : neg_optab,
9412 gen_imagpart (partmode, op0), imag_t, 0);
9413 if (temp != imag_t)
9414 emit_move_insn (imag_t, temp);
9415
9416 insns = get_insns ();
9417 end_sequence ();
9418
9419 /* Conjugate should appear as a single unit.
9420 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9421 each with a separate pseudo as destination.
9422 It's not correct for flow to treat them as a unit. */
9423 if (GET_CODE (target) != CONCAT)
9424 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9425 else
9426 emit_insn (insns);
9427
9428 return target;
9429 }
9430
9431 case TRY_CATCH_EXPR:
9432 {
9433 tree handler = TREE_OPERAND (exp, 1);
9434
9435 expand_eh_region_start ();
9436
9437 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9438
9439 expand_eh_region_end_cleanup (handler);
9440
9441 return op0;
9442 }
9443
9444 case TRY_FINALLY_EXPR:
9445 {
9446 tree try_block = TREE_OPERAND (exp, 0);
9447 tree finally_block = TREE_OPERAND (exp, 1);
9448
9449 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9450 {
9451 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9452 is not sufficient, so we cannot expand the block twice.
9453 So we play games with GOTO_SUBROUTINE_EXPR to let us
9454 expand the thing only once. */
9455 /* When not optimizing, we go ahead with this form since
9456 (1) user breakpoints operate more predictably without
9457 code duplication, and
9458 (2) we're not running any of the global optimizers
9459 that would explode in time/space with the highly
9460 connected CFG created by the indirect branching. */
9461
9462 rtx finally_label = gen_label_rtx ();
9463 rtx done_label = gen_label_rtx ();
9464 rtx return_link = gen_reg_rtx (Pmode);
9465 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9466 (tree) finally_label, (tree) return_link);
9467 TREE_SIDE_EFFECTS (cleanup) = 1;
9468
9469 /* Start a new binding layer that will keep track of all cleanup
9470 actions to be performed. */
9471 expand_start_bindings (2);
9472 target_temp_slot_level = temp_slot_level;
9473
9474 expand_decl_cleanup (NULL_TREE, cleanup);
9475 op0 = expand_expr (try_block, target, tmode, modifier);
9476
9477 preserve_temp_slots (op0);
9478 expand_end_bindings (NULL_TREE, 0, 0);
9479 emit_jump (done_label);
9480 emit_label (finally_label);
9481 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9482 emit_indirect_jump (return_link);
9483 emit_label (done_label);
9484 }
9485 else
9486 {
9487 expand_start_bindings (2);
9488 target_temp_slot_level = temp_slot_level;
9489
9490 expand_decl_cleanup (NULL_TREE, finally_block);
9491 op0 = expand_expr (try_block, target, tmode, modifier);
9492
9493 preserve_temp_slots (op0);
9494 expand_end_bindings (NULL_TREE, 0, 0);
9495 }
9496
9497 return op0;
9498 }
9499
9500 case GOTO_SUBROUTINE_EXPR:
9501 {
9502 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9503 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9504 rtx return_address = gen_label_rtx ();
9505 emit_move_insn (return_link,
9506 gen_rtx_LABEL_REF (Pmode, return_address));
9507 emit_jump (subr);
9508 emit_label (return_address);
9509 return const0_rtx;
9510 }
9511
9512 case VA_ARG_EXPR:
9513 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9514
9515 case EXC_PTR_EXPR:
9516 return get_exception_pointer (cfun);
9517
9518 case FDESC_EXPR:
9519 /* Function descriptors are not valid except for as
9520 initialization constants, and should not be expanded. */
9521 abort ();
9522
9523 default:
9524 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9525 }
9526
9527 /* Here to do an ordinary binary operator, generating an instruction
9528 from the optab already placed in `this_optab'. */
9529 binop:
9530 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9531 subtarget = 0;
9532 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9533 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9534 binop2:
9535 if (modifier == EXPAND_STACK_PARM)
9536 target = 0;
9537 temp = expand_binop (mode, this_optab, op0, op1, target,
9538 unsignedp, OPTAB_LIB_WIDEN);
9539 if (temp == 0)
9540 abort ();
9541 return temp;
9542 }
9543 \f
9544 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9545 when applied to the address of EXP produces an address known to be
9546 aligned more than BIGGEST_ALIGNMENT. */
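/* A hedged example of the shape recognized here (not from the original
   source): an OFFSET of the form

     (- (HOST_WIDE_INT) &exp) & (ALIGN - 1)

   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT; adding such
   an offset to the address of EXP rounds it up to an ALIGN boundary.  */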
9547
9548 static int
9549 is_aligning_offset (offset, exp)
9550 tree offset;
9551 tree exp;
9552 {
9553 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9554 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9555 || TREE_CODE (offset) == NOP_EXPR
9556 || TREE_CODE (offset) == CONVERT_EXPR
9557 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9558 offset = TREE_OPERAND (offset, 0);
9559
9560 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9561 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9562 if (TREE_CODE (offset) != BIT_AND_EXPR
9563 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9564 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9565 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9566 return 0;
9567
9568 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9569 It must be NEGATE_EXPR. Then strip any more conversions. */
9570 offset = TREE_OPERAND (offset, 0);
9571 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9572 || TREE_CODE (offset) == NOP_EXPR
9573 || TREE_CODE (offset) == CONVERT_EXPR)
9574 offset = TREE_OPERAND (offset, 0);
9575
9576 if (TREE_CODE (offset) != NEGATE_EXPR)
9577 return 0;
9578
9579 offset = TREE_OPERAND (offset, 0);
9580 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9581 || TREE_CODE (offset) == NOP_EXPR
9582 || TREE_CODE (offset) == CONVERT_EXPR)
9583 offset = TREE_OPERAND (offset, 0);
9584
9585 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9586 whose type is the same as EXP. */
9587 return (TREE_CODE (offset) == ADDR_EXPR
9588 && (TREE_OPERAND (offset, 0) == exp
9589 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9590 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9591 == TREE_TYPE (exp)))));
9592 }
9593 \f
9594 /* Return the tree node if ARG corresponds to a string constant, or zero
9595 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9596 in bytes within the string that ARG is accessing. The type of the
9597 offset will be `sizetype'. */
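/* For instance (an illustrative note, not from the original source):
   given an argument built from "hello" + i, this returns the STRING_CST
   for "hello" and sets *PTR_OFFSET to (sizetype) i; for plain "hello"
   the offset returned is size_zero_node.  */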
9598
9599 tree
9600 string_constant (arg, ptr_offset)
9601 tree arg;
9602 tree *ptr_offset;
9603 {
9604 STRIP_NOPS (arg);
9605
9606 if (TREE_CODE (arg) == ADDR_EXPR
9607 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9608 {
9609 *ptr_offset = size_zero_node;
9610 return TREE_OPERAND (arg, 0);
9611 }
9612 else if (TREE_CODE (arg) == PLUS_EXPR)
9613 {
9614 tree arg0 = TREE_OPERAND (arg, 0);
9615 tree arg1 = TREE_OPERAND (arg, 1);
9616
9617 STRIP_NOPS (arg0);
9618 STRIP_NOPS (arg1);
9619
9620 if (TREE_CODE (arg0) == ADDR_EXPR
9621 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9622 {
9623 *ptr_offset = convert (sizetype, arg1);
9624 return TREE_OPERAND (arg0, 0);
9625 }
9626 else if (TREE_CODE (arg1) == ADDR_EXPR
9627 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9628 {
9629 *ptr_offset = convert (sizetype, arg0);
9630 return TREE_OPERAND (arg1, 0);
9631 }
9632 }
9633
9634 return 0;
9635 }
9636 \f
9637 /* Expand code for a post- or pre- increment or decrement
9638 and return the RTX for the result.
9639 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9640
9641 static rtx
9642 expand_increment (exp, post, ignore)
9643 tree exp;
9644 int post, ignore;
9645 {
9646 rtx op0, op1;
9647 rtx temp, value;
9648 tree incremented = TREE_OPERAND (exp, 0);
9649 optab this_optab = add_optab;
9650 int icode;
9651 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9652 int op0_is_copy = 0;
9653 int single_insn = 0;
9654 /* 1 means we can't store into OP0 directly,
9655 because it is a subreg narrower than a word,
9656 and we don't dare clobber the rest of the word. */
9657 int bad_subreg = 0;
9658
9659 /* Stabilize any component ref that might need to be
9660 evaluated more than once below. */
9661 if (!post
9662 || TREE_CODE (incremented) == BIT_FIELD_REF
9663 || (TREE_CODE (incremented) == COMPONENT_REF
9664 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9665 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9666 incremented = stabilize_reference (incremented);
9667 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9668 ones into save exprs so that they don't accidentally get evaluated
9669 more than once by the code below. */
9670 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9671 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9672 incremented = save_expr (incremented);
9673
9674 /* Compute the operands as RTX.
9675 Note whether OP0 is the actual lvalue or a copy of it:
9676 I believe it is a copy iff it is a register or subreg
9677 and insns were generated in computing it. */
9678
9679 temp = get_last_insn ();
9680 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9681
9682 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9683 in place but instead must do sign- or zero-extension during assignment,
9684 so we copy it into a new register and let the code below use it as
9685 a copy.
9686
9687 Note that we can safely modify this SUBREG since it is known not to be
9688 shared (it was made by the expand_expr call above). */
9689
9690 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9691 {
9692 if (post)
9693 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9694 else
9695 bad_subreg = 1;
9696 }
9697 else if (GET_CODE (op0) == SUBREG
9698 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9699 {
9700 /* We cannot increment this SUBREG in place. If we are
9701 post-incrementing, get a copy of the old value. Otherwise,
9702 just mark that we cannot increment in place. */
9703 if (post)
9704 op0 = copy_to_reg (op0);
9705 else
9706 bad_subreg = 1;
9707 }
9708
9709 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9710 && temp != get_last_insn ());
9711 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9712
9713 /* Decide whether incrementing or decrementing. */
9714 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9715 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9716 this_optab = sub_optab;
9717
9718 /* Convert decrement by a constant into a negative increment. */
9719 if (this_optab == sub_optab
9720 && GET_CODE (op1) == CONST_INT)
9721 {
9722 op1 = GEN_INT (-INTVAL (op1));
9723 this_optab = add_optab;
9724 }
9725
9726 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9727 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9728
9729 /* For a preincrement, see if we can do this with a single instruction. */
9730 if (!post)
9731 {
9732 icode = (int) this_optab->handlers[(int) mode].insn_code;
9733 if (icode != (int) CODE_FOR_nothing
9734 /* Make sure that OP0 is valid for operands 0 and 1
9735 of the insn we want to queue. */
9736 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9737 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9738 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9739 single_insn = 1;
9740 }
9741
9742 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9743 then we cannot just increment OP0. We must therefore contrive to
9744 increment the original value. Then, for postincrement, we can return
9745 OP0 since it is a copy of the old value. For preincrement, expand here
9746 unless we can do it with a single insn.
9747
9748 Likewise if storing directly into OP0 would clobber high bits
9749 we need to preserve (bad_subreg). */
9750 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9751 {
9752 /* This is the easiest way to increment the value wherever it is.
9753 Problems with multiple evaluation of INCREMENTED are prevented
9754 because either (1) it is a component_ref or preincrement,
9755 in which case it was stabilized above, or (2) it is an array_ref
9756 with constant index in an array in a register, which is
9757 safe to reevaluate. */
9758 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9759 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9760 ? MINUS_EXPR : PLUS_EXPR),
9761 TREE_TYPE (exp),
9762 incremented,
9763 TREE_OPERAND (exp, 1));
9764
9765 while (TREE_CODE (incremented) == NOP_EXPR
9766 || TREE_CODE (incremented) == CONVERT_EXPR)
9767 {
9768 newexp = convert (TREE_TYPE (incremented), newexp);
9769 incremented = TREE_OPERAND (incremented, 0);
9770 }
9771
9772 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9773 return post ? op0 : temp;
9774 }
9775
9776 if (post)
9777 {
9778 /* We have a true reference to the value in OP0.
9779 If there is an insn to add or subtract in this mode, queue it.
9780 Queueing the increment insn avoids the register shuffling
9781 that often results if we must increment now and first save
9782 the old value for subsequent use. */
9783
9784 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9785 op0 = stabilize (op0);
9786 #endif
9787
9788 icode = (int) this_optab->handlers[(int) mode].insn_code;
9789 if (icode != (int) CODE_FOR_nothing
9790 /* Make sure that OP0 is valid for operands 0 and 1
9791 of the insn we want to queue. */
9792 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9793 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9794 {
9795 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9796 op1 = force_reg (mode, op1);
9797
9798 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9799 }
9800 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9801 {
9802 rtx addr = (general_operand (XEXP (op0, 0), mode)
9803 ? force_reg (Pmode, XEXP (op0, 0))
9804 : copy_to_reg (XEXP (op0, 0)));
9805 rtx temp, result;
9806
9807 op0 = replace_equiv_address (op0, addr);
9808 temp = force_reg (GET_MODE (op0), op0);
9809 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9810 op1 = force_reg (mode, op1);
9811
9812 /* The increment queue is LIFO, thus we have to `queue'
9813 the instructions in reverse order. */
9814 enqueue_insn (op0, gen_move_insn (op0, temp));
9815 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9816 return result;
9817 }
9818 }
9819
9820 /* Preincrement, or we can't increment with one simple insn. */
9821 if (post)
9822 /* Save a copy of the value before inc or dec, to return it later. */
9823 temp = value = copy_to_reg (op0);
9824 else
9825 /* Arrange to return the incremented value. */
9826 /* Copy the rtx because expand_binop will protect from the queue,
9827 and the results of that would be invalid for us to return
9828 if our caller does emit_queue before using our result. */
9829 temp = copy_rtx (value = op0);
9830
9831 /* Increment however we can. */
9832 op1 = expand_binop (mode, this_optab, value, op1, op0,
9833 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9834
9835 /* Make sure the value is stored into OP0. */
9836 if (op1 != op0)
9837 emit_move_insn (op0, op1);
9838
9839 return temp;
9840 }
9841 \f
9842 /* Generate code to calculate EXP using a store-flag instruction
9843 and return an rtx for the result. EXP is either a comparison
9844 or a TRUTH_NOT_EXPR whose operand is a comparison.
9845
9846 If TARGET is nonzero, store the result there if convenient.
9847
9848 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9849 cheap.
9850
9851 Return zero if there is no suitable set-flag instruction
9852 available on this machine.
9853
9854 Once expand_expr has been called on the arguments of the comparison,
9855 we are committed to doing the store flag, since it is not safe to
9856 re-evaluate the expression. We emit the store-flag insn by calling
9857 emit_store_flag, but only expand the arguments if we have a reason
9858 to believe that emit_store_flag will be successful. If we think that
9859 it will, but it isn't, we have to simulate the store-flag with a
9860 set/jump/set sequence. */
9861
9862 static rtx
9863 do_store_flag (exp, target, mode, only_cheap)
9864 tree exp;
9865 rtx target;
9866 enum machine_mode mode;
9867 int only_cheap;
9868 {
9869 enum rtx_code code;
9870 tree arg0, arg1, type;
9871 tree tem;
9872 enum machine_mode operand_mode;
9873 int invert = 0;
9874 int unsignedp;
9875 rtx op0, op1;
9876 enum insn_code icode;
9877 rtx subtarget = target;
9878 rtx result, label;
9879
9880 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9881 result at the end. We can't simply invert the test since it would
9882 have already been inverted if it were valid. This case occurs for
9883 some floating-point comparisons. */
9884
9885 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9886 invert = 1, exp = TREE_OPERAND (exp, 0);
9887
9888 arg0 = TREE_OPERAND (exp, 0);
9889 arg1 = TREE_OPERAND (exp, 1);
9890
9891 /* Don't crash if the comparison was erroneous. */
9892 if (arg0 == error_mark_node || arg1 == error_mark_node)
9893 return const0_rtx;
9894
9895 type = TREE_TYPE (arg0);
9896 operand_mode = TYPE_MODE (type);
9897 unsignedp = TREE_UNSIGNED (type);
9898
9899 /* We won't bother with BLKmode store-flag operations because it would mean
9900 passing a lot of information to emit_store_flag. */
9901 if (operand_mode == BLKmode)
9902 return 0;
9903
9904 /* We won't bother with store-flag operations involving function pointers
9905 when function pointers must be canonicalized before comparisons. */
9906 #ifdef HAVE_canonicalize_funcptr_for_compare
9907 if (HAVE_canonicalize_funcptr_for_compare
9908 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9909 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9910 == FUNCTION_TYPE))
9911 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9912 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9913 == FUNCTION_TYPE))))
9914 return 0;
9915 #endif
9916
9917 STRIP_NOPS (arg0);
9918 STRIP_NOPS (arg1);
9919
9920 /* Get the rtx comparison code to use. We know that EXP is a comparison
9921 operation of some type. Some comparisons against 1 and -1 can be
9922 converted to comparisons with zero. Do so here so that the tests
9923 below will be aware that we have a comparison with zero. These
9924 tests will not catch constants in the first operand, but constants
9925 are rarely passed as the first operand. */
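/* A worked example (illustrative only, not from the original source):
   for unsigned X, "X < 1" becomes LEU against zero (i.e. X <= 0), and
   for signed X, "X > -1" becomes GE against zero (i.e. X >= 0), so the
   tests below only need to recognize comparisons with zero.  */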
9926
9927 switch (TREE_CODE (exp))
9928 {
9929 case EQ_EXPR:
9930 code = EQ;
9931 break;
9932 case NE_EXPR:
9933 code = NE;
9934 break;
9935 case LT_EXPR:
9936 if (integer_onep (arg1))
9937 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9938 else
9939 code = unsignedp ? LTU : LT;
9940 break;
9941 case LE_EXPR:
9942 if (! unsignedp && integer_all_onesp (arg1))
9943 arg1 = integer_zero_node, code = LT;
9944 else
9945 code = unsignedp ? LEU : LE;
9946 break;
9947 case GT_EXPR:
9948 if (! unsignedp && integer_all_onesp (arg1))
9949 arg1 = integer_zero_node, code = GE;
9950 else
9951 code = unsignedp ? GTU : GT;
9952 break;
9953 case GE_EXPR:
9954 if (integer_onep (arg1))
9955 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9956 else
9957 code = unsignedp ? GEU : GE;
9958 break;
9959
9960 case UNORDERED_EXPR:
9961 code = UNORDERED;
9962 break;
9963 case ORDERED_EXPR:
9964 code = ORDERED;
9965 break;
9966 case UNLT_EXPR:
9967 code = UNLT;
9968 break;
9969 case UNLE_EXPR:
9970 code = UNLE;
9971 break;
9972 case UNGT_EXPR:
9973 code = UNGT;
9974 break;
9975 case UNGE_EXPR:
9976 code = UNGE;
9977 break;
9978 case UNEQ_EXPR:
9979 code = UNEQ;
9980 break;
9981
9982 default:
9983 abort ();
9984 }
9985
9986 /* Put a constant second. */
9987 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9988 {
9989 tem = arg0; arg0 = arg1; arg1 = tem;
9990 code = swap_condition (code);
9991 }
9992
9993 /* If this is an equality or inequality test of a single bit, we can
9994 do this by shifting the bit being tested to the low-order bit and
9995 masking the result with the constant 1. If the condition was EQ,
9996 we xor it with 1. This does not require an scc insn and is faster
9997 than an scc insn even if we have it. */
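/* Hedged illustration (not part of the original source): for

     (x & 8) != 0

   INNER is X and BITNUM is 3, so the result is computed as
   (x >> 3) & 1; for the EQ form (x & 8) == 0 the result is additionally
   XORed with 1.  */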
9998
9999 if ((code == NE || code == EQ)
10000 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10001 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10002 {
10003 tree inner = TREE_OPERAND (arg0, 0);
10004 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10005 int ops_unsignedp;
10006
10007 /* If INNER is a right shift of a constant and it plus BITNUM does
10008 not overflow, adjust BITNUM and INNER. */
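/* For example (illustrative, not from the original source): testing
   bit 2 of (x >> 3), i.e. ((x >> 3) & 4) != 0, is the same as testing
   bit 5 of X, so BITNUM becomes 2 + 3 = 5 and INNER becomes X.  */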
10009
10010 if (TREE_CODE (inner) == RSHIFT_EXPR
10011 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10012 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10013 && bitnum < TYPE_PRECISION (type)
10014 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10015 bitnum - TYPE_PRECISION (type)))
10016 {
10017 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10018 inner = TREE_OPERAND (inner, 0);
10019 }
10020
10021 /* If we are going to be able to omit the AND below, we must do our
10022 operations as unsigned. If we must use the AND, we have a choice.
10023 Normally unsigned is faster, but for some machines signed is. */
10024 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10025 #ifdef LOAD_EXTEND_OP
10026 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10027 #else
10028 : 1
10029 #endif
10030 );
10031
10032 if (! get_subtarget (subtarget)
10033 || GET_MODE (subtarget) != operand_mode
10034 || ! safe_from_p (subtarget, inner, 1))
10035 subtarget = 0;
10036
10037 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10038
10039 if (bitnum != 0)
10040 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10041 size_int (bitnum), subtarget, ops_unsignedp);
10042
10043 if (GET_MODE (op0) != mode)
10044 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10045
10046 if ((code == EQ && ! invert) || (code == NE && invert))
10047 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10048 ops_unsignedp, OPTAB_LIB_WIDEN);
10049
10050 /* Put the AND last so it can combine with more things. */
10051 if (bitnum != TYPE_PRECISION (type) - 1)
10052 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10053
10054 return op0;
10055 }
10056
10057 /* Now see if we are likely to be able to do this. Return if not. */
10058 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10059 return 0;
10060
10061 icode = setcc_gen_code[(int) code];
10062 if (icode == CODE_FOR_nothing
10063 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10064 {
10065 /* We can only do this if it is one of the special cases that
10066 can be handled without an scc insn. */
10067 if ((code == LT && integer_zerop (arg1))
10068 || (! only_cheap && code == GE && integer_zerop (arg1)))
10069 ;
10070 else if (BRANCH_COST >= 0
10071 && ! only_cheap && (code == NE || code == EQ)
10072 && TREE_CODE (type) != REAL_TYPE
10073 && ((abs_optab->handlers[(int) operand_mode].insn_code
10074 != CODE_FOR_nothing)
10075 || (ffs_optab->handlers[(int) operand_mode].insn_code
10076 != CODE_FOR_nothing)))
10077 ;
10078 else
10079 return 0;
10080 }
10081
10082 if (! get_subtarget (target)
10083 || GET_MODE (subtarget) != operand_mode
10084 || ! safe_from_p (subtarget, arg1, 1))
10085 subtarget = 0;
10086
10087 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10088 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10089
10090 if (target == 0)
10091 target = gen_reg_rtx (mode);
10092
10093 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10094 because, if the emit_store_flag does anything it will succeed and
10095 OP0 and OP1 will not be used subsequently. */
10096
10097 result = emit_store_flag (target, code,
10098 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10099 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10100 operand_mode, unsignedp, 1);
10101
10102 if (result)
10103 {
10104 if (invert)
10105 result = expand_binop (mode, xor_optab, result, const1_rtx,
10106 result, 0, OPTAB_LIB_WIDEN);
10107 return result;
10108 }
10109
10110 /* If this failed, we have to do this with set/compare/jump/set code. */
10111 if (GET_CODE (target) != REG
10112 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10113 target = gen_reg_rtx (GET_MODE (target));
10114
10115 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10116 result = compare_from_rtx (op0, op1, code, unsignedp,
10117 operand_mode, NULL_RTX);
10118 if (GET_CODE (result) == CONST_INT)
10119 return (((result == const0_rtx && ! invert)
10120 || (result != const0_rtx && invert))
10121 ? const0_rtx : const1_rtx);
10122
10123 /* The code of RESULT may not match CODE if compare_from_rtx
10124 decided to swap its operands and reverse the original code.
10125
10126 We know that compare_from_rtx returns either a CONST_INT or
10127 a new comparison code, so it is safe to just extract the
10128 code from RESULT. */
10129 code = GET_CODE (result);
10130
10131 label = gen_label_rtx ();
10132 if (bcc_gen_fctn[(int) code] == 0)
10133 abort ();
10134
10135 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10136 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10137 emit_label (label);
10138
10139 return target;
10140 }
10141 \f
10142
10143 /* Stubs in case we haven't got a casesi insn. */
10144 #ifndef HAVE_casesi
10145 # define HAVE_casesi 0
10146 # define gen_casesi(a, b, c, d, e) (0)
10147 # define CODE_FOR_casesi CODE_FOR_nothing
10148 #endif
10149
10150 /* If the machine does not have a case insn that compares the bounds,
10151 this means extra overhead for dispatch tables, which raises the
10152 threshold for using them. */
10153 #ifndef CASE_VALUES_THRESHOLD
10154 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10155 #endif /* CASE_VALUES_THRESHOLD */
10156
10157 unsigned int
10158 case_values_threshold ()
10159 {
10160 return CASE_VALUES_THRESHOLD;
10161 }
10162
10163 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10164 0 otherwise (i.e. if there is no casesi instruction). */
10165 int
10166 try_casesi (index_type, index_expr, minval, range,
10167 table_label, default_label)
10168 tree index_type, index_expr, minval, range;
10169 rtx table_label ATTRIBUTE_UNUSED;
10170 rtx default_label;
10171 {
10172 enum machine_mode index_mode = SImode;
10173 int index_bits = GET_MODE_BITSIZE (index_mode);
10174 rtx op1, op2, index;
10175 enum machine_mode op_mode;
10176
10177 if (! HAVE_casesi)
10178 return 0;
10179
10180 /* Convert the index to SImode. */
10181 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10182 {
10183 enum machine_mode omode = TYPE_MODE (index_type);
10184 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10185
10186 /* We must handle the endpoints in the original mode. */
10187 index_expr = build (MINUS_EXPR, index_type,
10188 index_expr, minval);
10189 minval = integer_zero_node;
10190 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10191 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10192 omode, 1, default_label);
10193 /* Now we can safely truncate. */
10194 index = convert_to_mode (index_mode, index, 0);
10195 }
10196 else
10197 {
10198 if (TYPE_MODE (index_type) != index_mode)
10199 {
10200 index_expr = convert ((*lang_hooks.types.type_for_size)
10201 (index_bits, 0), index_expr);
10202 index_type = TREE_TYPE (index_expr);
10203 }
10204
10205 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10206 }
10207 emit_queue ();
10208 index = protect_from_queue (index, 0);
10209 do_pending_stack_adjust ();
10210
10211 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10212 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10213 (index, op_mode))
10214 index = copy_to_mode_reg (op_mode, index);
10215
10216 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10217
10218 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10219 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10220 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10221 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10222 (op1, op_mode))
10223 op1 = copy_to_mode_reg (op_mode, op1);
10224
10225 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10226
10227 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10228 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10229 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10230 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10231 (op2, op_mode))
10232 op2 = copy_to_mode_reg (op_mode, op2);
10233
10234 emit_jump_insn (gen_casesi (index, op1, op2,
10235 table_label, default_label));
10236 return 1;
10237 }
10238
10239 /* Attempt to generate a tablejump instruction; same concept. */
10240 #ifndef HAVE_tablejump
10241 #define HAVE_tablejump 0
10242 #define gen_tablejump(x, y) (0)
10243 #endif
10244
10245 /* Subroutine of the next function.
10246
10247 INDEX is the value being switched on, with the lowest value
10248 in the table already subtracted.
10249 MODE is its expected mode (needed if INDEX is constant).
10250 RANGE is the length of the jump table.
10251 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10252
10253 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10254 index value is out of range. */
10255
10256 static void
10257 do_tablejump (index, mode, range, table_label, default_label)
10258 rtx index, range, table_label, default_label;
10259 enum machine_mode mode;
10260 {
10261 rtx temp, vector;
10262
10263 if (INTVAL (range) > cfun->max_jumptable_ents)
10264 cfun->max_jumptable_ents = INTVAL (range);
10265
10266 /* Do an unsigned comparison (in the proper mode) between the index
10267 expression and the value which represents the length of the range.
10268 Since we just finished subtracting the lower bound of the range
10269 from the index expression, this comparison allows us to simultaneously
10270 check that the original index expression value is both greater than
10271 or equal to the minimum value of the range and less than or equal to
10272 the maximum value of the range. */
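/* Worked example (illustrative, not from the original source): for case
   values 3 through 7 the lower bound 3 has already been subtracted, so
   RANGE is 4; the single unsigned test INDEX > 4 then rejects both an
   original index below 3 (which wrapped to a huge unsigned value) and
   one above 7.  */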
10273
10274 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10275 default_label);
10276
10277 /* If index is in range, it must fit in Pmode.
10278 Convert to Pmode so we can index with it. */
10279 if (mode != Pmode)
10280 index = convert_to_mode (Pmode, index, 1);
10281
10282 /* Don't let a MEM slip thru, because then INDEX that comes
10283 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10284 and break_out_memory_refs will go to work on it and mess it up. */
10285 #ifdef PIC_CASE_VECTOR_ADDRESS
10286 if (flag_pic && GET_CODE (index) != REG)
10287 index = copy_to_mode_reg (Pmode, index);
10288 #endif
10289
10290 /* If flag_force_addr were to affect this address
10291 it could interfere with the tricky assumptions made
10292 about addresses that contain label-refs,
10293 which may be valid only very near the tablejump itself. */
10294 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10295 GET_MODE_SIZE, because this indicates how large insns are. The other
10296 uses should all be Pmode, because they are addresses. This code
10297 could fail if addresses and insns are not the same size. */
10298 index = gen_rtx_PLUS (Pmode,
10299 gen_rtx_MULT (Pmode, index,
10300 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10301 gen_rtx_LABEL_REF (Pmode, table_label));
10302 #ifdef PIC_CASE_VECTOR_ADDRESS
10303 if (flag_pic)
10304 index = PIC_CASE_VECTOR_ADDRESS (index);
10305 else
10306 #endif
10307 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10308 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10309 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10310 RTX_UNCHANGING_P (vector) = 1;
10311 MEM_NOTRAP_P (vector) = 1;
10312 convert_move (temp, vector, 0);
10313
10314 emit_jump_insn (gen_tablejump (temp, table_label));
10315
10316 /* If we are generating PIC code or if the table is PC-relative, the
10317 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10318 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10319 emit_barrier ();
10320 }
10321
10322 int
10323 try_tablejump (index_type, index_expr, minval, range,
10324 table_label, default_label)
10325 tree index_type, index_expr, minval, range;
10326 rtx table_label, default_label;
10327 {
10328 rtx index;
10329
10330 if (! HAVE_tablejump)
10331 return 0;
10332
10333 index_expr = fold (build (MINUS_EXPR, index_type,
10334 convert (index_type, index_expr),
10335 convert (index_type, minval)));
10336 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10337 emit_queue ();
10338 index = protect_from_queue (index, 0);
10339 do_pending_stack_adjust ();
10340
10341 do_tablejump (index, TYPE_MODE (index_type),
10342 convert_modes (TYPE_MODE (index_type),
10343 TYPE_MODE (TREE_TYPE (range)),
10344 expand_expr (range, NULL_RTX,
10345 VOIDmode, 0),
10346 TREE_UNSIGNED (TREE_TYPE (range))),
10347 table_label, default_label);
10348 return 1;
10349 }
10350
10351 /* Nonzero if the mode is a valid vector mode for this architecture.
10352 This returns nonzero even if there is no hardware support for the
10353 vector mode, but we can emulate with narrower modes. */
10354
10355 int
10356 vector_mode_valid_p (mode)
10357 enum machine_mode mode;
10358 {
10359 enum mode_class class = GET_MODE_CLASS (mode);
10360 enum machine_mode innermode;
10361
10362 /* Doh! What's going on? */
10363 if (class != MODE_VECTOR_INT
10364 && class != MODE_VECTOR_FLOAT)
10365 return 0;
10366
10367 /* Hardware support. Woo hoo! */
10368 if (VECTOR_MODE_SUPPORTED_P (mode))
10369 return 1;
10370
10371 innermode = GET_MODE_INNER (mode);
10372
10373 /* We should probably return 1 if requesting V4DI when we have no DI
10374 but do have V2DI, but this case is probably very unlikely. */
10375
10376 /* If we have support for the inner mode, we can safely emulate it.
10377 We may not have V2DI, but we can emulate with a pair of DIs. */
10378 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10379 }
10380
10381 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10382 static rtx
10383 const_vector_from_tree (exp)
10384 tree exp;
10385 {
10386 rtvec v;
10387 int units, i;
10388 tree link, elt;
10389 enum machine_mode inner, mode;
10390
10391 mode = TYPE_MODE (TREE_TYPE (exp));
10392
10393 if (is_zeros_p (exp))
10394 return CONST0_RTX (mode);
10395
10396 units = GET_MODE_NUNITS (mode);
10397 inner = GET_MODE_INNER (mode);
10398
10399 v = rtvec_alloc (units);
10400
10401 link = TREE_VECTOR_CST_ELTS (exp);
10402 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10403 {
10404 elt = TREE_VALUE (link);
10405
10406 if (TREE_CODE (elt) == REAL_CST)
10407 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10408 inner);
10409 else
10410 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10411 TREE_INT_CST_HIGH (elt),
10412 inner);
10413 }
10414
10415 return gen_rtx_raw_CONST_VECTOR (mode, v);
10416 }
10417
10418 #include "gt-expr.h"