1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
64
65 #endif
66
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
74
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
79
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
87
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
115 };
116
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
131 };
132
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
170 static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
171 static int is_aligning_offset PARAMS ((tree, tree));
172 static rtx expand_increment PARAMS ((tree, int, int));
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
174 #ifdef PUSH_ROUNDING
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176 #endif
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
178
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
182
183 static char direct_load[NUM_MACHINE_MODES];
184 static char direct_store[NUM_MACHINE_MODES];
185
186 /* Record for each mode whether we can float-extend from memory. */
187
188 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
189
190 /* If a memory-to-memory move would take MOVE_RATIO or more simple
191 move-instruction sequences, we will do a movstr or libcall instead. */
192
193 #ifndef MOVE_RATIO
194 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
195 #define MOVE_RATIO 2
196 #else
197 /* If we are optimizing for space (-Os), cut down the default move ratio. */
198 #define MOVE_RATIO (optimize_size ? 3 : 15)
199 #endif
200 #endif
201
202 /* This macro is used to determine whether move_by_pieces should be called
203 to perform a structure copy. */
204 #ifndef MOVE_BY_PIECES_P
205 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
206 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
207 #endif
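/* For illustration, on a hypothetical 32-bit target with MOVE_MAX == 4,
   no movstr pattern and -Os not in effect, MOVE_RATIO defaults to 15.
   A fully word-aligned 16-byte copy needs four SImode moves, so
   move_by_pieces_ninsns (16, 32) == 4 < 15 and MOVE_BY_PIECES_P is true:
   the copy is expanded inline by move_by_pieces.  A 128-byte copy would
   need 32 such moves, so emit_block_move uses the library call instead.  */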
208
209 /* If a clear memory operation would take CLEAR_RATIO or more simple
210 move-instruction sequences, we will do a clrstr or libcall instead. */
211
212 #ifndef CLEAR_RATIO
213 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
214 #define CLEAR_RATIO 2
215 #else
216 /* If we are optimizing for space, cut down the default clear ratio. */
217 #define CLEAR_RATIO (optimize_size ? 3 : 15)
218 #endif
219 #endif
220
221 /* This macro is used to determine whether clear_by_pieces should be
222 called to clear storage. */
223 #ifndef CLEAR_BY_PIECES_P
224 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
225 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
226 #endif
227
228 /* This macro is used to determine whether store_by_pieces should be
229 called to "memset" storage with byte values other than zero, or
230 to "memcpy" storage when the source is a constant string. */
231 #ifndef STORE_BY_PIECES_P
232 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
233 #endif
234
235 /* This array records the insn_code of insns to perform block moves. */
236 enum insn_code movstr_optab[NUM_MACHINE_MODES];
237
238 /* This array records the insn_code of insns to perform block clears. */
239 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
240
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
242
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
245 #endif
246 \f
247 /* This is run once per compilation to set up which modes can be used
248 directly in memory and to initialize the block move optab. */
249
250 void
251 init_expr_once ()
252 {
253 rtx insn, pat;
254 enum machine_mode mode;
255 int num_clobbers;
256 rtx mem, mem1;
257 rtx reg;
258
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
264
265 /* A scratch register we can modify in-place below to avoid
266 useless RTL allocations. */
267 reg = gen_rtx_REG (VOIDmode, -1);
268
269 insn = rtx_alloc (INSN);
270 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
271 PATTERN (insn) = pat;
272
273 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
274 mode = (enum machine_mode) ((int) mode + 1))
275 {
276 int regno;
277
278 direct_load[(int) mode] = direct_store[(int) mode] = 0;
279 PUT_MODE (mem, mode);
280 PUT_MODE (mem1, mode);
281 PUT_MODE (reg, mode);
282
283 /* See if there is some register that can be used in this mode and
284 directly loaded or stored from memory. */
285
286 if (mode != VOIDmode && mode != BLKmode)
287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
288 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
289 regno++)
290 {
291 if (! HARD_REGNO_MODE_OK (regno, mode))
292 continue;
293
294 REGNO (reg) = regno;
295
296 SET_SRC (pat) = mem;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
300
301 SET_SRC (pat) = mem1;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
305
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
310
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem1;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
315 }
316 }
317
318 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
319
320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
321 mode = GET_MODE_WIDER_MODE (mode))
322 {
323 enum machine_mode srcmode;
324 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
325 srcmode = GET_MODE_WIDER_MODE (srcmode))
326 {
327 enum insn_code ic;
328
329 ic = can_extend_p (mode, srcmode, 0);
330 if (ic == CODE_FOR_nothing)
331 continue;
332
333 PUT_MODE (mem, srcmode);
334
335 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
336 float_extend_from_mem[mode][srcmode] = true;
337 }
338 }
339 }
340
341 /* This is run at the start of compiling a function. */
342
343 void
344 init_expr ()
345 {
346 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
347
348 pending_chain = 0;
349 pending_stack_adjust = 0;
350 stack_pointer_delta = 0;
351 inhibit_defer_pop = 0;
352 saveregs_value = 0;
353 apply_args_value = 0;
354 forced_labels = 0;
355 }
356
357 /* Small sanity check that the queue is empty at the end of a function. */
358
359 void
360 finish_expr_for_function ()
361 {
362 if (pending_chain)
363 abort ();
364 }
365 \f
366 /* Manage the queue of increment instructions to be output
367 for POSTINCREMENT_EXPR expressions, etc. */
368
369 /* Queue up to increment (or change) VAR later. BODY says how:
370 BODY should be the same thing you would pass to emit_insn
371 to increment right away. It will go to emit_insn later on.
372
373 The value is a QUEUED expression to be used in place of VAR
374 where you want to guarantee the pre-incrementation value of VAR. */
375
376 static rtx
377 enqueue_insn (var, body)
378 rtx var, body;
379 {
380 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
381 body, pending_chain);
382 return pending_chain;
383 }
384
385 /* Use protect_from_queue to convert a QUEUED expression
386 into something that you can put immediately into an instruction.
387 If the queued incrementation has not happened yet,
388 protect_from_queue returns the variable itself.
389 If the incrementation has happened, protect_from_queue returns a temp
390 that contains a copy of the old value of the variable.
391
392 Any time an rtx which might possibly be a QUEUED is to be put
393 into an instruction, it must be passed through protect_from_queue first.
394 QUEUED expressions are not meaningful in instructions.
395
396 Do not pass a value through protect_from_queue and then hold
397 on to it for a while before putting it in an instruction!
398 If the queue is flushed in between, incorrect code will result. */
399
400 rtx
401 protect_from_queue (x, modify)
402 rtx x;
403 int modify;
404 {
405 RTX_CODE code = GET_CODE (x);
406
407 #if 0 /* A QUEUED can hang around after the queue is forced out. */
408 /* Shortcut for most common case. */
409 if (pending_chain == 0)
410 return x;
411 #endif
412
413 if (code != QUEUED)
414 {
415 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
416 use of autoincrement. Make a copy of the contents of the memory
417 location rather than a copy of the address, but not if the value is
418 of mode BLKmode. Don't modify X in place since it might be
419 shared. */
420 if (code == MEM && GET_MODE (x) != BLKmode
421 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
422 {
423 rtx y = XEXP (x, 0);
424 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
425
426 if (QUEUED_INSN (y))
427 {
428 rtx temp = gen_reg_rtx (GET_MODE (x));
429
430 emit_insn_before (gen_move_insn (temp, new),
431 QUEUED_INSN (y));
432 return temp;
433 }
434
435 /* Copy the address into a pseudo, so that the returned value
436 remains correct across calls to emit_queue. */
437 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
438 }
439
440 /* Otherwise, recursively protect the subexpressions of all
441 the kinds of rtx's that can contain a QUEUED. */
442 if (code == MEM)
443 {
444 rtx tem = protect_from_queue (XEXP (x, 0), 0);
445 if (tem != XEXP (x, 0))
446 {
447 x = copy_rtx (x);
448 XEXP (x, 0) = tem;
449 }
450 }
451 else if (code == PLUS || code == MULT)
452 {
453 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
454 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
455 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
456 {
457 x = copy_rtx (x);
458 XEXP (x, 0) = new0;
459 XEXP (x, 1) = new1;
460 }
461 }
462 return x;
463 }
464 /* If the increment has not happened, use the variable itself. Copy it
465 into a new pseudo so that the value remains correct across calls to
466 emit_queue. */
467 if (QUEUED_INSN (x) == 0)
468 return copy_to_reg (QUEUED_VAR (x));
469 /* If the increment has happened and a pre-increment copy exists,
470 use that copy. */
471 if (QUEUED_COPY (x) != 0)
472 return QUEUED_COPY (x);
473 /* The increment has happened but we haven't set up a pre-increment copy.
474 Set one up now, and use it. */
475 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
476 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
477 QUEUED_INSN (x));
478 return QUEUED_COPY (x);
479 }
480
481 /* Return nonzero if X contains a QUEUED expression:
482 if it contains anything that will be altered by a queued increment.
483 We handle only combinations of MEM, PLUS, MINUS and MULT operators
484 since memory addresses generally contain only those. */
485
486 int
487 queued_subexp_p (x)
488 rtx x;
489 {
490 enum rtx_code code = GET_CODE (x);
491 switch (code)
492 {
493 case QUEUED:
494 return 1;
495 case MEM:
496 return queued_subexp_p (XEXP (x, 0));
497 case MULT:
498 case PLUS:
499 case MINUS:
500 return (queued_subexp_p (XEXP (x, 0))
501 || queued_subexp_p (XEXP (x, 1)));
502 default:
503 return 0;
504 }
505 }
506
507 /* Perform all the pending incrementations. */
508
509 void
510 emit_queue ()
511 {
512 rtx p;
513 while ((p = pending_chain))
514 {
515 rtx body = QUEUED_BODY (p);
516
517 switch (GET_CODE (body))
518 {
519 case INSN:
520 case JUMP_INSN:
521 case CALL_INSN:
522 case CODE_LABEL:
523 case BARRIER:
524 case NOTE:
525 QUEUED_INSN (p) = body;
526 emit_insn (body);
527 break;
528
529 #ifdef ENABLE_CHECKING
530 case SEQUENCE:
531 abort ();
532 break;
533 #endif
534
535 default:
536 QUEUED_INSN (p) = emit_insn (body);
537 break;
538 }
539
540 pending_chain = QUEUED_NEXT (p);
541 }
542 }
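/* A minimal sketch of the intended calling pattern, assuming VAR is a
   pseudo whose post-increment is being expanded:

       rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
       ...
       rtx val = protect_from_queue (q, 0);

   VAL is the pre-increment value whether or not the queue has been flushed
   yet; it must be consumed before the next call to emit_queue, which
   finally emits the queued increment.  */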
543 \f
544 /* Copy data from FROM to TO, where the machine modes are not the same.
545 Both modes may be integer, or both may be floating.
546 UNSIGNEDP should be nonzero if FROM is an unsigned type.
547 This causes zero-extension instead of sign-extension. */
548
549 void
550 convert_move (to, from, unsignedp)
551 rtx to, from;
552 int unsignedp;
553 {
554 enum machine_mode to_mode = GET_MODE (to);
555 enum machine_mode from_mode = GET_MODE (from);
556 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
557 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
558 enum insn_code code;
559 rtx libcall;
560
561 /* rtx code for making an equivalent value. */
562 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
563 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
564
565 to = protect_from_queue (to, 1);
566 from = protect_from_queue (from, 0);
567
568 if (to_real != from_real)
569 abort ();
570
571 /* If FROM is a SUBREG that indicates that we have already done at least
572 the required extension, strip it. We don't handle such SUBREGs as
573 TO here. */
574
575 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
576 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
577 >= GET_MODE_SIZE (to_mode))
578 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
579 from = gen_lowpart (to_mode, from), from_mode = to_mode;
580
581 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
582 abort ();
583
584 if (to_mode == from_mode
585 || (from_mode == VOIDmode && CONSTANT_P (from)))
586 {
587 emit_move_insn (to, from);
588 return;
589 }
590
591 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
592 {
593 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
594 abort ();
595
596 if (VECTOR_MODE_P (to_mode))
597 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
598 else
599 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
600
601 emit_move_insn (to, from);
602 return;
603 }
604
605 if (to_real != from_real)
606 abort ();
607
608 if (to_real)
609 {
610 rtx value, insns;
611
612 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
613 {
614 /* Try converting directly if the insn is supported. */
615 if ((code = can_extend_p (to_mode, from_mode, 0))
616 != CODE_FOR_nothing)
617 {
618 emit_unop_insn (code, to, from, UNKNOWN);
619 return;
620 }
621 }
622
623 #ifdef HAVE_trunchfqf2
624 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
625 {
626 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
627 return;
628 }
629 #endif
630 #ifdef HAVE_trunctqfqf2
631 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
632 {
633 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
634 return;
635 }
636 #endif
637 #ifdef HAVE_truncsfqf2
638 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
639 {
640 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
641 return;
642 }
643 #endif
644 #ifdef HAVE_truncdfqf2
645 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
646 {
647 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
648 return;
649 }
650 #endif
651 #ifdef HAVE_truncxfqf2
652 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
653 {
654 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
655 return;
656 }
657 #endif
658 #ifdef HAVE_trunctfqf2
659 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
660 {
661 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
662 return;
663 }
664 #endif
665
666 #ifdef HAVE_trunctqfhf2
667 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
668 {
669 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
670 return;
671 }
672 #endif
673 #ifdef HAVE_truncsfhf2
674 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
675 {
676 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
677 return;
678 }
679 #endif
680 #ifdef HAVE_truncdfhf2
681 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
682 {
683 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
684 return;
685 }
686 #endif
687 #ifdef HAVE_truncxfhf2
688 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
689 {
690 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
691 return;
692 }
693 #endif
694 #ifdef HAVE_trunctfhf2
695 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
696 {
697 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
698 return;
699 }
700 #endif
701
702 #ifdef HAVE_truncsftqf2
703 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
704 {
705 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
706 return;
707 }
708 #endif
709 #ifdef HAVE_truncdftqf2
710 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
711 {
712 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
713 return;
714 }
715 #endif
716 #ifdef HAVE_truncxftqf2
717 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
718 {
719 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
720 return;
721 }
722 #endif
723 #ifdef HAVE_trunctftqf2
724 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
725 {
726 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
727 return;
728 }
729 #endif
730
731 #ifdef HAVE_truncdfsf2
732 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
733 {
734 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
735 return;
736 }
737 #endif
738 #ifdef HAVE_truncxfsf2
739 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
740 {
741 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
742 return;
743 }
744 #endif
745 #ifdef HAVE_trunctfsf2
746 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
747 {
748 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
749 return;
750 }
751 #endif
752 #ifdef HAVE_truncxfdf2
753 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
754 {
755 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
756 return;
757 }
758 #endif
759 #ifdef HAVE_trunctfdf2
760 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
761 {
762 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
763 return;
764 }
765 #endif
766
767 libcall = (rtx) 0;
768 switch (from_mode)
769 {
770 case SFmode:
771 switch (to_mode)
772 {
773 case DFmode:
774 libcall = extendsfdf2_libfunc;
775 break;
776
777 case XFmode:
778 libcall = extendsfxf2_libfunc;
779 break;
780
781 case TFmode:
782 libcall = extendsftf2_libfunc;
783 break;
784
785 default:
786 break;
787 }
788 break;
789
790 case DFmode:
791 switch (to_mode)
792 {
793 case SFmode:
794 libcall = truncdfsf2_libfunc;
795 break;
796
797 case XFmode:
798 libcall = extenddfxf2_libfunc;
799 break;
800
801 case TFmode:
802 libcall = extenddftf2_libfunc;
803 break;
804
805 default:
806 break;
807 }
808 break;
809
810 case XFmode:
811 switch (to_mode)
812 {
813 case SFmode:
814 libcall = truncxfsf2_libfunc;
815 break;
816
817 case DFmode:
818 libcall = truncxfdf2_libfunc;
819 break;
820
821 default:
822 break;
823 }
824 break;
825
826 case TFmode:
827 switch (to_mode)
828 {
829 case SFmode:
830 libcall = trunctfsf2_libfunc;
831 break;
832
833 case DFmode:
834 libcall = trunctfdf2_libfunc;
835 break;
836
837 default:
838 break;
839 }
840 break;
841
842 default:
843 break;
844 }
845
846 if (libcall == (rtx) 0)
847 /* This conversion is not implemented yet. */
848 abort ();
849
850 start_sequence ();
851 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
852 1, from, from_mode);
853 insns = get_insns ();
854 end_sequence ();
855 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
856 from));
857 return;
858 }
859
860 /* Now both modes are integers. */
861
862 /* Handle expanding beyond a word. */
863 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
864 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
865 {
866 rtx insns;
867 rtx lowpart;
868 rtx fill_value;
869 rtx lowfrom;
870 int i;
871 enum machine_mode lowpart_mode;
872 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
873
874 /* Try converting directly if the insn is supported. */
875 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
876 != CODE_FOR_nothing)
877 {
878 /* If FROM is a SUBREG, put it into a register. Do this
879 so that we always generate the same set of insns for
880 better cse'ing; if an intermediate assignment occurred,
881 we won't be doing the operation directly on the SUBREG. */
882 if (optimize > 0 && GET_CODE (from) == SUBREG)
883 from = force_reg (from_mode, from);
884 emit_unop_insn (code, to, from, equiv_code);
885 return;
886 }
887 /* Next, try converting via full word. */
888 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
889 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
890 != CODE_FOR_nothing))
891 {
892 if (GET_CODE (to) == REG)
893 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
894 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
895 emit_unop_insn (code, to,
896 gen_lowpart (word_mode, to), equiv_code);
897 return;
898 }
899
900 /* No special multiword conversion insn; do it by hand. */
901 start_sequence ();
902
903 /* Since we will turn this into a no conflict block, we must ensure
904 that the source does not overlap the target. */
905
906 if (reg_overlap_mentioned_p (to, from))
907 from = force_reg (from_mode, from);
908
909 /* Get a copy of FROM widened to a word, if necessary. */
910 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
911 lowpart_mode = word_mode;
912 else
913 lowpart_mode = from_mode;
914
915 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
916
917 lowpart = gen_lowpart (lowpart_mode, to);
918 emit_move_insn (lowpart, lowfrom);
919
920 /* Compute the value to put in each remaining word. */
921 if (unsignedp)
922 fill_value = const0_rtx;
923 else
924 {
925 #ifdef HAVE_slt
926 if (HAVE_slt
927 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
928 && STORE_FLAG_VALUE == -1)
929 {
930 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
931 lowpart_mode, 0);
932 fill_value = gen_reg_rtx (word_mode);
933 emit_insn (gen_slt (fill_value));
934 }
935 else
936 #endif
937 {
938 fill_value
939 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
940 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
941 NULL_RTX, 0);
942 fill_value = convert_to_mode (word_mode, fill_value, 1);
943 }
944 }
945
946 /* Fill the remaining words. */
947 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
948 {
949 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
950 rtx subword = operand_subword (to, index, 1, to_mode);
951
952 if (subword == 0)
953 abort ();
954
955 if (fill_value != subword)
956 emit_move_insn (subword, fill_value);
957 }
958
959 insns = get_insns ();
960 end_sequence ();
961
962 emit_no_conflict_block (insns, to, from, NULL_RTX,
963 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
964 return;
965 }
966
967 /* Truncating multi-word to a word or less. */
968 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
969 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
970 {
971 if (!((GET_CODE (from) == MEM
972 && ! MEM_VOLATILE_P (from)
973 && direct_load[(int) to_mode]
974 && ! mode_dependent_address_p (XEXP (from, 0)))
975 || GET_CODE (from) == REG
976 || GET_CODE (from) == SUBREG))
977 from = force_reg (from_mode, from);
978 convert_move (to, gen_lowpart (word_mode, from), 0);
979 return;
980 }
981
982 /* Handle pointer conversion. */ /* SPEE 900220. */
983 if (to_mode == PQImode)
984 {
985 if (from_mode != QImode)
986 from = convert_to_mode (QImode, from, unsignedp);
987
988 #ifdef HAVE_truncqipqi2
989 if (HAVE_truncqipqi2)
990 {
991 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
992 return;
993 }
994 #endif /* HAVE_truncqipqi2 */
995 abort ();
996 }
997
998 if (from_mode == PQImode)
999 {
1000 if (to_mode != QImode)
1001 {
1002 from = convert_to_mode (QImode, from, unsignedp);
1003 from_mode = QImode;
1004 }
1005 else
1006 {
1007 #ifdef HAVE_extendpqiqi2
1008 if (HAVE_extendpqiqi2)
1009 {
1010 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1011 return;
1012 }
1013 #endif /* HAVE_extendpqiqi2 */
1014 abort ();
1015 }
1016 }
1017
1018 if (to_mode == PSImode)
1019 {
1020 if (from_mode != SImode)
1021 from = convert_to_mode (SImode, from, unsignedp);
1022
1023 #ifdef HAVE_truncsipsi2
1024 if (HAVE_truncsipsi2)
1025 {
1026 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1027 return;
1028 }
1029 #endif /* HAVE_truncsipsi2 */
1030 abort ();
1031 }
1032
1033 if (from_mode == PSImode)
1034 {
1035 if (to_mode != SImode)
1036 {
1037 from = convert_to_mode (SImode, from, unsignedp);
1038 from_mode = SImode;
1039 }
1040 else
1041 {
1042 #ifdef HAVE_extendpsisi2
1043 if (! unsignedp && HAVE_extendpsisi2)
1044 {
1045 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1046 return;
1047 }
1048 #endif /* HAVE_extendpsisi2 */
1049 #ifdef HAVE_zero_extendpsisi2
1050 if (unsignedp && HAVE_zero_extendpsisi2)
1051 {
1052 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1053 return;
1054 }
1055 #endif /* HAVE_zero_extendpsisi2 */
1056 abort ();
1057 }
1058 }
1059
1060 if (to_mode == PDImode)
1061 {
1062 if (from_mode != DImode)
1063 from = convert_to_mode (DImode, from, unsignedp);
1064
1065 #ifdef HAVE_truncdipdi2
1066 if (HAVE_truncdipdi2)
1067 {
1068 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1069 return;
1070 }
1071 #endif /* HAVE_truncdipdi2 */
1072 abort ();
1073 }
1074
1075 if (from_mode == PDImode)
1076 {
1077 if (to_mode != DImode)
1078 {
1079 from = convert_to_mode (DImode, from, unsignedp);
1080 from_mode = DImode;
1081 }
1082 else
1083 {
1084 #ifdef HAVE_extendpdidi2
1085 if (HAVE_extendpdidi2)
1086 {
1087 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1088 return;
1089 }
1090 #endif /* HAVE_extendpdidi2 */
1091 abort ();
1092 }
1093 }
1094
1095 /* Now follow all the conversions between integers
1096 no more than a word long. */
1097
1098 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1099 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1100 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1101 GET_MODE_BITSIZE (from_mode)))
1102 {
1103 if (!((GET_CODE (from) == MEM
1104 && ! MEM_VOLATILE_P (from)
1105 && direct_load[(int) to_mode]
1106 && ! mode_dependent_address_p (XEXP (from, 0)))
1107 || GET_CODE (from) == REG
1108 || GET_CODE (from) == SUBREG))
1109 from = force_reg (from_mode, from);
1110 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1111 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1112 from = copy_to_reg (from);
1113 emit_move_insn (to, gen_lowpart (to_mode, from));
1114 return;
1115 }
1116
1117 /* Handle extension. */
1118 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1119 {
1120 /* Convert directly if that works. */
1121 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1122 != CODE_FOR_nothing)
1123 {
1124 if (flag_force_mem)
1125 from = force_not_mem (from);
1126
1127 emit_unop_insn (code, to, from, equiv_code);
1128 return;
1129 }
1130 else
1131 {
1132 enum machine_mode intermediate;
1133 rtx tmp;
1134 tree shift_amount;
1135
1136 /* Search for a mode to convert via. */
1137 for (intermediate = from_mode; intermediate != VOIDmode;
1138 intermediate = GET_MODE_WIDER_MODE (intermediate))
1139 if (((can_extend_p (to_mode, intermediate, unsignedp)
1140 != CODE_FOR_nothing)
1141 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1142 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1143 GET_MODE_BITSIZE (intermediate))))
1144 && (can_extend_p (intermediate, from_mode, unsignedp)
1145 != CODE_FOR_nothing))
1146 {
1147 convert_move (to, convert_to_mode (intermediate, from,
1148 unsignedp), unsignedp);
1149 return;
1150 }
1151
1152 /* No suitable intermediate mode.
1153 Generate what we need with shifts. */
1154 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1155 - GET_MODE_BITSIZE (from_mode), 0);
1156 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1157 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1158 to, unsignedp);
1159 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1160 to, unsignedp);
1161 if (tmp != to)
1162 emit_move_insn (to, tmp);
1163 return;
1164 }
1165 }
1166
1167 /* Support special truncate insns for certain modes. */
1168
1169 if (from_mode == DImode && to_mode == SImode)
1170 {
1171 #ifdef HAVE_truncdisi2
1172 if (HAVE_truncdisi2)
1173 {
1174 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1175 return;
1176 }
1177 #endif
1178 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 return;
1180 }
1181
1182 if (from_mode == DImode && to_mode == HImode)
1183 {
1184 #ifdef HAVE_truncdihi2
1185 if (HAVE_truncdihi2)
1186 {
1187 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1188 return;
1189 }
1190 #endif
1191 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 return;
1193 }
1194
1195 if (from_mode == DImode && to_mode == QImode)
1196 {
1197 #ifdef HAVE_truncdiqi2
1198 if (HAVE_truncdiqi2)
1199 {
1200 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1201 return;
1202 }
1203 #endif
1204 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 return;
1206 }
1207
1208 if (from_mode == SImode && to_mode == HImode)
1209 {
1210 #ifdef HAVE_truncsihi2
1211 if (HAVE_truncsihi2)
1212 {
1213 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1214 return;
1215 }
1216 #endif
1217 convert_move (to, force_reg (from_mode, from), unsignedp);
1218 return;
1219 }
1220
1221 if (from_mode == SImode && to_mode == QImode)
1222 {
1223 #ifdef HAVE_truncsiqi2
1224 if (HAVE_truncsiqi2)
1225 {
1226 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1227 return;
1228 }
1229 #endif
1230 convert_move (to, force_reg (from_mode, from), unsignedp);
1231 return;
1232 }
1233
1234 if (from_mode == HImode && to_mode == QImode)
1235 {
1236 #ifdef HAVE_trunchiqi2
1237 if (HAVE_trunchiqi2)
1238 {
1239 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1240 return;
1241 }
1242 #endif
1243 convert_move (to, force_reg (from_mode, from), unsignedp);
1244 return;
1245 }
1246
1247 if (from_mode == TImode && to_mode == DImode)
1248 {
1249 #ifdef HAVE_trunctidi2
1250 if (HAVE_trunctidi2)
1251 {
1252 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1253 return;
1254 }
1255 #endif
1256 convert_move (to, force_reg (from_mode, from), unsignedp);
1257 return;
1258 }
1259
1260 if (from_mode == TImode && to_mode == SImode)
1261 {
1262 #ifdef HAVE_trunctisi2
1263 if (HAVE_trunctisi2)
1264 {
1265 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1266 return;
1267 }
1268 #endif
1269 convert_move (to, force_reg (from_mode, from), unsignedp);
1270 return;
1271 }
1272
1273 if (from_mode == TImode && to_mode == HImode)
1274 {
1275 #ifdef HAVE_trunctihi2
1276 if (HAVE_trunctihi2)
1277 {
1278 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1279 return;
1280 }
1281 #endif
1282 convert_move (to, force_reg (from_mode, from), unsignedp);
1283 return;
1284 }
1285
1286 if (from_mode == TImode && to_mode == QImode)
1287 {
1288 #ifdef HAVE_trunctiqi2
1289 if (HAVE_trunctiqi2)
1290 {
1291 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1292 return;
1293 }
1294 #endif
1295 convert_move (to, force_reg (from_mode, from), unsignedp);
1296 return;
1297 }
1298
1299 /* Handle truncation of volatile memrefs, and so on;
1300 the things that couldn't be truncated directly,
1301 and for which there was no special instruction. */
1302 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1303 {
1304 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1305 emit_move_insn (to, temp);
1306 return;
1307 }
1308
1309 /* Mode combination is not recognized. */
1310 abort ();
1311 }
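/* For illustration, with hypothetical integer pseudos of different widths
   the caller selects the extension through UNSIGNEDP:

       rtx narrow = gen_reg_rtx (QImode);
       rtx wide = gen_reg_rtx (SImode);
       convert_move (wide, narrow, 0);
       convert_move (wide, narrow, 1);

   The first call sign-extends NARROW into WIDE, the second zero-extends it.
   Converting the other way simply takes the low-order part of the wider
   value.  */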
1312
1313 /* Return an rtx for a value that would result
1314 from converting X to mode MODE.
1315 Both X and MODE may be floating, or both integer.
1316 UNSIGNEDP is nonzero if X is an unsigned value.
1317 This can be done by referring to a part of X in place
1318 or by copying to a new temporary with conversion.
1319
1320 This function *must not* call protect_from_queue
1321 except when putting X into an insn (in which case convert_move does it). */
1322
1323 rtx
1324 convert_to_mode (mode, x, unsignedp)
1325 enum machine_mode mode;
1326 rtx x;
1327 int unsignedp;
1328 {
1329 return convert_modes (mode, VOIDmode, x, unsignedp);
1330 }
1331
1332 /* Return an rtx for a value that would result
1333 from converting X from mode OLDMODE to mode MODE.
1334 Both modes may be floating, or both integer.
1335 UNSIGNEDP is nonzero if X is an unsigned value.
1336
1337 This can be done by referring to a part of X in place
1338 or by copying to a new temporary with conversion.
1339
1340 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1341
1342 This function *must not* call protect_from_queue
1343 except when putting X into an insn (in which case convert_move does it). */
1344
1345 rtx
1346 convert_modes (mode, oldmode, x, unsignedp)
1347 enum machine_mode mode, oldmode;
1348 rtx x;
1349 int unsignedp;
1350 {
1351 rtx temp;
1352
1353 /* If FROM is a SUBREG that indicates that we have already done at least
1354 the required extension, strip it. */
1355 /* If X is a SUBREG that indicates that we have already done at least
1356 the required extension, strip it. */
1357 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1358 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1359 x = gen_lowpart (mode, x);
1360
1361 if (GET_MODE (x) != VOIDmode)
1362 oldmode = GET_MODE (x);
1363
1364 if (mode == oldmode)
1365 return x;
1366
1367 /* There is one case that we must handle specially: If we are converting
1368 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1369 we are to interpret the constant as unsigned, gen_lowpart will do
1370 the wrong thing if the constant appears negative. What we want to do is
1371 make the high-order word of the constant zero, not all ones. */
1372
1373 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1374 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1375 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1376 {
1377 HOST_WIDE_INT val = INTVAL (x);
1378
1379 if (oldmode != VOIDmode
1380 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1381 {
1382 int width = GET_MODE_BITSIZE (oldmode);
1383
1384 /* We need to zero extend VAL. */
1385 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1386 }
1387
1388 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1389 }
1390
1391 /* We can do this with a gen_lowpart if both desired and current modes
1392 are integer, and this is either a constant integer, a register, or a
1393 non-volatile MEM. Except for the constant case where MODE is no
1394 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1395
1396 if ((GET_CODE (x) == CONST_INT
1397 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1398 || (GET_MODE_CLASS (mode) == MODE_INT
1399 && GET_MODE_CLASS (oldmode) == MODE_INT
1400 && (GET_CODE (x) == CONST_DOUBLE
1401 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1402 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1403 && direct_load[(int) mode])
1404 || (GET_CODE (x) == REG
1405 && (! HARD_REGISTER_P (x)
1406 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1407 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1408 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1409 {
1410 /* ?? If we don't know OLDMODE, we have to assume here that
1411 X does not need sign- or zero-extension. This may not be
1412 the case, but it's the best we can do. */
1413 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1414 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1415 {
1416 HOST_WIDE_INT val = INTVAL (x);
1417 int width = GET_MODE_BITSIZE (oldmode);
1418
1419 /* We must sign or zero-extend in this case. Start by
1420 zero-extending, then sign extend if we need to. */
1421 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1422 if (! unsignedp
1423 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1424 val |= (HOST_WIDE_INT) (-1) << width;
1425
1426 return gen_int_mode (val, mode);
1427 }
1428
1429 return gen_lowpart (mode, x);
1430 }
1431
1432 temp = gen_reg_rtx (mode);
1433 convert_move (temp, x, unsignedp);
1434 return temp;
1435 }
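/* A worked example of the CONST_INT handling above (hypothetical call):
   convert_modes (SImode, QImode, GEN_INT (-1), 1) masks the value to the
   eight low-order bits and returns (const_int 255); with UNSIGNEDP == 0 the
   value is sign-extended again and (const_int -1) comes back instead.  */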
1436 \f
1437 /* This macro determines the largest unit size that move_by_pieces
1438 can use. */
1439
1440 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1441 move efficiently, as opposed to MOVE_MAX which is the maximum
1442 number of bytes we can move with a single instruction. */
1443
1444 #ifndef MOVE_MAX_PIECES
1445 #define MOVE_MAX_PIECES MOVE_MAX
1446 #endif
1447
1448 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1449 store efficiently. Due to internal GCC limitations, this is
1450 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1451 for an immediate constant. */
1452
1453 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1454
1455 /* Generate several move instructions to copy LEN bytes from block FROM to
1456 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1457 and TO through protect_from_queue before calling.
1458
1459 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1460 used to push FROM to the stack.
1461
1462 ALIGN is maximum alignment we can assume. */
1463
1464 void
1465 move_by_pieces (to, from, len, align)
1466 rtx to, from;
1467 unsigned HOST_WIDE_INT len;
1468 unsigned int align;
1469 {
1470 struct move_by_pieces data;
1471 rtx to_addr, from_addr = XEXP (from, 0);
1472 unsigned int max_size = MOVE_MAX_PIECES + 1;
1473 enum machine_mode mode = VOIDmode, tmode;
1474 enum insn_code icode;
1475
1476 data.offset = 0;
1477 data.from_addr = from_addr;
1478 if (to)
1479 {
1480 to_addr = XEXP (to, 0);
1481 data.to = to;
1482 data.autinc_to
1483 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1484 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1485 data.reverse
1486 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1487 }
1488 else
1489 {
1490 to_addr = NULL_RTX;
1491 data.to = NULL_RTX;
1492 data.autinc_to = 1;
1493 #ifdef STACK_GROWS_DOWNWARD
1494 data.reverse = 1;
1495 #else
1496 data.reverse = 0;
1497 #endif
1498 }
1499 data.to_addr = to_addr;
1500 data.from = from;
1501 data.autinc_from
1502 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1503 || GET_CODE (from_addr) == POST_INC
1504 || GET_CODE (from_addr) == POST_DEC);
1505
1506 data.explicit_inc_from = 0;
1507 data.explicit_inc_to = 0;
1508 if (data.reverse) data.offset = len;
1509 data.len = len;
1510
1511 /* If copying requires more than two move insns,
1512 copy addresses to registers (to make displacements shorter)
1513 and use post-increment if available. */
1514 if (!(data.autinc_from && data.autinc_to)
1515 && move_by_pieces_ninsns (len, align) > 2)
1516 {
1517 /* Find the mode of the largest move... */
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1521 mode = tmode;
1522
1523 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1524 {
1525 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1526 data.autinc_from = 1;
1527 data.explicit_inc_from = -1;
1528 }
1529 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1530 {
1531 data.from_addr = copy_addr_to_reg (from_addr);
1532 data.autinc_from = 1;
1533 data.explicit_inc_from = 1;
1534 }
1535 if (!data.autinc_from && CONSTANT_P (from_addr))
1536 data.from_addr = copy_addr_to_reg (from_addr);
1537 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1538 {
1539 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1540 data.autinc_to = 1;
1541 data.explicit_inc_to = -1;
1542 }
1543 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1544 {
1545 data.to_addr = copy_addr_to_reg (to_addr);
1546 data.autinc_to = 1;
1547 data.explicit_inc_to = 1;
1548 }
1549 if (!data.autinc_to && CONSTANT_P (to_addr))
1550 data.to_addr = copy_addr_to_reg (to_addr);
1551 }
1552
1553 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1554 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1555 align = MOVE_MAX * BITS_PER_UNIT;
1556
1557 /* First move what we can in the largest integer mode, then go to
1558 successively smaller modes. */
1559
1560 while (max_size > 1)
1561 {
1562 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1563 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1564 if (GET_MODE_SIZE (tmode) < max_size)
1565 mode = tmode;
1566
1567 if (mode == VOIDmode)
1568 break;
1569
1570 icode = mov_optab->handlers[(int) mode].insn_code;
1571 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1572 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1573
1574 max_size = GET_MODE_SIZE (mode);
1575 }
1576
1577 /* The code above should have handled everything. */
1578 if (data.len > 0)
1579 abort ();
1580 }
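/* Illustrative use, mirroring the test made in emit_block_move below:
   given BLKmode MEMs DEST and SRC, a constant length LEN and their shared
   alignment ALIGN in bits,

       if (MOVE_BY_PIECES_P (len, align))
         move_by_pieces (dest, src, len, align);

   and otherwise the copy goes through a movstr pattern or a libcall.  */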
1581
1582 /* Return number of insns required to move L bytes by pieces.
1583 ALIGN (in bits) is maximum alignment we can assume. */
1584
1585 static unsigned HOST_WIDE_INT
1586 move_by_pieces_ninsns (l, align)
1587 unsigned HOST_WIDE_INT l;
1588 unsigned int align;
1589 {
1590 unsigned HOST_WIDE_INT n_insns = 0;
1591 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1592
1593 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1594 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1595 align = MOVE_MAX * BITS_PER_UNIT;
1596
1597 while (max_size > 1)
1598 {
1599 enum machine_mode mode = VOIDmode, tmode;
1600 enum insn_code icode;
1601
1602 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1603 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1604 if (GET_MODE_SIZE (tmode) < max_size)
1605 mode = tmode;
1606
1607 if (mode == VOIDmode)
1608 break;
1609
1610 icode = mov_optab->handlers[(int) mode].insn_code;
1611 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1612 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1613
1614 max_size = GET_MODE_SIZE (mode);
1615 }
1616
1617 if (l)
1618 abort ();
1619 return n_insns;
1620 }
1621
1622 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1623 with move instructions for mode MODE. GENFUN is the gen_... function
1624 to make a move insn for that mode. DATA has all the other info. */
1625
1626 static void
1627 move_by_pieces_1 (genfun, mode, data)
1628 rtx (*genfun) PARAMS ((rtx, ...));
1629 enum machine_mode mode;
1630 struct move_by_pieces *data;
1631 {
1632 unsigned int size = GET_MODE_SIZE (mode);
1633 rtx to1 = NULL_RTX, from1;
1634
1635 while (data->len >= size)
1636 {
1637 if (data->reverse)
1638 data->offset -= size;
1639
1640 if (data->to)
1641 {
1642 if (data->autinc_to)
1643 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1644 data->offset);
1645 else
1646 to1 = adjust_address (data->to, mode, data->offset);
1647 }
1648
1649 if (data->autinc_from)
1650 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1651 data->offset);
1652 else
1653 from1 = adjust_address (data->from, mode, data->offset);
1654
1655 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1656 emit_insn (gen_add2_insn (data->to_addr,
1657 GEN_INT (-(HOST_WIDE_INT)size)));
1658 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1659 emit_insn (gen_add2_insn (data->from_addr,
1660 GEN_INT (-(HOST_WIDE_INT)size)));
1661
1662 if (data->to)
1663 emit_insn ((*genfun) (to1, from1));
1664 else
1665 {
1666 #ifdef PUSH_ROUNDING
1667 emit_single_push_insn (mode, from1, NULL);
1668 #else
1669 abort ();
1670 #endif
1671 }
1672
1673 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1674 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1675 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1676 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1677
1678 if (! data->reverse)
1679 data->offset += size;
1680
1681 data->len -= size;
1682 }
1683 }
1684 \f
1685 /* Emit code to move a block Y to a block X. This may be done with
1686 string-move instructions, with multiple scalar move instructions,
1687 or with a library call.
1688
1689 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1690 SIZE is an rtx that says how long they are.
1691 ALIGN is the maximum alignment we can assume they have.
1692 METHOD describes what kind of copy this is, and what mechanisms may be used.
1693
1694 Return the address of the new block, if memcpy is called and returns it,
1695 0 otherwise. */
1696
1697 rtx
1698 emit_block_move (x, y, size, method)
1699 rtx x, y, size;
1700 enum block_op_methods method;
1701 {
1702 bool may_use_call;
1703 rtx retval = 0;
1704 unsigned int align;
1705
1706 switch (method)
1707 {
1708 case BLOCK_OP_NORMAL:
1709 may_use_call = true;
1710 break;
1711
1712 case BLOCK_OP_CALL_PARM:
1713 may_use_call = block_move_libcall_safe_for_call_parm ();
1714
1715 /* Make inhibit_defer_pop nonzero around the library call
1716 to force it to pop the arguments right away. */
1717 NO_DEFER_POP;
1718 break;
1719
1720 case BLOCK_OP_NO_LIBCALL:
1721 may_use_call = false;
1722 break;
1723
1724 default:
1725 abort ();
1726 }
1727
1728 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1729
1730 if (GET_MODE (x) != BLKmode)
1731 abort ();
1732 if (GET_MODE (y) != BLKmode)
1733 abort ();
1734
1735 x = protect_from_queue (x, 1);
1736 y = protect_from_queue (y, 0);
1737 size = protect_from_queue (size, 0);
1738
1739 if (GET_CODE (x) != MEM)
1740 abort ();
1741 if (GET_CODE (y) != MEM)
1742 abort ();
1743 if (size == 0)
1744 abort ();
1745
1746 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1747 can be incorrect is coming from __builtin_memcpy. */
1748 if (GET_CODE (size) == CONST_INT)
1749 {
1750 x = shallow_copy_rtx (x);
1751 y = shallow_copy_rtx (y);
1752 set_mem_size (x, size);
1753 set_mem_size (y, size);
1754 }
1755
1756 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1757 move_by_pieces (x, y, INTVAL (size), align);
1758 else if (emit_block_move_via_movstr (x, y, size, align))
1759 ;
1760 else if (may_use_call)
1761 retval = emit_block_move_via_libcall (x, y, size);
1762 else
1763 emit_block_move_via_loop (x, y, size, align);
1764
1765 if (method == BLOCK_OP_CALL_PARM)
1766 OK_DEFER_POP;
1767
1768 return retval;
1769 }
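/* For illustration, with hypothetical BLKmode MEMs DEST and SRC and a
   known size of N bytes:

       rtx ret = emit_block_move (dest, src, GEN_INT (n), BLOCK_OP_NORMAL);

   RET is the value returned by memcpy when the copy was done via the
   library call, and 0 otherwise, as documented above.  */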
1770
1771 /* A subroutine of emit_block_move. Returns true if calling the
1772 block move libcall will not clobber any parameters which may have
1773 already been placed on the stack. */
1774
1775 static bool
1776 block_move_libcall_safe_for_call_parm ()
1777 {
1778 if (PUSH_ARGS)
1779 return true;
1780 else
1781 {
1782 /* Check to see whether memcpy takes all register arguments. */
1783 static enum {
1784 takes_regs_uninit, takes_regs_no, takes_regs_yes
1785 } takes_regs = takes_regs_uninit;
1786
1787 switch (takes_regs)
1788 {
1789 case takes_regs_uninit:
1790 {
1791 CUMULATIVE_ARGS args_so_far;
1792 tree fn, arg;
1793
1794 fn = emit_block_move_libcall_fn (false);
1795 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1796
1797 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1798 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1799 {
1800 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1801 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1802 if (!tmp || !REG_P (tmp))
1803 goto fail_takes_regs;
1804 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1805 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1806 NULL_TREE, 1))
1807 goto fail_takes_regs;
1808 #endif
1809 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1810 }
1811 }
1812 takes_regs = takes_regs_yes;
1813 /* FALLTHRU */
1814
1815 case takes_regs_yes:
1816 return true;
1817
1818 fail_takes_regs:
1819 takes_regs = takes_regs_no;
1820 /* FALLTHRU */
1821 case takes_regs_no:
1822 return false;
1823
1824 default:
1825 abort ();
1826 }
1827 }
1828 }
1829
1830 /* A subroutine of emit_block_move. Expand a movstr pattern;
1831 return true if successful. */
1832
1833 static bool
1834 emit_block_move_via_movstr (x, y, size, align)
1835 rtx x, y, size;
1836 unsigned int align;
1837 {
1838 /* Try the most limited insn first, because there's no point
1839 including more than one in the machine description unless
1840 the more limited one has some advantage. */
1841
1842 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1843 enum machine_mode mode;
1844
1845 /* Since this is a move insn, we don't care about volatility. */
1846 volatile_ok = 1;
1847
1848 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1849 mode = GET_MODE_WIDER_MODE (mode))
1850 {
1851 enum insn_code code = movstr_optab[(int) mode];
1852 insn_operand_predicate_fn pred;
1853
1854 if (code != CODE_FOR_nothing
1855 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1856 here because if SIZE is less than the mode mask, as it is
1857 returned by the macro, it will definitely be less than the
1858 actual mode mask. */
1859 && ((GET_CODE (size) == CONST_INT
1860 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1861 <= (GET_MODE_MASK (mode) >> 1)))
1862 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1863 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1864 || (*pred) (x, BLKmode))
1865 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1866 || (*pred) (y, BLKmode))
1867 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1868 || (*pred) (opalign, VOIDmode)))
1869 {
1870 rtx op2;
1871 rtx last = get_last_insn ();
1872 rtx pat;
1873
1874 op2 = convert_to_mode (mode, size, 1);
1875 pred = insn_data[(int) code].operand[2].predicate;
1876 if (pred != 0 && ! (*pred) (op2, mode))
1877 op2 = copy_to_mode_reg (mode, op2);
1878
1879 /* ??? When called via emit_block_move_for_call, it'd be
1880 nice if there were some way to inform the backend, so
1881 that it doesn't fail the expansion because it thinks
1882 emitting the libcall would be more efficient. */
1883
1884 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1885 if (pat)
1886 {
1887 emit_insn (pat);
1888 volatile_ok = 0;
1889 return true;
1890 }
1891 else
1892 delete_insns_since (last);
1893 }
1894 }
1895
1896 volatile_ok = 0;
1897 return false;
1898 }
1899
1900 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1901 Return the return value from memcpy, 0 otherwise. */
1902
1903 static rtx
1904 emit_block_move_via_libcall (dst, src, size)
1905 rtx dst, src, size;
1906 {
1907 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1908 enum machine_mode size_mode;
1909 rtx retval;
1910
1911 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1912
1913 It is unsafe to save the value generated by protect_from_queue
1914 and reuse it later. Consider what happens if emit_queue is
1915 called before the return value from protect_from_queue is used.
1916
1917 Expansion of the CALL_EXPR below will call emit_queue before
1918 we are finished emitting RTL for argument setup. So if we are
1919 not careful we could get the wrong value for an argument.
1920
1921 To avoid this problem we go ahead and emit code to copy X, Y &
1922 SIZE into new pseudos. We can then place those new pseudos
1923 into an RTL_EXPR and use them later, even after a call to
1924 emit_queue.
1925
1926 Note this is not strictly needed for library calls since they
1927 do not call emit_queue before loading their arguments. However,
1928 we may need to have library calls call emit_queue in the future
1929 since failing to do so could cause problems for targets which
1930 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1931
1932 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1933 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1934
1935 if (TARGET_MEM_FUNCTIONS)
1936 size_mode = TYPE_MODE (sizetype);
1937 else
1938 size_mode = TYPE_MODE (unsigned_type_node);
1939 size = convert_to_mode (size_mode, size, 1);
1940 size = copy_to_mode_reg (size_mode, size);
1941
1942 /* It is incorrect to use the libcall calling conventions to call
1943 memcpy in this context. This could be a user call to memcpy and
1944 the user may wish to examine the return value from memcpy. For
1945 targets where libcalls and normal calls have different conventions
1946 for returning pointers, we could end up generating incorrect code.
1947
1948 For convenience, we generate the call to bcopy this way as well. */
1949
1950 dst_tree = make_tree (ptr_type_node, dst);
1951 src_tree = make_tree (ptr_type_node, src);
1952 if (TARGET_MEM_FUNCTIONS)
1953 size_tree = make_tree (sizetype, size);
1954 else
1955 size_tree = make_tree (unsigned_type_node, size);
1956
1957 fn = emit_block_move_libcall_fn (true);
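  /* Note the argument order: memcpy takes (dest, src, size) while bcopy
     takes (src, dest, size), so the lists below are built accordingly
     (tree_cons prepends, so the last element consed becomes the first
     argument).  */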
1958 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1959 if (TARGET_MEM_FUNCTIONS)
1960 {
1961 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1962 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1963 }
1964 else
1965 {
1966 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1967 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1968 }
1969
1970 /* Now we have to build up the CALL_EXPR itself. */
1971 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1972 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1973 call_expr, arg_list, NULL_TREE);
1974 TREE_SIDE_EFFECTS (call_expr) = 1;
1975
1976 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1977
1978 /* If we are initializing a readonly value, show the above call
1979 clobbered it. Otherwise, a load from it may erroneously be
1980 hoisted from a loop. */
1981 if (RTX_UNCHANGING_P (dst))
1982 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1983
1984 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1985 }
1986
1987 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1988 for the function we use for block copies. The first time FOR_CALL
1989 is true, we call assemble_external. */
1990
1991 static GTY(()) tree block_move_fn;
1992
1993 static tree
1994 emit_block_move_libcall_fn (for_call)
1995 int for_call;
1996 {
1997 static bool emitted_extern;
1998 tree fn = block_move_fn, args;
1999
2000 if (!fn)
2001 {
2002 if (TARGET_MEM_FUNCTIONS)
2003 {
2004 fn = get_identifier ("memcpy");
2005 args = build_function_type_list (ptr_type_node, ptr_type_node,
2006 const_ptr_type_node, sizetype,
2007 NULL_TREE);
2008 }
2009 else
2010 {
2011 fn = get_identifier ("bcopy");
2012 args = build_function_type_list (void_type_node, const_ptr_type_node,
2013 ptr_type_node, unsigned_type_node,
2014 NULL_TREE);
2015 }
2016
2017 fn = build_decl (FUNCTION_DECL, fn, args);
2018 DECL_EXTERNAL (fn) = 1;
2019 TREE_PUBLIC (fn) = 1;
2020 DECL_ARTIFICIAL (fn) = 1;
2021 TREE_NOTHROW (fn) = 1;
2022
2023 block_move_fn = fn;
2024 }
2025
2026 if (for_call && !emitted_extern)
2027 {
2028 emitted_extern = true;
2029 make_decl_rtl (fn, NULL);
2030 assemble_external (fn);
2031 }
2032
2033 return fn;
2034 }
2035
2036 /* A subroutine of emit_block_move. Copy the data via an explicit
2037 loop. This is used only when libcalls are forbidden. */
2038 /* ??? It'd be nice to copy in hunks larger than QImode. */
2039
2040 static void
2041 emit_block_move_via_loop (x, y, size, align)
2042 rtx x, y, size;
2043 unsigned int align ATTRIBUTE_UNUSED;
2044 {
2045 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2046 enum machine_mode iter_mode;
2047
2048 iter_mode = GET_MODE (size);
2049 if (iter_mode == VOIDmode)
2050 iter_mode = word_mode;
2051
2052 top_label = gen_label_rtx ();
2053 cmp_label = gen_label_rtx ();
2054 iter = gen_reg_rtx (iter_mode);
2055
2056 emit_move_insn (iter, const0_rtx);
2057
2058 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2059 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2060 do_pending_stack_adjust ();
2061
2062 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2063
2064 emit_jump (cmp_label);
2065 emit_label (top_label);
2066
2067 tmp = convert_modes (Pmode, iter_mode, iter, true);
2068 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2069 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2070 x = change_address (x, QImode, x_addr);
2071 y = change_address (y, QImode, y_addr);
2072
2073 emit_move_insn (x, y);
2074
2075 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2076 true, OPTAB_LIB_WIDEN);
2077 if (tmp != iter)
2078 emit_move_insn (iter, tmp);
2079
2080 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2081 emit_label (cmp_label);
2082
2083 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2084 true, top_label);
2085
2086 emit_note (NULL, NOTE_INSN_LOOP_END);
2087 }
2088 \f
2089 /* Copy all or part of a value X into registers starting at REGNO.
2090 The number of registers to be filled is NREGS. */
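/* For instance, a hypothetical call move_block_to_reg (3, x, 2, mode)
   fills hard registers 3 and 4 with words 0 and 1 of X.  */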
2091
2092 void
2093 move_block_to_reg (regno, x, nregs, mode)
2094 int regno;
2095 rtx x;
2096 int nregs;
2097 enum machine_mode mode;
2098 {
2099 int i;
2100 #ifdef HAVE_load_multiple
2101 rtx pat;
2102 rtx last;
2103 #endif
2104
2105 if (nregs == 0)
2106 return;
2107
2108 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2109 x = validize_mem (force_const_mem (mode, x));
2110
2111 /* See if the machine can do this with a load multiple insn. */
2112 #ifdef HAVE_load_multiple
2113 if (HAVE_load_multiple)
2114 {
2115 last = get_last_insn ();
2116 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2117 GEN_INT (nregs));
2118 if (pat)
2119 {
2120 emit_insn (pat);
2121 return;
2122 }
2123 else
2124 delete_insns_since (last);
2125 }
2126 #endif
2127
2128 for (i = 0; i < nregs; i++)
2129 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2130 operand_subword_force (x, i, mode));
2131 }
2132
2133 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2134 The number of registers to be filled is NREGS. SIZE indicates the number
2135 of bytes in the object X. */
2136
2137 void
2138 move_block_from_reg (regno, x, nregs, size)
2139 int regno;
2140 rtx x;
2141 int nregs;
2142 int size;
2143 {
2144 int i;
2145 #ifdef HAVE_store_multiple
2146 rtx pat;
2147 rtx last;
2148 #endif
2149 enum machine_mode mode;
2150
2151 if (nregs == 0)
2152 return;
2153
2154 /* If SIZE is that of a mode no bigger than a word, just use that
2155 mode's store operation. */
2156 if (size <= UNITS_PER_WORD
2157 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2158 {
2159 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2160 return;
2161 }
2162
2163 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2164 to the left before storing to memory. Note that the previous test
2165 doesn't handle all cases (e.g. SIZE == 3). */
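  /* For instance, on a hypothetical 32-bit big-endian target
     (UNITS_PER_WORD == 4) with SIZE == 3, the register is shifted left by
     (4 - 3) * 8 == 8 bits so that its three significant bytes land in the
     low-addressed bytes of the word in memory.  */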
2166 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2167 {
2168 rtx tem = operand_subword (x, 0, 1, BLKmode);
2169 rtx shift;
2170
2171 if (tem == 0)
2172 abort ();
2173
2174 shift = expand_shift (LSHIFT_EXPR, word_mode,
2175 gen_rtx_REG (word_mode, regno),
2176 build_int_2 ((UNITS_PER_WORD - size)
2177 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2178 emit_move_insn (tem, shift);
2179 return;
2180 }
2181
2182 /* See if the machine can do this with a store multiple insn. */
2183 #ifdef HAVE_store_multiple
2184 if (HAVE_store_multiple)
2185 {
2186 last = get_last_insn ();
2187 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2188 GEN_INT (nregs));
2189 if (pat)
2190 {
2191 emit_insn (pat);
2192 return;
2193 }
2194 else
2195 delete_insns_since (last);
2196 }
2197 #endif
2198
2199 for (i = 0; i < nregs; i++)
2200 {
2201 rtx tem = operand_subword (x, i, 1, BLKmode);
2202
2203 if (tem == 0)
2204 abort ();
2205
2206 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2207 }
2208 }
2209
2210 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2211 ORIG, where ORIG is a non-consecutive group of registers represented by
2212 a PARALLEL. The clone is identical to the original except that the
2213 original set of registers is replaced by a new set of pseudo registers.
2214 The new set has the same modes as the original set. */
2215
2216 rtx
2217 gen_group_rtx (orig)
2218 rtx orig;
2219 {
2220 int i, length;
2221 rtx *tmps;
2222
2223 if (GET_CODE (orig) != PARALLEL)
2224 abort ();
2225
2226 length = XVECLEN (orig, 0);
2227 tmps = (rtx *) alloca (sizeof (rtx) * length);
2228
2229 /* Skip a NULL entry in first slot. */
2230 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2231
2232 if (i)
2233 tmps[0] = 0;
2234
2235 for (; i < length; i++)
2236 {
2237 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2238 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2239
2240 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2241 }
2242
2243 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2244 }
2245
2246 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2247 registers represented by a PARALLEL. SSIZE represents the total size of
2248 block SRC in bytes, or -1 if not known. */
2249 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2250 the balance will be in what would be the low-order memory addresses, i.e.
2251 left justified for big endian, right justified for little endian. This
2252 happens to be true for the targets currently using this support. If this
2253 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2254 would be needed. */
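/* A sketch of the representation assumed here: each element of the
   PARALLEL is an EXPR_LIST pairing a register with its byte offset within
   the block, so a 16-byte value passed in two hypothetical DImode
   registers might look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   with a leading NULL entry when part of the value also lives on the
   stack.  */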
2255
2256 void
2257 emit_group_load (dst, orig_src, ssize)
2258 rtx dst, orig_src;
2259 int ssize;
2260 {
2261 rtx *tmps, src;
2262 int start, i;
2263
2264 if (GET_CODE (dst) != PARALLEL)
2265 abort ();
2266
2267 /* Check for a NULL entry, used to indicate that the parameter goes
2268 both on the stack and in registers. */
2269 if (XEXP (XVECEXP (dst, 0, 0), 0))
2270 start = 0;
2271 else
2272 start = 1;
2273
2274 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2275
2276 /* Process the pieces. */
2277 for (i = start; i < XVECLEN (dst, 0); i++)
2278 {
2279 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2280 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2281 unsigned int bytelen = GET_MODE_SIZE (mode);
2282 int shift = 0;
2283
2284 /* Handle trailing fragments that run over the size of the struct. */
2285 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2286 {
2287 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2288 bytelen = ssize - bytepos;
2289 if (bytelen <= 0)
2290 abort ();
2291 }
2292
2293 /* If we won't be loading directly from memory, protect the real source
2294 from strange tricks we might play; but make sure that the source can
2295 be loaded directly into the destination. */
2296 src = orig_src;
2297 if (GET_CODE (orig_src) != MEM
2298 && (!CONSTANT_P (orig_src)
2299 || (GET_MODE (orig_src) != mode
2300 && GET_MODE (orig_src) != VOIDmode)))
2301 {
2302 if (GET_MODE (orig_src) == VOIDmode)
2303 src = gen_reg_rtx (mode);
2304 else
2305 src = gen_reg_rtx (GET_MODE (orig_src));
2306
2307 emit_move_insn (src, orig_src);
2308 }
2309
2310 /* Optimize the access just a bit. */
2311 if (GET_CODE (src) == MEM
2312 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2313 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2314 && bytelen == GET_MODE_SIZE (mode))
2315 {
2316 tmps[i] = gen_reg_rtx (mode);
2317 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2318 }
2319 else if (GET_CODE (src) == CONCAT)
2320 {
2321 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2322 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2323
2324 if ((bytepos == 0 && bytelen == slen0)
2325 || (bytepos != 0 && bytepos + bytelen <= slen))
2326 {
2327 /* The following assumes that the concatenated objects all
2328 have the same size. In this case, a simple calculation
2329 can be used to determine the object and the bit field
2330 to be extracted. */
2331 tmps[i] = XEXP (src, bytepos / slen0);
2332 if (! CONSTANT_P (tmps[i])
2333 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2334 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2335 (bytepos % slen0) * BITS_PER_UNIT,
2336 1, NULL_RTX, mode, mode, ssize);
2337 }
2338 else if (bytepos == 0)
2339 {
2340 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2341 emit_move_insn (mem, src);
2342 tmps[i] = adjust_address (mem, mode, 0);
2343 }
2344 else
2345 abort ();
2346 }
2347 else if (CONSTANT_P (src)
2348 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2349 tmps[i] = src;
2350 else
2351 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2352 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2353 mode, mode, ssize);
2354
2355 if (BYTES_BIG_ENDIAN && shift)
2356 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2357 tmps[i], 0, OPTAB_WIDEN);
2358 }
2359
2360 emit_queue ();
2361
2362 /* Copy the extracted pieces into the proper (probable) hard regs. */
2363 for (i = start; i < XVECLEN (dst, 0); i++)
2364 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2365 }
2366
2367 /* Emit code to move a block SRC to block DST, where SRC and DST are
2368 non-consecutive groups of registers, each represented by a PARALLEL. */
2369
2370 void
2371 emit_group_move (dst, src)
2372 rtx dst, src;
2373 {
2374 int i;
2375
2376 if (GET_CODE (src) != PARALLEL
2377 || GET_CODE (dst) != PARALLEL
2378 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2379 abort ();
2380
2381 /* Skip first entry if NULL. */
2382 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2383 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2384 XEXP (XVECEXP (src, 0, i), 0));
2385 }
2386
2387 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2388 registers represented by a PARALLEL. SSIZE represents the total size of
2389 block DST, or -1 if not known. */
2390
2391 void
2392 emit_group_store (orig_dst, src, ssize)
2393 rtx orig_dst, src;
2394 int ssize;
2395 {
2396 rtx *tmps, dst;
2397 int start, i;
2398
2399 if (GET_CODE (src) != PARALLEL)
2400 abort ();
2401
2402 /* Check for a NULL entry, used to indicate that the parameter goes
2403 both on the stack and in registers. */
2404 if (XEXP (XVECEXP (src, 0, 0), 0))
2405 start = 0;
2406 else
2407 start = 1;
2408
2409 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2410
2411 /* Copy the (probable) hard regs into pseudos. */
2412 for (i = start; i < XVECLEN (src, 0); i++)
2413 {
2414 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2415 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2416 emit_move_insn (tmps[i], reg);
2417 }
2418 emit_queue ();
2419
2420 /* If we won't be storing directly into memory, protect the real destination
2421 from strange tricks we might play. */
2422 dst = orig_dst;
2423 if (GET_CODE (dst) == PARALLEL)
2424 {
2425 rtx temp;
2426
2427 /* We can get a PARALLEL dst if there is a conditional expression in
2428 a return statement. In that case, the dst and src are the same,
2429 so no action is necessary. */
2430 if (rtx_equal_p (dst, src))
2431 return;
2432
2433 /* It is unclear if we can ever reach here, but we may as well handle
2434 it. Allocate a temporary, and split this into a store/load to/from
2435 the temporary. */
2436
2437 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2438 emit_group_store (temp, src, ssize);
2439 emit_group_load (dst, temp, ssize);
2440 return;
2441 }
2442 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2443 {
2444 dst = gen_reg_rtx (GET_MODE (orig_dst));
2445 /* Make life a bit easier for combine. */
2446 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2447 }
2448
2449 /* Process the pieces. */
2450 for (i = start; i < XVECLEN (src, 0); i++)
2451 {
2452 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2453 enum machine_mode mode = GET_MODE (tmps[i]);
2454 unsigned int bytelen = GET_MODE_SIZE (mode);
2455 rtx dest = dst;
2456
2457 /* Handle trailing fragments that run over the size of the struct. */
2458 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2459 {
2460 if (BYTES_BIG_ENDIAN)
2461 {
2462 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2463 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2464 tmps[i], 0, OPTAB_WIDEN);
2465 }
2466 bytelen = ssize - bytepos;
2467 }
2468
2469 if (GET_CODE (dst) == CONCAT)
2470 {
2471 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2472 dest = XEXP (dst, 0);
2473 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2474 {
2475 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2476 dest = XEXP (dst, 1);
2477 }
2478 else if (bytepos == 0 && XVECLEN (src, 0))
2479 {
2480 dest = assign_stack_temp (GET_MODE (dest),
2481 GET_MODE_SIZE (GET_MODE (dest)), 0);
2482 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2483 tmps[i]);
2484 dst = dest;
2485 break;
2486 }
2487 else
2488 abort ();
2489 }
2490
2491 /* Optimize the access just a bit. */
2492 if (GET_CODE (dest) == MEM
2493 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2494 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2495 && bytelen == GET_MODE_SIZE (mode))
2496 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2497 else
2498 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2499 mode, tmps[i], ssize);
2500 }
2501
2502 emit_queue ();
2503
2504 /* Copy from the pseudo into the (probable) hard reg. */
2505 if (orig_dst != dst)
2506 emit_move_insn (orig_dst, dst);
2507 }
2508
2509 /* Generate code to copy a BLKmode object of TYPE out of a
2510 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2511 is null, a stack temporary is created. TGTBLK is returned.
2512
2513 The primary purpose of this routine is to handle functions
2514 that return BLKmode structures in registers. Some machines
2515 (the PA for example) want to return all small structures
2516 in registers regardless of the structure's alignment. */
2517
2518 rtx
2519 copy_blkmode_from_reg (tgtblk, srcreg, type)
2520 rtx tgtblk;
2521 rtx srcreg;
2522 tree type;
2523 {
2524 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2525 rtx src = NULL, dst = NULL;
2526 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2527 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2528
2529 if (tgtblk == 0)
2530 {
2531 tgtblk = assign_temp (build_qualified_type (type,
2532 (TYPE_QUALS (type)
2533 | TYPE_QUAL_CONST)),
2534 0, 1, 1);
2535 preserve_temp_slots (tgtblk);
2536 }
2537
2538 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2539 into a new pseudo which is a full word. */
2540
2541 if (GET_MODE (srcreg) != BLKmode
2542 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2543 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2544
2545 /* Structures whose size is not a multiple of a word are aligned
2546 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2547 machine, this means we must skip the empty high order bytes when
2548 calculating the bit offset. */
2549 if (BYTES_BIG_ENDIAN
2550 && bytes % UNITS_PER_WORD)
2551 big_endian_correction
2552 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
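  /* For example, a 6-byte structure on a hypothetical 32-bit big-endian
     target leaves bytes % UNITS_PER_WORD == 2, giving a correction of
     32 - 2 * 8 == 16 bits: extraction starts 16 bits into the first
     source word, skipping the two empty high-order bytes.  */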
2553
2554 /* Copy the structure BITSIZE bits at a time.
2555
2556 We could probably emit more efficient code for machines which do not use
2557 strict alignment, but it doesn't seem worth the effort at the current
2558 time. */
2559 for (bitpos = 0, xbitpos = big_endian_correction;
2560 bitpos < bytes * BITS_PER_UNIT;
2561 bitpos += bitsize, xbitpos += bitsize)
2562 {
2563 /* We need a new source operand each time xbitpos is on a
2564 word boundary and when xbitpos == big_endian_correction
2565 (the first time through). */
2566 if (xbitpos % BITS_PER_WORD == 0
2567 || xbitpos == big_endian_correction)
2568 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2569 GET_MODE (srcreg));
2570
2571 /* We need a new destination operand each time bitpos is on
2572 a word boundary. */
2573 if (bitpos % BITS_PER_WORD == 0)
2574 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2575
2576 /* Use xbitpos for the source extraction (right justified) and
2577 bitpos for the destination store (left justified). */
2578 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2579 extract_bit_field (src, bitsize,
2580 xbitpos % BITS_PER_WORD, 1,
2581 NULL_RTX, word_mode, word_mode,
2582 BITS_PER_WORD),
2583 BITS_PER_WORD);
2584 }
2585
2586 return tgtblk;
2587 }
2588
2589 /* Add a USE expression for REG to the (possibly empty) list pointed
2590 to by CALL_FUSAGE. REG must denote a hard register. */
2591
2592 void
2593 use_reg (call_fusage, reg)
2594 rtx *call_fusage, reg;
2595 {
2596 if (GET_CODE (reg) != REG
2597 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2598 abort ();
2599
2600 *call_fusage
2601 = gen_rtx_EXPR_LIST (VOIDmode,
2602 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2603 }
2604
2605 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2606 starting at REGNO. All of these registers must be hard registers. */
2607
2608 void
2609 use_regs (call_fusage, regno, nregs)
2610 rtx *call_fusage;
2611 int regno;
2612 int nregs;
2613 {
2614 int i;
2615
2616 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2617 abort ();
2618
2619 for (i = 0; i < nregs; i++)
2620 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2621 }
2622
2623 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2624 PARALLEL REGS. This is for calls that pass values in multiple
2625 non-contiguous locations. The Irix 6 ABI has examples of this. */
2626
2627 void
2628 use_group_regs (call_fusage, regs)
2629 rtx *call_fusage;
2630 rtx regs;
2631 {
2632 int i;
2633
2634 for (i = 0; i < XVECLEN (regs, 0); i++)
2635 {
2636 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2637
2638 /* A NULL entry means the parameter goes both on the stack and in
2639 registers. This can also be a MEM for targets that pass values
2640 partially on the stack and partially in registers. */
2641 if (reg != 0 && GET_CODE (reg) == REG)
2642 use_reg (call_fusage, reg);
2643 }
2644 }
2645 \f
2646
2647 /* Determine whether the LEN bytes generated by CONSTFUN can be
2648 stored to memory using several move instructions. CONSTFUNDATA is
2649 a pointer which will be passed as argument in every CONSTFUN call.
2650 ALIGN is maximum alignment we can assume. Return nonzero if a
2651 call to store_by_pieces should succeed. */
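/* A typical caller would use this as a cheap feasibility test before
   committing to the expansion, along the lines of

     if (can_store_by_pieces (len, constfun, data, align))
       store_by_pieces (to, len, constfun, data, align);
     else
       ...fall back to a library call...

   where CONSTFUN and DATA stand for whatever callback and cookie the
   caller uses to produce the constant pieces.  */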
2652
2653 int
2654 can_store_by_pieces (len, constfun, constfundata, align)
2655 unsigned HOST_WIDE_INT len;
2656 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2657 PTR constfundata;
2658 unsigned int align;
2659 {
2660 unsigned HOST_WIDE_INT max_size, l;
2661 HOST_WIDE_INT offset = 0;
2662 enum machine_mode mode, tmode;
2663 enum insn_code icode;
2664 int reverse;
2665 rtx cst;
2666
2667 if (! STORE_BY_PIECES_P (len, align))
2668 return 0;
2669
2670 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2671 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2672 align = MOVE_MAX * BITS_PER_UNIT;
2673
2674 /* We would first store what we can in the largest integer mode, then go to
2675 successively smaller modes. */
2676
2677 for (reverse = 0;
2678 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2679 reverse++)
2680 {
2681 l = len;
2682 mode = VOIDmode;
2683 max_size = STORE_MAX_PIECES + 1;
2684 while (max_size > 1)
2685 {
2686 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2687 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2688 if (GET_MODE_SIZE (tmode) < max_size)
2689 mode = tmode;
2690
2691 if (mode == VOIDmode)
2692 break;
2693
2694 icode = mov_optab->handlers[(int) mode].insn_code;
2695 if (icode != CODE_FOR_nothing
2696 && align >= GET_MODE_ALIGNMENT (mode))
2697 {
2698 unsigned int size = GET_MODE_SIZE (mode);
2699
2700 while (l >= size)
2701 {
2702 if (reverse)
2703 offset -= size;
2704
2705 cst = (*constfun) (constfundata, offset, mode);
2706 if (!LEGITIMATE_CONSTANT_P (cst))
2707 return 0;
2708
2709 if (!reverse)
2710 offset += size;
2711
2712 l -= size;
2713 }
2714 }
2715
2716 max_size = GET_MODE_SIZE (mode);
2717 }
2718
2719 /* The code above should have handled everything. */
2720 if (l != 0)
2721 abort ();
2722 }
2723
2724 return 1;
2725 }
2726
2727 /* Generate several move instructions to store LEN bytes generated by
2728 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2729 pointer which will be passed as argument in every CONSTFUN call.
2730 ALIGN is maximum alignment we can assume. */
2731
2732 void
2733 store_by_pieces (to, len, constfun, constfundata, align)
2734 rtx to;
2735 unsigned HOST_WIDE_INT len;
2736 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2737 PTR constfundata;
2738 unsigned int align;
2739 {
2740 struct store_by_pieces data;
2741
2742 if (! STORE_BY_PIECES_P (len, align))
2743 abort ();
2744 to = protect_from_queue (to, 1);
2745 data.constfun = constfun;
2746 data.constfundata = constfundata;
2747 data.len = len;
2748 data.to = to;
2749 store_by_pieces_1 (&data, align);
2750 }
2751
2752 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2753 rtx with BLKmode). The caller must pass TO through protect_from_queue
2754 before calling. ALIGN is maximum alignment we can assume. */
2755
2756 static void
2757 clear_by_pieces (to, len, align)
2758 rtx to;
2759 unsigned HOST_WIDE_INT len;
2760 unsigned int align;
2761 {
2762 struct store_by_pieces data;
2763
2764 data.constfun = clear_by_pieces_1;
2765 data.constfundata = NULL;
2766 data.len = len;
2767 data.to = to;
2768 store_by_pieces_1 (&data, align);
2769 }
2770
2771 /* Callback routine for clear_by_pieces.
2772 Return const0_rtx unconditionally. */
2773
2774 static rtx
2775 clear_by_pieces_1 (data, offset, mode)
2776 PTR data ATTRIBUTE_UNUSED;
2777 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2778 enum machine_mode mode ATTRIBUTE_UNUSED;
2779 {
2780 return const0_rtx;
2781 }
2782
2783 /* Subroutine of clear_by_pieces and store_by_pieces.
2784 Generate several move instructions to store LEN bytes of block TO. (A MEM
2785 rtx with BLKmode). The caller must pass TO through protect_from_queue
2786 before calling. ALIGN is maximum alignment we can assume. */
2787
2788 static void
2789 store_by_pieces_1 (data, align)
2790 struct store_by_pieces *data;
2791 unsigned int align;
2792 {
2793 rtx to_addr = XEXP (data->to, 0);
2794 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2795 enum machine_mode mode = VOIDmode, tmode;
2796 enum insn_code icode;
2797
2798 data->offset = 0;
2799 data->to_addr = to_addr;
2800 data->autinc_to
2801 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2802 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2803
2804 data->explicit_inc_to = 0;
2805 data->reverse
2806 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2807 if (data->reverse)
2808 data->offset = data->len;
2809
2810 /* If storing requires more than two move insns,
2811 copy addresses to registers (to make displacements shorter)
2812 and use post-increment if available. */
2813 if (!data->autinc_to
2814 && move_by_pieces_ninsns (data->len, align) > 2)
2815 {
2816 /* Determine the main mode we'll be using. */
2817 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2818 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2819 if (GET_MODE_SIZE (tmode) < max_size)
2820 mode = tmode;
2821
2822 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2823 {
2824 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2825 data->autinc_to = 1;
2826 data->explicit_inc_to = -1;
2827 }
2828
2829 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2830 && ! data->autinc_to)
2831 {
2832 data->to_addr = copy_addr_to_reg (to_addr);
2833 data->autinc_to = 1;
2834 data->explicit_inc_to = 1;
2835 }
2836
2837 if (! data->autinc_to && CONSTANT_P (to_addr))
2838 data->to_addr = copy_addr_to_reg (to_addr);
2839 }
2840
2841 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2842 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2843 align = MOVE_MAX * BITS_PER_UNIT;
2844
2845 /* First store what we can in the largest integer mode, then go to
2846 successively smaller modes. */
2847
2848 while (max_size > 1)
2849 {
2850 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2851 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2852 if (GET_MODE_SIZE (tmode) < max_size)
2853 mode = tmode;
2854
2855 if (mode == VOIDmode)
2856 break;
2857
2858 icode = mov_optab->handlers[(int) mode].insn_code;
2859 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2860 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2861
2862 max_size = GET_MODE_SIZE (mode);
2863 }
2864
2865 /* The code above should have handled everything. */
2866 if (data->len != 0)
2867 abort ();
2868 }
2869
2870 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2871 with move instructions for mode MODE. GENFUN is the gen_... function
2872 to make a move insn for that mode. DATA has all the other info. */
2873
2874 static void
2875 store_by_pieces_2 (genfun, mode, data)
2876 rtx (*genfun) PARAMS ((rtx, ...));
2877 enum machine_mode mode;
2878 struct store_by_pieces *data;
2879 {
2880 unsigned int size = GET_MODE_SIZE (mode);
2881 rtx to1, cst;
2882
2883 while (data->len >= size)
2884 {
2885 if (data->reverse)
2886 data->offset -= size;
2887
2888 if (data->autinc_to)
2889 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2890 data->offset);
2891 else
2892 to1 = adjust_address (data->to, mode, data->offset);
2893
2894 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2895 emit_insn (gen_add2_insn (data->to_addr,
2896 GEN_INT (-(HOST_WIDE_INT) size)));
2897
2898 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2899 emit_insn ((*genfun) (to1, cst));
2900
2901 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2902 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2903
2904 if (! data->reverse)
2905 data->offset += size;
2906
2907 data->len -= size;
2908 }
2909 }
2910 \f
2911 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2912 its length in bytes. */
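/* For example, a hypothetical caller zeroing a BLKmode MEM named DEST of
   N bytes would call clear_storage (dest, GEN_INT (n)).  */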
2913
2914 rtx
2915 clear_storage (object, size)
2916 rtx object;
2917 rtx size;
2918 {
2919 rtx retval = 0;
2920 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2921 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2922
2923 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2924 just move a zero. Otherwise, do this a piece at a time. */
2925 if (GET_MODE (object) != BLKmode
2926 && GET_CODE (size) == CONST_INT
2927 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2928 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2929 else
2930 {
2931 object = protect_from_queue (object, 1);
2932 size = protect_from_queue (size, 0);
2933
2934 if (GET_CODE (size) == CONST_INT
2935 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2936 clear_by_pieces (object, INTVAL (size), align);
2937 else if (clear_storage_via_clrstr (object, size, align))
2938 ;
2939 else
2940 retval = clear_storage_via_libcall (object, size);
2941 }
2942
2943 return retval;
2944 }
2945
2946 /* A subroutine of clear_storage. Expand a clrstr pattern;
2947 return true if successful. */
2948
2949 static bool
2950 clear_storage_via_clrstr (object, size, align)
2951 rtx object, size;
2952 unsigned int align;
2953 {
2954 /* Try the most limited insn first, because there's no point
2955 including more than one in the machine description unless
2956 the more limited one has some advantage. */
2957
2958 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2959 enum machine_mode mode;
2960
2961 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2962 mode = GET_MODE_WIDER_MODE (mode))
2963 {
2964 enum insn_code code = clrstr_optab[(int) mode];
2965 insn_operand_predicate_fn pred;
2966
2967 if (code != CODE_FOR_nothing
2968 /* We don't need MODE to be narrower than
2969 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2970 the mode mask, as it is returned by the macro, it will
2971 definitely be less than the actual mode mask. */
2972 && ((GET_CODE (size) == CONST_INT
2973 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2974 <= (GET_MODE_MASK (mode) >> 1)))
2975 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2976 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2977 || (*pred) (object, BLKmode))
2978 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2979 || (*pred) (opalign, VOIDmode)))
2980 {
2981 rtx op1;
2982 rtx last = get_last_insn ();
2983 rtx pat;
2984
2985 op1 = convert_to_mode (mode, size, 1);
2986 pred = insn_data[(int) code].operand[1].predicate;
2987 if (pred != 0 && ! (*pred) (op1, mode))
2988 op1 = copy_to_mode_reg (mode, op1);
2989
2990 pat = GEN_FCN ((int) code) (object, op1, opalign);
2991 if (pat)
2992 {
2993 emit_insn (pat);
2994 return true;
2995 }
2996 else
2997 delete_insns_since (last);
2998 }
2999 }
3000
3001 return false;
3002 }
3003
3004 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3005 Return the return value of memset, 0 otherwise. */
3006
3007 static rtx
3008 clear_storage_via_libcall (object, size)
3009 rtx object, size;
3010 {
3011 tree call_expr, arg_list, fn, object_tree, size_tree;
3012 enum machine_mode size_mode;
3013 rtx retval;
3014
3015 /* OBJECT or SIZE may have been passed through protect_from_queue.
3016
3017 It is unsafe to save the value generated by protect_from_queue
3018 and reuse it later. Consider what happens if emit_queue is
3019 called before the return value from protect_from_queue is used.
3020
3021 Expansion of the CALL_EXPR below will call emit_queue before
3022 we are finished emitting RTL for argument setup. So if we are
3023 not careful we could get the wrong value for an argument.
3024
3025 To avoid this problem we go ahead and emit code to copy OBJECT
3026 and SIZE into new pseudos. We can then place those new pseudos
3027 into an RTL_EXPR and use them later, even after a call to
3028 emit_queue.
3029
3030 Note this is not strictly needed for library calls since they
3031 do not call emit_queue before loading their arguments. However,
3032 we may need to have library calls call emit_queue in the future
3033 since failing to do so could cause problems for targets which
3034 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3035
3036 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3037
3038 if (TARGET_MEM_FUNCTIONS)
3039 size_mode = TYPE_MODE (sizetype);
3040 else
3041 size_mode = TYPE_MODE (unsigned_type_node);
3042 size = convert_to_mode (size_mode, size, 1);
3043 size = copy_to_mode_reg (size_mode, size);
3044
3045 /* It is incorrect to use the libcall calling conventions to call
3046 memset in this context. This could be a user call to memset and
3047 the user may wish to examine the return value from memset. For
3048 targets where libcalls and normal calls have different conventions
3049 for returning pointers, we could end up generating incorrect code.
3050
3051 For convenience, we generate the call to bzero this way as well. */
3052
3053 object_tree = make_tree (ptr_type_node, object);
3054 if (TARGET_MEM_FUNCTIONS)
3055 size_tree = make_tree (sizetype, size);
3056 else
3057 size_tree = make_tree (unsigned_type_node, size);
3058
3059 fn = clear_storage_libcall_fn (true);
3060 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3061 if (TARGET_MEM_FUNCTIONS)
3062 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3063 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3064
3065 /* Now we have to build up the CALL_EXPR itself. */
3066 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3067 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3068 call_expr, arg_list, NULL_TREE);
3069 TREE_SIDE_EFFECTS (call_expr) = 1;
3070
3071 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3072
3073 /* If we are initializing a readonly value, show the above call
3074 clobbered it. Otherwise, a load from it may erroneously be
3075 hoisted from a loop. */
3076 if (RTX_UNCHANGING_P (object))
3077 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3078
3079 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3080 }
3081
3082 /* A subroutine of clear_storage_via_libcall. Create the tree node
3083 for the function we use for block clears. The first time FOR_CALL
3084 is true, we call assemble_external. */
3085
3086 static GTY(()) tree block_clear_fn;
3087
3088 static tree
3089 clear_storage_libcall_fn (for_call)
3090 int for_call;
3091 {
3092 static bool emitted_extern;
3093 tree fn = block_clear_fn, args;
3094
3095 if (!fn)
3096 {
3097 if (TARGET_MEM_FUNCTIONS)
3098 {
3099 fn = get_identifier ("memset");
3100 args = build_function_type_list (ptr_type_node, ptr_type_node,
3101 integer_type_node, sizetype,
3102 NULL_TREE);
3103 }
3104 else
3105 {
3106 fn = get_identifier ("bzero");
3107 args = build_function_type_list (void_type_node, ptr_type_node,
3108 unsigned_type_node, NULL_TREE);
3109 }
3110
3111 fn = build_decl (FUNCTION_DECL, fn, args);
3112 DECL_EXTERNAL (fn) = 1;
3113 TREE_PUBLIC (fn) = 1;
3114 DECL_ARTIFICIAL (fn) = 1;
3115 TREE_NOTHROW (fn) = 1;
3116
3117 block_clear_fn = fn;
3118 }
3119
3120 if (for_call && !emitted_extern)
3121 {
3122 emitted_extern = true;
3123 make_decl_rtl (fn, NULL);
3124 assemble_external (fn);
3125 }
3126
3127 return fn;
3128 }
3129 \f
3130 /* Generate code to copy Y into X.
3131 Both Y and X must have the same mode, except that
3132 Y can be a constant with VOIDmode.
3133 This mode cannot be BLKmode; use emit_block_move for that.
3134
3135 Return the last instruction emitted. */
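/* For example, loading a constant into a fresh pseudo:

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   Here Y is a VOIDmode CONST_INT, which the rule above permits.  */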
3136
3137 rtx
3138 emit_move_insn (x, y)
3139 rtx x, y;
3140 {
3141 enum machine_mode mode = GET_MODE (x);
3142 rtx y_cst = NULL_RTX;
3143 rtx last_insn;
3144
3145 x = protect_from_queue (x, 1);
3146 y = protect_from_queue (y, 0);
3147
3148 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3149 abort ();
3150
3151 /* Never force constant_p_rtx to memory. */
3152 if (GET_CODE (y) == CONSTANT_P_RTX)
3153 ;
3154 else if (CONSTANT_P (y))
3155 {
3156 if (optimize
3157 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3158 && (last_insn = compress_float_constant (x, y)))
3159 return last_insn;
3160
3161 if (!LEGITIMATE_CONSTANT_P (y))
3162 {
3163 y_cst = y;
3164 y = force_const_mem (mode, y);
3165
3166 /* If the target's cannot_force_const_mem prevented the spill,
3167 assume that the target's move expanders will also take care
3168 of the non-legitimate constant. */
3169 if (!y)
3170 y = y_cst;
3171 }
3172 }
3173
3174 /* If X or Y are memory references, verify that their addresses are valid
3175 for the machine. */
3176 if (GET_CODE (x) == MEM
3177 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3178 && ! push_operand (x, GET_MODE (x)))
3179 || (flag_force_addr
3180 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3181 x = validize_mem (x);
3182
3183 if (GET_CODE (y) == MEM
3184 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3185 || (flag_force_addr
3186 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3187 y = validize_mem (y);
3188
3189 if (mode == BLKmode)
3190 abort ();
3191
3192 last_insn = emit_move_insn_1 (x, y);
3193
3194 if (y_cst && GET_CODE (x) == REG)
3195 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3196
3197 return last_insn;
3198 }
3199
3200 /* Low level part of emit_move_insn.
3201 Called just like emit_move_insn, but assumes X and Y
3202 are basically valid. */
3203
3204 rtx
3205 emit_move_insn_1 (x, y)
3206 rtx x, y;
3207 {
3208 enum machine_mode mode = GET_MODE (x);
3209 enum machine_mode submode;
3210 enum mode_class class = GET_MODE_CLASS (mode);
3211
3212 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3213 abort ();
3214
3215 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3216 return
3217 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3218
3219 /* Expand complex moves by moving real part and imag part, if possible. */
3220 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3221 && BLKmode != (submode = GET_MODE_INNER (mode))
3222 && (mov_optab->handlers[(int) submode].insn_code
3223 != CODE_FOR_nothing))
3224 {
3225 /* Don't split destination if it is a stack push. */
3226 int stack = push_operand (x, GET_MODE (x));
3227
3228 #ifdef PUSH_ROUNDING
3229 /* In case we output to the stack, but the size is smaller than what the
3230 machine can push exactly, we need to use move instructions. */
3231 if (stack
3232 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3233 != GET_MODE_SIZE (submode)))
3234 {
3235 rtx temp;
3236 HOST_WIDE_INT offset1, offset2;
3237
3238 /* Do not use anti_adjust_stack, since we don't want to update
3239 stack_pointer_delta. */
3240 temp = expand_binop (Pmode,
3241 #ifdef STACK_GROWS_DOWNWARD
3242 sub_optab,
3243 #else
3244 add_optab,
3245 #endif
3246 stack_pointer_rtx,
3247 GEN_INT
3248 (PUSH_ROUNDING
3249 (GET_MODE_SIZE (GET_MODE (x)))),
3250 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3251
3252 if (temp != stack_pointer_rtx)
3253 emit_move_insn (stack_pointer_rtx, temp);
3254
3255 #ifdef STACK_GROWS_DOWNWARD
3256 offset1 = 0;
3257 offset2 = GET_MODE_SIZE (submode);
3258 #else
3259 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3260 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3261 + GET_MODE_SIZE (submode));
3262 #endif
3263
3264 emit_move_insn (change_address (x, submode,
3265 gen_rtx_PLUS (Pmode,
3266 stack_pointer_rtx,
3267 GEN_INT (offset1))),
3268 gen_realpart (submode, y));
3269 emit_move_insn (change_address (x, submode,
3270 gen_rtx_PLUS (Pmode,
3271 stack_pointer_rtx,
3272 GEN_INT (offset2))),
3273 gen_imagpart (submode, y));
3274 }
3275 else
3276 #endif
3277 /* If this is a stack push, push the highpart first, so it
3278 will be in the argument order.
3279
3280 In that case, change_address is used only to convert
3281 the mode, not to change the address. */
3282 if (stack)
3283 {
3284 /* Note that the real part always precedes the imag part in memory
3285 regardless of the machine's endianness. */
3286 #ifdef STACK_GROWS_DOWNWARD
3287 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3288 (gen_rtx_MEM (submode, XEXP (x, 0)),
3289 gen_imagpart (submode, y)));
3290 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3291 (gen_rtx_MEM (submode, XEXP (x, 0)),
3292 gen_realpart (submode, y)));
3293 #else
3294 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3295 (gen_rtx_MEM (submode, XEXP (x, 0)),
3296 gen_realpart (submode, y)));
3297 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3298 (gen_rtx_MEM (submode, XEXP (x, 0)),
3299 gen_imagpart (submode, y)));
3300 #endif
3301 }
3302 else
3303 {
3304 rtx realpart_x, realpart_y;
3305 rtx imagpart_x, imagpart_y;
3306
3307 /* If this is a complex value with each part being smaller than a
3308 word, the usual calling sequence will likely pack the pieces into
3309 a single register. Unfortunately, SUBREG of hard registers only
3310 deals in terms of words, so we have a problem converting input
3311 arguments to the CONCAT of two registers that is used elsewhere
3312 for complex values. If this is before reload, we can copy it into
3313 memory and reload. FIXME, we should see about using extract and
3314 insert on integer registers, but complex short and complex char
3315 variables should be rarely used. */
3316 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3317 && (reload_in_progress | reload_completed) == 0)
3318 {
3319 int packed_dest_p
3320 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3321 int packed_src_p
3322 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3323
3324 if (packed_dest_p || packed_src_p)
3325 {
3326 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3327 ? MODE_FLOAT : MODE_INT);
3328
3329 enum machine_mode reg_mode
3330 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3331
3332 if (reg_mode != BLKmode)
3333 {
3334 rtx mem = assign_stack_temp (reg_mode,
3335 GET_MODE_SIZE (mode), 0);
3336 rtx cmem = adjust_address (mem, mode, 0);
3337
3338 cfun->cannot_inline
3339 = N_("function using short complex types cannot be inline");
3340
3341 if (packed_dest_p)
3342 {
3343 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3344
3345 emit_move_insn_1 (cmem, y);
3346 return emit_move_insn_1 (sreg, mem);
3347 }
3348 else
3349 {
3350 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3351
3352 emit_move_insn_1 (mem, sreg);
3353 return emit_move_insn_1 (x, cmem);
3354 }
3355 }
3356 }
3357 }
3358
3359 realpart_x = gen_realpart (submode, x);
3360 realpart_y = gen_realpart (submode, y);
3361 imagpart_x = gen_imagpart (submode, x);
3362 imagpart_y = gen_imagpart (submode, y);
3363
3364 /* Show the output dies here. This is necessary for SUBREGs
3365 of pseudos since we cannot track their lifetimes correctly;
3366 hard regs shouldn't appear here except as return values.
3367 We never want to emit such a clobber after reload. */
3368 if (x != y
3369 && ! (reload_in_progress || reload_completed)
3370 && (GET_CODE (realpart_x) == SUBREG
3371 || GET_CODE (imagpart_x) == SUBREG))
3372 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3373
3374 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3375 (realpart_x, realpart_y));
3376 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3377 (imagpart_x, imagpart_y));
3378 }
3379
3380 return get_last_insn ();
3381 }
3382
3383 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3384 find a mode to do it in. If we have a movcc, use it. Otherwise,
3385 find the MODE_INT mode of the same width. */
3386 else if (GET_MODE_CLASS (mode) == MODE_CC
3387 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3388 {
3389 enum insn_code insn_code;
3390 enum machine_mode tmode = VOIDmode;
3391 rtx x1 = x, y1 = y;
3392
3393 if (mode != CCmode
3394 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3395 tmode = CCmode;
3396 else
3397 for (tmode = QImode; tmode != VOIDmode;
3398 tmode = GET_MODE_WIDER_MODE (tmode))
3399 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3400 break;
3401
3402 if (tmode == VOIDmode)
3403 abort ();
3404
3405 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3406 may call change_address which is not appropriate if we were
3407 called when a reload was in progress. We don't have to worry
3408 about changing the address since the size in bytes is supposed to
3409 be the same. Copy the MEM to change the mode and move any
3410 substitutions from the old MEM to the new one. */
3411
3412 if (reload_in_progress)
3413 {
3414 x = gen_lowpart_common (tmode, x1);
3415 if (x == 0 && GET_CODE (x1) == MEM)
3416 {
3417 x = adjust_address_nv (x1, tmode, 0);
3418 copy_replacements (x1, x);
3419 }
3420
3421 y = gen_lowpart_common (tmode, y1);
3422 if (y == 0 && GET_CODE (y1) == MEM)
3423 {
3424 y = adjust_address_nv (y1, tmode, 0);
3425 copy_replacements (y1, y);
3426 }
3427 }
3428 else
3429 {
3430 x = gen_lowpart (tmode, x);
3431 y = gen_lowpart (tmode, y);
3432 }
3433
3434 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3435 return emit_insn (GEN_FCN (insn_code) (x, y));
3436 }
3437
3438 /* This will handle any multi-word or full-word mode that lacks a move_insn
3439 pattern. However, you will get better code if you define such patterns,
3440 even if they must turn into multiple assembler instructions. */
3441 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3442 {
3443 rtx last_insn = 0;
3444 rtx seq, inner;
3445 int need_clobber;
3446 int i;
3447
3448 #ifdef PUSH_ROUNDING
3449
3450 /* If X is a push on the stack, do the push now and replace
3451 X with a reference to the stack pointer. */
3452 if (push_operand (x, GET_MODE (x)))
3453 {
3454 rtx temp;
3455 enum rtx_code code;
3456
3457 /* Do not use anti_adjust_stack, since we don't want to update
3458 stack_pointer_delta. */
3459 temp = expand_binop (Pmode,
3460 #ifdef STACK_GROWS_DOWNWARD
3461 sub_optab,
3462 #else
3463 add_optab,
3464 #endif
3465 stack_pointer_rtx,
3466 GEN_INT
3467 (PUSH_ROUNDING
3468 (GET_MODE_SIZE (GET_MODE (x)))),
3469 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3470
3471 if (temp != stack_pointer_rtx)
3472 emit_move_insn (stack_pointer_rtx, temp);
3473
3474 code = GET_CODE (XEXP (x, 0));
3475
3476 /* Just hope that small offsets off SP are OK. */
3477 if (code == POST_INC)
3478 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3479 GEN_INT (-((HOST_WIDE_INT)
3480 GET_MODE_SIZE (GET_MODE (x)))));
3481 else if (code == POST_DEC)
3482 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3483 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3484 else
3485 temp = stack_pointer_rtx;
3486
3487 x = change_address (x, VOIDmode, temp);
3488 }
3489 #endif
3490
3491 /* If we are in reload, see if either operand is a MEM whose address
3492 is scheduled for replacement. */
3493 if (reload_in_progress && GET_CODE (x) == MEM
3494 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3495 x = replace_equiv_address_nv (x, inner);
3496 if (reload_in_progress && GET_CODE (y) == MEM
3497 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3498 y = replace_equiv_address_nv (y, inner);
3499
3500 start_sequence ();
3501
3502 need_clobber = 0;
3503 for (i = 0;
3504 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3505 i++)
3506 {
3507 rtx xpart = operand_subword (x, i, 1, mode);
3508 rtx ypart = operand_subword (y, i, 1, mode);
3509
3510 /* If we can't get a part of Y, put Y into memory if it is a
3511 constant. Otherwise, force it into a register. If we still
3512 can't get a part of Y, abort. */
3513 if (ypart == 0 && CONSTANT_P (y))
3514 {
3515 y = force_const_mem (mode, y);
3516 ypart = operand_subword (y, i, 1, mode);
3517 }
3518 else if (ypart == 0)
3519 ypart = operand_subword_force (y, i, mode);
3520
3521 if (xpart == 0 || ypart == 0)
3522 abort ();
3523
3524 need_clobber |= (GET_CODE (xpart) == SUBREG);
3525
3526 last_insn = emit_move_insn (xpart, ypart);
3527 }
3528
3529 seq = get_insns ();
3530 end_sequence ();
3531
3532 /* Show the output dies here. This is necessary for SUBREGs
3533 of pseudos since we cannot track their lifetimes correctly;
3534 hard regs shouldn't appear here except as return values.
3535 We never want to emit such a clobber after reload. */
3536 if (x != y
3537 && ! (reload_in_progress || reload_completed)
3538 && need_clobber != 0)
3539 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3540
3541 emit_insn (seq);
3542
3543 return last_insn;
3544 }
3545 else
3546 abort ();
3547 }
3548
3549 /* If Y is representable exactly in a narrower mode, and the target can
3550 perform the extension directly from constant or memory, then emit the
3551 move as an extension. */
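/* For instance, a DFmode constant such as 1.0 that truncates to SFmode
   without loss can be emitted as a float-extension of the narrower SFmode
   constant (or of a memory reference to it), which is usually smaller and
   cheaper than materializing the full DFmode value directly.  */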
3552
3553 static rtx
3554 compress_float_constant (x, y)
3555 rtx x, y;
3556 {
3557 enum machine_mode dstmode = GET_MODE (x);
3558 enum machine_mode orig_srcmode = GET_MODE (y);
3559 enum machine_mode srcmode;
3560 REAL_VALUE_TYPE r;
3561
3562 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3563
3564 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3565 srcmode != orig_srcmode;
3566 srcmode = GET_MODE_WIDER_MODE (srcmode))
3567 {
3568 enum insn_code ic;
3569 rtx trunc_y, last_insn;
3570
3571 /* Skip if the target can't extend this way. */
3572 ic = can_extend_p (dstmode, srcmode, 0);
3573 if (ic == CODE_FOR_nothing)
3574 continue;
3575
3576 /* Skip if the narrowed value isn't exact. */
3577 if (! exact_real_truncate (srcmode, &r))
3578 continue;
3579
3580 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3581
3582 if (LEGITIMATE_CONSTANT_P (trunc_y))
3583 {
3584 /* Skip if the target needs extra instructions to perform
3585 the extension. */
3586 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3587 continue;
3588 }
3589 else if (float_extend_from_mem[dstmode][srcmode])
3590 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3591 else
3592 continue;
3593
3594 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3595 last_insn = get_last_insn ();
3596
3597 if (GET_CODE (x) == REG)
3598 REG_NOTES (last_insn)
3599 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3600
3601 return last_insn;
3602 }
3603
3604 return NULL_RTX;
3605 }
3606 \f
3607 /* Pushing data onto the stack. */
3608
3609 /* Push a block of length SIZE (perhaps variable)
3610 and return an rtx to address the beginning of the block.
3611 Note that it is not possible for the value returned to be a QUEUED.
3612 The value may be virtual_outgoing_args_rtx.
3613
3614 EXTRA is the number of bytes of padding to push in addition to SIZE.
3615 BELOW nonzero means this padding comes at low addresses;
3616 otherwise, the padding comes at high addresses. */
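/* For example, a hypothetical call push_block (GEN_INT (16), 0, 0)
   reserves 16 bytes of stack space and returns an rtx addressing the
   start of that block.  */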
3617
3618 rtx
3619 push_block (size, extra, below)
3620 rtx size;
3621 int extra, below;
3622 {
3623 rtx temp;
3624
3625 size = convert_modes (Pmode, ptr_mode, size, 1);
3626 if (CONSTANT_P (size))
3627 anti_adjust_stack (plus_constant (size, extra));
3628 else if (GET_CODE (size) == REG && extra == 0)
3629 anti_adjust_stack (size);
3630 else
3631 {
3632 temp = copy_to_mode_reg (Pmode, size);
3633 if (extra != 0)
3634 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3635 temp, 0, OPTAB_LIB_WIDEN);
3636 anti_adjust_stack (temp);
3637 }
3638
3639 #ifndef STACK_GROWS_DOWNWARD
3640 if (0)
3641 #else
3642 if (1)
3643 #endif
3644 {
3645 temp = virtual_outgoing_args_rtx;
3646 if (extra != 0 && below)
3647 temp = plus_constant (temp, extra);
3648 }
3649 else
3650 {
3651 if (GET_CODE (size) == CONST_INT)
3652 temp = plus_constant (virtual_outgoing_args_rtx,
3653 -INTVAL (size) - (below ? 0 : extra));
3654 else if (extra != 0 && !below)
3655 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3656 negate_rtx (Pmode, plus_constant (size, extra)));
3657 else
3658 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3659 negate_rtx (Pmode, size));
3660 }
3661
3662 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3663 }
3664
3665 #ifdef PUSH_ROUNDING
3666
3667 /* Emit a single push insn. */
3668
3669 static void
3670 emit_single_push_insn (mode, x, type)
3671 rtx x;
3672 enum machine_mode mode;
3673 tree type;
3674 {
3675 rtx dest_addr;
3676 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3677 rtx dest;
3678 enum insn_code icode;
3679 insn_operand_predicate_fn pred;
3680
3681 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3682 /* If there is a push pattern, use it. Otherwise try the old way of
3683 throwing a MEM representing the push operation at the move expander. */
3684 icode = push_optab->handlers[(int) mode].insn_code;
3685 if (icode != CODE_FOR_nothing)
3686 {
3687 if (((pred = insn_data[(int) icode].operand[0].predicate)
3688 && !((*pred) (x, mode))))
3689 x = force_reg (mode, x);
3690 emit_insn (GEN_FCN (icode) (x));
3691 return;
3692 }
3693 if (GET_MODE_SIZE (mode) == rounded_size)
3694 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3695 else
3696 {
3697 #ifdef STACK_GROWS_DOWNWARD
3698 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3699 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3700 #else
3701 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3702 GEN_INT (rounded_size));
3703 #endif
3704 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3705 }
3706
3707 dest = gen_rtx_MEM (mode, dest_addr);
3708
3709 if (type != 0)
3710 {
3711 set_mem_attributes (dest, type, 1);
3712
3713 if (flag_optimize_sibling_calls)
3714 /* Function incoming arguments may overlap with sibling call
3715 outgoing arguments and we cannot allow reordering of reads
3716 from function arguments with stores to outgoing arguments
3717 of sibling calls. */
3718 set_mem_alias_set (dest, 0);
3719 }
3720 emit_move_insn (dest, x);
3721 }
3722 #endif
3723
3724 /* Generate code to push X onto the stack, assuming it has mode MODE and
3725 type TYPE.
3726 MODE is redundant except when X is a CONST_INT (since they don't
3727 carry mode info).
3728 SIZE is an rtx for the size of data to be copied (in bytes),
3729 needed only if X is BLKmode.
3730
3731 ALIGN (in bits) is maximum alignment we can assume.
3732
3733 If PARTIAL and REG are both nonzero, then copy that many of the first
3734 words of X into registers starting with REG, and push the rest of X.
3735 The amount of space pushed is decreased by PARTIAL words,
3736 rounded *down* to a multiple of PARM_BOUNDARY.
3737 REG must be a hard register in this case.
3738 If REG is zero but PARTIAL is not, take all other actions for an
3739 argument partially in registers, but do not actually load any
3740 registers.
3741
3742 EXTRA is the amount in bytes of extra space to leave next to this arg.
3743 This is ignored if an argument block has already been allocated.
3744
3745 On a machine that lacks real push insns, ARGS_ADDR is the address of
3746 the bottom of the argument block for this call. We use indexing off there
3747 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3748 argument block has not been preallocated.
3749
3750 ARGS_SO_FAR is the size of args previously pushed for this call.
3751
3752 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3753 for arguments passed in registers. If nonzero, it will be the number
3754 of bytes required. */
3755
3756 void
3757 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3758 args_addr, args_so_far, reg_parm_stack_space,
3759 alignment_pad)
3760 rtx x;
3761 enum machine_mode mode;
3762 tree type;
3763 rtx size;
3764 unsigned int align;
3765 int partial;
3766 rtx reg;
3767 int extra;
3768 rtx args_addr;
3769 rtx args_so_far;
3770 int reg_parm_stack_space;
3771 rtx alignment_pad;
3772 {
3773 rtx xinner;
3774 enum direction stack_direction
3775 #ifdef STACK_GROWS_DOWNWARD
3776 = downward;
3777 #else
3778 = upward;
3779 #endif
3780
3781 /* Decide where to pad the argument: `downward' for below,
3782 `upward' for above, or `none' for don't pad it.
3783 Default is below for small data on big-endian machines; else above. */
3784 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3785
3786 /* Invert direction if stack is post-decrement.
3787 FIXME: why? */
3788 if (STACK_PUSH_CODE == POST_DEC)
3789 if (where_pad != none)
3790 where_pad = (where_pad == downward ? upward : downward);
3791
3792 xinner = x = protect_from_queue (x, 0);
3793
3794 if (mode == BLKmode)
3795 {
3796 /* Copy a block into the stack, entirely or partially. */
3797
3798 rtx temp;
3799 int used = partial * UNITS_PER_WORD;
3800 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3801 int skip;
3802
3803 if (size == 0)
3804 abort ();
3805
3806 used -= offset;
3807
3808 /* USED is now the # of bytes we need not copy to the stack
3809 because registers will take care of them. */
3810
3811 if (partial != 0)
3812 xinner = adjust_address (xinner, BLKmode, used);
3813
3814 /* If the partial register-part of the arg counts in its stack size,
3815 skip the part of stack space corresponding to the registers.
3816 Otherwise, start copying to the beginning of the stack space,
3817 by setting SKIP to 0. */
3818 skip = (reg_parm_stack_space == 0) ? 0 : used;
3819
3820 #ifdef PUSH_ROUNDING
3821 /* Do it with several push insns if that doesn't take lots of insns
3822 and if there is no difficulty with push insns that skip bytes
3823 on the stack for alignment purposes. */
3824 if (args_addr == 0
3825 && PUSH_ARGS
3826 && GET_CODE (size) == CONST_INT
3827 && skip == 0
3828 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3829 /* Here we avoid the case of a structure whose weak alignment
3830 forces many pushes of a small amount of data,
3831 and such small pushes do rounding that causes trouble. */
3832 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3833 || align >= BIGGEST_ALIGNMENT
3834 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3835 == (align / BITS_PER_UNIT)))
3836 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3837 {
3838 /* Push padding now if padding above and stack grows down,
3839 or if padding below and stack grows up.
3840 But if space already allocated, this has already been done. */
3841 if (extra && args_addr == 0
3842 && where_pad != none && where_pad != stack_direction)
3843 anti_adjust_stack (GEN_INT (extra));
3844
3845 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3846 }
3847 else
3848 #endif /* PUSH_ROUNDING */
3849 {
3850 rtx target;
3851
3852 /* Otherwise make space on the stack and copy the data
3853 to the address of that space. */
3854
3855 /* Deduct words put into registers from the size we must copy. */
3856 if (partial != 0)
3857 {
3858 if (GET_CODE (size) == CONST_INT)
3859 size = GEN_INT (INTVAL (size) - used);
3860 else
3861 size = expand_binop (GET_MODE (size), sub_optab, size,
3862 GEN_INT (used), NULL_RTX, 0,
3863 OPTAB_LIB_WIDEN);
3864 }
3865
3866 /* Get the address of the stack space.
3867 In this case, we do not deal with EXTRA separately.
3868 A single stack adjust will do. */
3869 if (! args_addr)
3870 {
3871 temp = push_block (size, extra, where_pad == downward);
3872 extra = 0;
3873 }
3874 else if (GET_CODE (args_so_far) == CONST_INT)
3875 temp = memory_address (BLKmode,
3876 plus_constant (args_addr,
3877 skip + INTVAL (args_so_far)));
3878 else
3879 temp = memory_address (BLKmode,
3880 plus_constant (gen_rtx_PLUS (Pmode,
3881 args_addr,
3882 args_so_far),
3883 skip));
3884
3885 if (!ACCUMULATE_OUTGOING_ARGS)
3886 {
3887 /* If the source is referenced relative to the stack pointer,
3888 copy it to another register to stabilize it. We do not need
3889 to do this if we know that we won't be changing sp. */
3890
3891 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3892 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3893 temp = copy_to_reg (temp);
3894 }
3895
3896 target = gen_rtx_MEM (BLKmode, temp);
3897
3898 if (type != 0)
3899 {
3900 set_mem_attributes (target, type, 1);
3901 /* Function incoming arguments may overlap with sibling call
3902 outgoing arguments and we cannot allow reordering of reads
3903 from function arguments with stores to outgoing arguments
3904 of sibling calls. */
3905 set_mem_alias_set (target, 0);
3906 }
3907
3908 /* ALIGN may well be better aligned than TYPE, e.g. due to
3909 PARM_BOUNDARY. Assume the caller isn't lying. */
3910 set_mem_align (target, align);
3911
3912 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3913 }
3914 }
3915 else if (partial > 0)
3916 {
3917 /* Scalar partly in registers. */
3918
3919 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3920 int i;
3921 int not_stack;
3922 /* # words of start of argument
3923 that we must make space for but need not store. */
3924 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3925 int args_offset = INTVAL (args_so_far);
3926 int skip;
3927
3928 /* Push padding now if padding above and stack grows down,
3929 or if padding below and stack grows up.
3930 But if space already allocated, this has already been done. */
3931 if (extra && args_addr == 0
3932 && where_pad != none && where_pad != stack_direction)
3933 anti_adjust_stack (GEN_INT (extra));
3934
3935 /* If we make space by pushing it, we might as well push
3936 the real data. Otherwise, we can leave OFFSET nonzero
3937 and leave the space uninitialized. */
3938 if (args_addr == 0)
3939 offset = 0;
3940
3941 /* Now NOT_STACK gets the number of words that we don't need to
3942 allocate on the stack. */
3943 not_stack = partial - offset;
3944
3945 /* If the partial register-part of the arg counts in its stack size,
3946 skip the part of stack space corresponding to the registers.
3947 Otherwise, start copying to the beginning of the stack space,
3948 by setting SKIP to 0. */
3949 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3950
3951 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3952 x = validize_mem (force_const_mem (mode, x));
3953
3954 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3955 SUBREGs of such registers are not allowed. */
3956 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3957 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3958 x = copy_to_reg (x);
3959
3960 /* Loop over all the words allocated on the stack for this arg. */
3961 /* We can do it by words, because any scalar bigger than a word
3962 has a size that is a multiple of a word. */
3963 #ifndef PUSH_ARGS_REVERSED
3964 for (i = not_stack; i < size; i++)
3965 #else
3966 for (i = size - 1; i >= not_stack; i--)
3967 #endif
3968 if (i >= not_stack + offset)
3969 emit_push_insn (operand_subword_force (x, i, mode),
3970 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3971 0, args_addr,
3972 GEN_INT (args_offset + ((i - not_stack + skip)
3973 * UNITS_PER_WORD)),
3974 reg_parm_stack_space, alignment_pad);
3975 }
3976 else
3977 {
3978 rtx addr;
3979 rtx dest;
3980
3981 /* Push padding now if padding above and stack grows down,
3982 or if padding below and stack grows up.
3983 But if space already allocated, this has already been done. */
3984 if (extra && args_addr == 0
3985 && where_pad != none && where_pad != stack_direction)
3986 anti_adjust_stack (GEN_INT (extra));
3987
3988 #ifdef PUSH_ROUNDING
3989 if (args_addr == 0 && PUSH_ARGS)
3990 emit_single_push_insn (mode, x, type);
3991 else
3992 #endif
3993 {
3994 if (GET_CODE (args_so_far) == CONST_INT)
3995 addr
3996 = memory_address (mode,
3997 plus_constant (args_addr,
3998 INTVAL (args_so_far)));
3999 else
4000 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4001 args_so_far));
4002 dest = gen_rtx_MEM (mode, addr);
4003 if (type != 0)
4004 {
4005 set_mem_attributes (dest, type, 1);
4006 /* Function incoming arguments may overlap with sibling call
4007 outgoing arguments and we cannot allow reordering of reads
4008 from function arguments with stores to outgoing arguments
4009 of sibling calls. */
4010 set_mem_alias_set (dest, 0);
4011 }
4012
4013 emit_move_insn (dest, x);
4014 }
4015 }
4016
4017 /* If part should go in registers, copy that part
4018 into the appropriate registers. Do this now, at the end,
4019 since mem-to-mem copies above may do function calls. */
4020 if (partial > 0 && reg != 0)
4021 {
4022 /* Handle calls that pass values in multiple non-contiguous locations.
4023 The Irix 6 ABI has examples of this. */
4024 if (GET_CODE (reg) == PARALLEL)
4025 emit_group_load (reg, x, -1); /* ??? size? */
4026 else
4027 move_block_to_reg (REGNO (reg), x, partial, mode);
4028 }
4029
4030 if (extra && args_addr == 0 && where_pad == stack_direction)
4031 anti_adjust_stack (GEN_INT (extra));
4032
4033 if (alignment_pad && args_addr == 0)
4034 anti_adjust_stack (alignment_pad);
4035 }
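
/* Illustrative sketch only: a hypothetical call to emit_push_insn above for a
   single word-mode scalar X, with no partial-register part, no preallocated
   argument block and no extra padding.  All of the argument values are
   assumptions chosen for the example, mirroring the recursive call made for
   scalars partly in registers.  */
#if 0
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
                  PARM_BOUNDARY,  /* align (in bits) */
                  0,              /* partial: no words passed in registers */
                  NULL_RTX,       /* reg */
                  0,              /* extra padding bytes */
                  NULL_RTX,       /* args_addr: rely on push insns */
                  const0_rtx,     /* args_so_far */
                  0,              /* reg_parm_stack_space */
                  NULL_RTX);      /* alignment_pad */
#endif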
4036 \f
4037 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4038 operations. */
4039
4040 static rtx
4041 get_subtarget (x)
4042 rtx x;
4043 {
4044 return ((x == 0
4045 /* Only registers can be subtargets. */
4046 || GET_CODE (x) != REG
4047 /* If the register is readonly, it can't be set more than once. */
4048 || RTX_UNCHANGING_P (x)
4049 /* Don't use hard regs to avoid extending their life. */
4050 || REGNO (x) < FIRST_PSEUDO_REGISTER
4051 /* Avoid subtargets inside loops,
4052 since they hide some invariant expressions. */
4053 || preserve_subexpressions_p ())
4054 ? 0 : x);
4055 }
4056
4057 /* Expand an assignment that stores the value of FROM into TO.
4058 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4059 (This may contain a QUEUED rtx;
4060 if the value is constant, this rtx is a constant.)
4061 Otherwise, the returned value is NULL_RTX.
4062
4063 SUGGEST_REG is no longer actually used.
4064 It used to mean, copy the value through a register
4065 and return that register, if that is possible.
4066 We now use WANT_VALUE to decide whether to do this. */
4067
4068 rtx
4069 expand_assignment (to, from, want_value, suggest_reg)
4070 tree to, from;
4071 int want_value;
4072 int suggest_reg ATTRIBUTE_UNUSED;
4073 {
4074 rtx to_rtx = 0;
4075 rtx result;
4076
4077 /* Don't crash if the lhs of the assignment was erroneous. */
4078
4079 if (TREE_CODE (to) == ERROR_MARK)
4080 {
4081 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4082 return want_value ? result : NULL_RTX;
4083 }
4084
4085 /* Assignment of a structure component needs special treatment
4086 if the structure component's rtx is not simply a MEM.
4087 Assignment of an array element at a constant index, and assignment of
4088 an array element in an unaligned packed structure field, has the same
4089 problem. */
4090
4091 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4092 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4093 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4094 {
4095 enum machine_mode mode1;
4096 HOST_WIDE_INT bitsize, bitpos;
4097 rtx orig_to_rtx;
4098 tree offset;
4099 int unsignedp;
4100 int volatilep = 0;
4101 tree tem;
4102
4103 push_temp_slots ();
4104 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4105 &unsignedp, &volatilep);
4106
4107 /* If we are going to use store_bit_field and extract_bit_field,
4108 make sure to_rtx will be safe for multiple use. */
4109
4110 if (mode1 == VOIDmode && want_value)
4111 tem = stabilize_reference (tem);
4112
4113 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4114
4115 if (offset != 0)
4116 {
4117 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4118
4119 if (GET_CODE (to_rtx) != MEM)
4120 abort ();
4121
4122 #ifdef POINTERS_EXTEND_UNSIGNED
4123 if (GET_MODE (offset_rtx) != Pmode)
4124 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4125 #else
4126 if (GET_MODE (offset_rtx) != ptr_mode)
4127 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4128 #endif
4129
4130 /* A constant address in TO_RTX can have VOIDmode; we must not try
4131 to call force_reg for that case, so avoid it. */
4132 if (GET_CODE (to_rtx) == MEM
4133 && GET_MODE (to_rtx) == BLKmode
4134 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4135 && bitsize > 0
4136 && (bitpos % bitsize) == 0
4137 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4138 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4139 {
4140 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4141 bitpos = 0;
4142 }
4143
4144 to_rtx = offset_address (to_rtx, offset_rtx,
4145 highest_pow2_factor_for_type (TREE_TYPE (to),
4146 offset));
4147 }
4148
4149 if (GET_CODE (to_rtx) == MEM)
4150 {
4151 /* If the field is at offset zero, we could have been given the
4152 DECL_RTX of the parent struct. Don't munge it. */
4153 to_rtx = shallow_copy_rtx (to_rtx);
4154
4155 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4156 }
4157
4158 /* Deal with volatile and readonly fields. The former is only done
4159 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4160 if (volatilep && GET_CODE (to_rtx) == MEM)
4161 {
4162 if (to_rtx == orig_to_rtx)
4163 to_rtx = copy_rtx (to_rtx);
4164 MEM_VOLATILE_P (to_rtx) = 1;
4165 }
4166
4167 if (TREE_CODE (to) == COMPONENT_REF
4168 && TREE_READONLY (TREE_OPERAND (to, 1)))
4169 {
4170 if (to_rtx == orig_to_rtx)
4171 to_rtx = copy_rtx (to_rtx);
4172 RTX_UNCHANGING_P (to_rtx) = 1;
4173 }
4174
4175 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4176 {
4177 if (to_rtx == orig_to_rtx)
4178 to_rtx = copy_rtx (to_rtx);
4179 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4180 }
4181
4182 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4183 (want_value
4184 /* Spurious cast for HPUX compiler. */
4185 ? ((enum machine_mode)
4186 TYPE_MODE (TREE_TYPE (to)))
4187 : VOIDmode),
4188 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4189
4190 preserve_temp_slots (result);
4191 free_temp_slots ();
4192 pop_temp_slots ();
4193
4194 /* If the value is meaningful, convert RESULT to the proper mode.
4195 Otherwise, return nothing. */
4196 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4197 TYPE_MODE (TREE_TYPE (from)),
4198 result,
4199 TREE_UNSIGNED (TREE_TYPE (to)))
4200 : NULL_RTX);
4201 }
4202
4203 /* If the rhs is a function call and its value is not an aggregate,
4204 call the function before we start to compute the lhs.
4205 This is needed for correct code for cases such as
4206 val = setjmp (buf) on machines where reference to val
4207 requires loading up part of an address in a separate insn.
4208
4209 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4210 since it might be a promoted variable where the zero- or sign- extension
4211 needs to be done. Handling this in the normal way is safe because no
4212 computation is done before the call. */
4213 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4214 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4215 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4216 && GET_CODE (DECL_RTL (to)) == REG))
4217 {
4218 rtx value;
4219
4220 push_temp_slots ();
4221 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4222 if (to_rtx == 0)
4223 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4224
4225 /* Handle calls that return values in multiple non-contiguous locations.
4226 The Irix 6 ABI has examples of this. */
4227 if (GET_CODE (to_rtx) == PARALLEL)
4228 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4229 else if (GET_MODE (to_rtx) == BLKmode)
4230 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4231 else
4232 {
4233 #ifdef POINTERS_EXTEND_UNSIGNED
4234 if (POINTER_TYPE_P (TREE_TYPE (to))
4235 && GET_MODE (to_rtx) != GET_MODE (value))
4236 value = convert_memory_address (GET_MODE (to_rtx), value);
4237 #endif
4238 emit_move_insn (to_rtx, value);
4239 }
4240 preserve_temp_slots (to_rtx);
4241 free_temp_slots ();
4242 pop_temp_slots ();
4243 return want_value ? to_rtx : NULL_RTX;
4244 }
4245
4246 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4247 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4248
4249 if (to_rtx == 0)
4250 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4251
4252 /* Don't move directly into a return register. */
4253 if (TREE_CODE (to) == RESULT_DECL
4254 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4255 {
4256 rtx temp;
4257
4258 push_temp_slots ();
4259 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4260
4261 if (GET_CODE (to_rtx) == PARALLEL)
4262 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4263 else
4264 emit_move_insn (to_rtx, temp);
4265
4266 preserve_temp_slots (to_rtx);
4267 free_temp_slots ();
4268 pop_temp_slots ();
4269 return want_value ? to_rtx : NULL_RTX;
4270 }
4271
4272 /* In case we are returning the contents of an object which overlaps
4273 the place the value is being stored, use a safe function when copying
4274 a value through a pointer into a structure value return block. */
4275 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4276 && current_function_returns_struct
4277 && !current_function_returns_pcc_struct)
4278 {
4279 rtx from_rtx, size;
4280
4281 push_temp_slots ();
4282 size = expr_size (from);
4283 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4284
4285 if (TARGET_MEM_FUNCTIONS)
4286 emit_library_call (memmove_libfunc, LCT_NORMAL,
4287 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4288 XEXP (from_rtx, 0), Pmode,
4289 convert_to_mode (TYPE_MODE (sizetype),
4290 size, TREE_UNSIGNED (sizetype)),
4291 TYPE_MODE (sizetype));
4292 else
4293 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4294 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4295 XEXP (to_rtx, 0), Pmode,
4296 convert_to_mode (TYPE_MODE (integer_type_node),
4297 size,
4298 TREE_UNSIGNED (integer_type_node)),
4299 TYPE_MODE (integer_type_node));
4300
4301 preserve_temp_slots (to_rtx);
4302 free_temp_slots ();
4303 pop_temp_slots ();
4304 return want_value ? to_rtx : NULL_RTX;
4305 }
4306
4307 /* Compute FROM and store the value in the rtx we got. */
4308
4309 push_temp_slots ();
4310 result = store_expr (from, to_rtx, want_value);
4311 preserve_temp_slots (result);
4312 free_temp_slots ();
4313 pop_temp_slots ();
4314 return want_value ? result : NULL_RTX;
4315 }
4316
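/* Illustrative sketch only: hypothetical uses of expand_assignment above.
   LHS and RHS stand for already-built trees and are assumptions for the
   example; SUGGEST_REG is passed as 0 since it is no longer used.  */
#if 0
  expand_assignment (lhs, rhs, /* want_value */ 0, /* suggest_reg */ 0);
  rtx val = expand_assignment (lhs, rhs, /* want_value */ 1, 0);
#endif
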
4317 /* Generate code for computing expression EXP,
4318 and storing the value into TARGET.
4319 TARGET may contain a QUEUED rtx.
4320
4321 If WANT_VALUE & 1 is nonzero, return a copy of the value
4322 not in TARGET, so that we can be sure to use the proper
4323 value in a containing expression even if TARGET has something
4324 else stored in it. If possible, we copy the value through a pseudo
4325 and return that pseudo. Or, if the value is constant, we try to
4326 return the constant. In some cases, we return a pseudo
4327 copied *from* TARGET.
4328
4329 If the mode is BLKmode then we may return TARGET itself.
4330 It turns out that in BLKmode it doesn't cause a problem,
4331 because C has no operators that could combine two different
4332 assignments into the same BLKmode object with different values
4333 with no sequence point. Will other languages need this to
4334 be more thorough?
4335
4336 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4337 to catch quickly any cases where the caller uses the value
4338 and fails to set WANT_VALUE.
4339
4340 If WANT_VALUE & 2 is set, this is a store into a call param on the
4341 stack, and block moves may need to be treated specially. */
4342
4343 rtx
4344 store_expr (exp, target, want_value)
4345 tree exp;
4346 rtx target;
4347 int want_value;
4348 {
4349 rtx temp;
4350 int dont_return_target = 0;
4351 int dont_store_target = 0;
4352
4353 if (VOID_TYPE_P (TREE_TYPE (exp)))
4354 {
4355 /* C++ can generate ?: expressions with a throw expression in one
4356 branch and an rvalue in the other. Here, we resolve attempts to
4357 store the throw expression's nonexistent result. */
4358 if (want_value)
4359 abort ();
4360 expand_expr (exp, const0_rtx, VOIDmode, 0);
4361 return NULL_RTX;
4362 }
4363 if (TREE_CODE (exp) == COMPOUND_EXPR)
4364 {
4365 /* Perform first part of compound expression, then assign from second
4366 part. */
4367 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4368 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4369 emit_queue ();
4370 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4371 }
4372 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4373 {
4374 /* For conditional expression, get safe form of the target. Then
4375 test the condition, doing the appropriate assignment on either
4376 side. This avoids the creation of unnecessary temporaries.
4377 For non-BLKmode, it is more efficient not to do this. */
4378
4379 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4380
4381 emit_queue ();
4382 target = protect_from_queue (target, 1);
4383
4384 do_pending_stack_adjust ();
4385 NO_DEFER_POP;
4386 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4387 start_cleanup_deferral ();
4388 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4389 end_cleanup_deferral ();
4390 emit_queue ();
4391 emit_jump_insn (gen_jump (lab2));
4392 emit_barrier ();
4393 emit_label (lab1);
4394 start_cleanup_deferral ();
4395 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4396 end_cleanup_deferral ();
4397 emit_queue ();
4398 emit_label (lab2);
4399 OK_DEFER_POP;
4400
4401 return want_value & 1 ? target : NULL_RTX;
4402 }
4403 else if (queued_subexp_p (target))
4404 /* If target contains a postincrement, let's not risk
4405 using it as the place to generate the rhs. */
4406 {
4407 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4408 {
4409 /* Expand EXP into a new pseudo. */
4410 temp = gen_reg_rtx (GET_MODE (target));
4411 temp = expand_expr (exp, temp, GET_MODE (target),
4412 (want_value & 2
4413 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4414 }
4415 else
4416 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4417 (want_value & 2
4418 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4419
4420 /* If target is volatile, ANSI requires accessing the value
4421 *from* the target, if it is accessed. So make that happen.
4422 In no case return the target itself. */
4423 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4424 dont_return_target = 1;
4425 }
4426 else if ((want_value & 1) != 0
4427 && GET_CODE (target) == MEM
4428 && ! MEM_VOLATILE_P (target)
4429 && GET_MODE (target) != BLKmode)
4430 /* If target is in memory and caller wants value in a register instead,
4431 arrange that. Pass TARGET as target for expand_expr so that,
4432 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4433 We know expand_expr will not use the target in that case.
4434 Don't do this if TARGET is volatile because we are supposed
4435 to write it and then read it. */
4436 {
4437 temp = expand_expr (exp, target, GET_MODE (target),
4438 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4439 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4440 {
4441 /* If TEMP is already in the desired TARGET, only copy it from
4442 memory and don't store it there again. */
4443 if (temp == target
4444 || (rtx_equal_p (temp, target)
4445 && ! side_effects_p (temp) && ! side_effects_p (target)))
4446 dont_store_target = 1;
4447 temp = copy_to_reg (temp);
4448 }
4449 dont_return_target = 1;
4450 }
4451 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4452 /* If this is a scalar in a register that is stored in a wider mode
4453 than the declared mode, compute the result into its declared mode
4454 and then convert to the wider mode. Our value is the computed
4455 expression. */
4456 {
4457 rtx inner_target = 0;
4458
4459 /* If we don't want a value, we can do the conversion inside EXP,
4460 which will often result in some optimizations. Do the conversion
4461 in two steps: first change the signedness, if needed, then
4462 the extend. But don't do this if the type of EXP is a subtype
4463 of something else since then the conversion might involve
4464 more than just converting modes. */
4465 if ((want_value & 1) == 0
4466 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4467 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4468 {
4469 if (TREE_UNSIGNED (TREE_TYPE (exp))
4470 != SUBREG_PROMOTED_UNSIGNED_P (target))
4471 exp = convert
4472 ((*lang_hooks.types.signed_or_unsigned_type)
4473 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4474
4475 exp = convert ((*lang_hooks.types.type_for_mode)
4476 (GET_MODE (SUBREG_REG (target)),
4477 SUBREG_PROMOTED_UNSIGNED_P (target)),
4478 exp);
4479
4480 inner_target = SUBREG_REG (target);
4481 }
4482
4483 temp = expand_expr (exp, inner_target, VOIDmode,
4484 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4485
4486 /* If TEMP is a MEM and we want a result value, make the access
4487 now so it gets done only once. Strictly speaking, this is
4488 only necessary if the MEM is volatile, or if the address
4489 overlaps TARGET. But not performing the load twice also
4490 reduces the amount of rtl we generate and then have to CSE. */
4491 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4492 temp = copy_to_reg (temp);
4493
4494 /* If TEMP is a VOIDmode constant, use convert_modes to make
4495 sure that we properly convert it. */
4496 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4497 {
4498 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4499 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4500 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4501 GET_MODE (target), temp,
4502 SUBREG_PROMOTED_UNSIGNED_P (target));
4503 }
4504
4505 convert_move (SUBREG_REG (target), temp,
4506 SUBREG_PROMOTED_UNSIGNED_P (target));
4507
4508 /* If we promoted a constant, change the mode back down to match
4509 target. Otherwise, the caller might get confused by a result whose
4510 mode is larger than expected. */
4511
4512 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4513 {
4514 if (GET_MODE (temp) != VOIDmode)
4515 {
4516 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4517 SUBREG_PROMOTED_VAR_P (temp) = 1;
4518 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4519 SUBREG_PROMOTED_UNSIGNED_P (target));
4520 }
4521 else
4522 temp = convert_modes (GET_MODE (target),
4523 GET_MODE (SUBREG_REG (target)),
4524 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4525 }
4526
4527 return want_value & 1 ? temp : NULL_RTX;
4528 }
4529 else
4530 {
4531 temp = expand_expr (exp, target, GET_MODE (target),
4532 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4533 /* Return TARGET if it's a specified hardware register.
4534 If TARGET is a volatile mem ref, either return TARGET
4535 or return a reg copied *from* TARGET; ANSI requires this.
4536
4537 Otherwise, if TEMP is not TARGET, return TEMP
4538 if it is constant (for efficiency),
4539 or if we really want the correct value. */
4540 if (!(target && GET_CODE (target) == REG
4541 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4542 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4543 && ! rtx_equal_p (temp, target)
4544 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4545 dont_return_target = 1;
4546 }
4547
4548 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4549 the same as that of TARGET, adjust the constant. This is needed, for
4550 example, in case it is a CONST_DOUBLE and we want only a word-sized
4551 value. */
4552 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4553 && TREE_CODE (exp) != ERROR_MARK
4554 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4555 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4556 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4557
4558 /* If value was not generated in the target, store it there.
4559 Convert the value to TARGET's type first if necessary.
4560 If TEMP and TARGET compare equal according to rtx_equal_p, but
4561 one or both of them are volatile memory refs, we have to distinguish
4562 two cases:
4563 - expand_expr has used TARGET. In this case, we must not generate
4564 another copy. This can be detected by TARGET being equal according
4565 to == .
4566 - expand_expr has not used TARGET - that means that the source just
4567 happens to have the same RTX form. Since temp will have been created
4568 by expand_expr, it will compare unequal according to == .
4569 We must generate a copy in this case, to reach the correct number
4570 of volatile memory references. */
4571
4572 if ((! rtx_equal_p (temp, target)
4573 || (temp != target && (side_effects_p (temp)
4574 || side_effects_p (target))))
4575 && TREE_CODE (exp) != ERROR_MARK
4576 && ! dont_store_target
4577 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4578 but TARGET is not valid memory reference, TEMP will differ
4579 from TARGET although it is really the same location. */
4580 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4581 || target != DECL_RTL_IF_SET (exp))
4582 /* If there's nothing to copy, don't bother. Don't call expr_size
4583 unless necessary, because the expr_size hook of some front ends (C++)
4584 aborts on objects that are not supposed to be bit-copied or
4585 bit-initialized. */
4586 && expr_size (exp) != const0_rtx)
4587 {
4588 target = protect_from_queue (target, 1);
4589 if (GET_MODE (temp) != GET_MODE (target)
4590 && GET_MODE (temp) != VOIDmode)
4591 {
4592 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4593 if (dont_return_target)
4594 {
4595 /* In this case, we will return TEMP,
4596 so make sure it has the proper mode.
4597 But don't forget to store the value into TARGET. */
4598 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4599 emit_move_insn (target, temp);
4600 }
4601 else
4602 convert_move (target, temp, unsignedp);
4603 }
4604
4605 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4606 {
4607 /* Handle copying a string constant into an array. The string
4608 constant may be shorter than the array. So copy just the string's
4609 actual length, and clear the rest. First get the size of the data
4610 type of the string, which is actually the size of the target. */
4611 rtx size = expr_size (exp);
4612
4613 if (GET_CODE (size) == CONST_INT
4614 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4615 emit_block_move (target, temp, size,
4616 (want_value & 2
4617 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4618 else
4619 {
4620 /* Compute the size of the data to copy from the string. */
4621 tree copy_size
4622 = size_binop (MIN_EXPR,
4623 make_tree (sizetype, size),
4624 size_int (TREE_STRING_LENGTH (exp)));
4625 rtx copy_size_rtx
4626 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4627 (want_value & 2
4628 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4629 rtx label = 0;
4630
4631 /* Copy that much. */
4632 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4633 emit_block_move (target, temp, copy_size_rtx,
4634 (want_value & 2
4635 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4636
4637 /* Figure out how much is left in TARGET that we have to clear.
4638 Do all calculations in ptr_mode. */
4639 if (GET_CODE (copy_size_rtx) == CONST_INT)
4640 {
4641 size = plus_constant (size, -INTVAL (copy_size_rtx));
4642 target = adjust_address (target, BLKmode,
4643 INTVAL (copy_size_rtx));
4644 }
4645 else
4646 {
4647 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4648 copy_size_rtx, NULL_RTX, 0,
4649 OPTAB_LIB_WIDEN);
4650
4651 #ifdef POINTERS_EXTEND_UNSIGNED
4652 if (GET_MODE (copy_size_rtx) != Pmode)
4653 copy_size_rtx = convert_memory_address (Pmode,
4654 copy_size_rtx);
4655 #endif
4656
4657 target = offset_address (target, copy_size_rtx,
4658 highest_pow2_factor (copy_size));
4659 label = gen_label_rtx ();
4660 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4661 GET_MODE (size), 0, label);
4662 }
4663
4664 if (size != const0_rtx)
4665 clear_storage (target, size);
4666
4667 if (label)
4668 emit_label (label);
4669 }
4670 }
4671 /* Handle calls that return values in multiple non-contiguous locations.
4672 The Irix 6 ABI has examples of this. */
4673 else if (GET_CODE (target) == PARALLEL)
4674 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4675 else if (GET_MODE (temp) == BLKmode)
4676 emit_block_move (target, temp, expr_size (exp),
4677 (want_value & 2
4678 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4679 else
4680 emit_move_insn (target, temp);
4681 }
4682
4683 /* If we don't want a value, return NULL_RTX. */
4684 if ((want_value & 1) == 0)
4685 return NULL_RTX;
4686
4687 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4688 ??? The latter test doesn't seem to make sense. */
4689 else if (dont_return_target && GET_CODE (temp) != MEM)
4690 return temp;
4691
4692 /* Return TARGET itself if it is a hard register. */
4693 else if ((want_value & 1) != 0
4694 && GET_MODE (target) != BLKmode
4695 && ! (GET_CODE (target) == REG
4696 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4697 return copy_to_reg (target);
4698
4699 else
4700 return target;
4701 }
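
/* Illustrative sketch only: hypothetical uses of store_expr above.  EXP and
   TARGET are assumed to already exist; the flag values follow the WANT_VALUE
   encoding documented before the function (WANT_VALUE & 1 requests the value,
   WANT_VALUE & 2 marks a store into a call parameter on the stack).  */
#if 0
  store_expr (exp, target, 0);              /* just store, no value needed */
  rtx value = store_expr (exp, target, 1);  /* store and return the value  */
  store_expr (exp, target, 2);              /* store into a call parameter */
#endif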
4702 \f
4703 /* Return 1 if EXP just contains zeros. */
4704
4705 static int
4706 is_zeros_p (exp)
4707 tree exp;
4708 {
4709 tree elt;
4710
4711 switch (TREE_CODE (exp))
4712 {
4713 case CONVERT_EXPR:
4714 case NOP_EXPR:
4715 case NON_LVALUE_EXPR:
4716 case VIEW_CONVERT_EXPR:
4717 return is_zeros_p (TREE_OPERAND (exp, 0));
4718
4719 case INTEGER_CST:
4720 return integer_zerop (exp);
4721
4722 case COMPLEX_CST:
4723 return
4724 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4725
4726 case REAL_CST:
4727 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4728
4729 case VECTOR_CST:
4730 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4731 elt = TREE_CHAIN (elt))
4732 if (!is_zeros_p (TREE_VALUE (elt)))
4733 return 0;
4734
4735 return 1;
4736
4737 case CONSTRUCTOR:
4738 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4739 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4740 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4741 if (! is_zeros_p (TREE_VALUE (elt)))
4742 return 0;
4743
4744 return 1;
4745
4746 default:
4747 return 0;
4748 }
4749 }
4750
4751 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4752
4753 static int
4754 mostly_zeros_p (exp)
4755 tree exp;
4756 {
4757 if (TREE_CODE (exp) == CONSTRUCTOR)
4758 {
4759 int elts = 0, zeros = 0;
4760 tree elt = CONSTRUCTOR_ELTS (exp);
4761 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4762 {
4763 /* If there are no ranges of true bits, it is all zero. */
4764 return elt == NULL_TREE;
4765 }
4766 for (; elt; elt = TREE_CHAIN (elt))
4767 {
4768 /* We do not handle the case where the index is a RANGE_EXPR,
4769 so the statistic will be somewhat inaccurate.
4770 We do make a more accurate count in store_constructor itself,
4771 and since this function is only used for nested array elements,
4772 this should be close enough. */
4773 if (mostly_zeros_p (TREE_VALUE (elt)))
4774 zeros++;
4775 elts++;
4776 }
4777
4778 return 4 * zeros >= 3 * elts;
4779 }
4780
4781 return is_zeros_p (exp);
4782 }
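
/* Worked example (an illustration, not from the original sources): for a
   CONSTRUCTOR with 8 counted elements of which 6 are zero, the test
   4 * zeros >= 3 * elts becomes 24 >= 24, so mostly_zeros_p returns 1;
   with only 5 zero elements it becomes 20 >= 24 and the result is 0.  */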
4783 \f
4784 /* Helper function for store_constructor.
4785 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4786 TYPE is the type of the CONSTRUCTOR, not the element type.
4787 CLEARED is as for store_constructor.
4788 ALIAS_SET is the alias set to use for any stores.
4789
4790 This provides a recursive shortcut back to store_constructor when it isn't
4791 necessary to go through store_field. This is so that we can pass through
4792 the cleared field to let store_constructor know that we may not have to
4793 clear a substructure if the outer structure has already been cleared. */
4794
4795 static void
4796 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4797 alias_set)
4798 rtx target;
4799 unsigned HOST_WIDE_INT bitsize;
4800 HOST_WIDE_INT bitpos;
4801 enum machine_mode mode;
4802 tree exp, type;
4803 int cleared;
4804 int alias_set;
4805 {
4806 if (TREE_CODE (exp) == CONSTRUCTOR
4807 && bitpos % BITS_PER_UNIT == 0
4808 /* If we have a nonzero bitpos for a register target, then we just
4809 let store_field do the bitfield handling. This is unlikely to
4810 generate unnecessary clear instructions anyway. */
4811 && (bitpos == 0 || GET_CODE (target) == MEM))
4812 {
4813 if (GET_CODE (target) == MEM)
4814 target
4815 = adjust_address (target,
4816 GET_MODE (target) == BLKmode
4817 || 0 != (bitpos
4818 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4819 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4820
4821
4822 /* Update the alias set, if required. */
4823 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4824 && MEM_ALIAS_SET (target) != 0)
4825 {
4826 target = copy_rtx (target);
4827 set_mem_alias_set (target, alias_set);
4828 }
4829
4830 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4831 }
4832 else
4833 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4834 alias_set);
4835 }
4836
4837 /* Store the value of constructor EXP into the rtx TARGET.
4838 TARGET is either a REG or a MEM; we know it cannot conflict, since
4839 safe_from_p has been called.
4840 CLEARED is true if TARGET is known to have been zero'd.
4841 SIZE is the number of bytes of TARGET we are allowed to modify: this
4842 may not be the same as the size of EXP if we are assigning to a field
4843 which has been packed to exclude padding bits. */
4844
4845 static void
4846 store_constructor (exp, target, cleared, size)
4847 tree exp;
4848 rtx target;
4849 int cleared;
4850 HOST_WIDE_INT size;
4851 {
4852 tree type = TREE_TYPE (exp);
4853 #ifdef WORD_REGISTER_OPERATIONS
4854 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4855 #endif
4856
4857 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4858 || TREE_CODE (type) == QUAL_UNION_TYPE)
4859 {
4860 tree elt;
4861
4862 /* We either clear the aggregate or indicate the value is dead. */
4863 if ((TREE_CODE (type) == UNION_TYPE
4864 || TREE_CODE (type) == QUAL_UNION_TYPE)
4865 && ! cleared
4866 && ! CONSTRUCTOR_ELTS (exp))
4867 /* If the constructor is empty, clear the union. */
4868 {
4869 clear_storage (target, expr_size (exp));
4870 cleared = 1;
4871 }
4872
4873 /* If we are building a static constructor into a register,
4874 set the initial value as zero so we can fold the value into
4875 a constant. But if more than one register is involved,
4876 this probably loses. */
4877 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4878 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4879 {
4880 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4881 cleared = 1;
4882 }
4883
4884 /* If the constructor has fewer fields than the structure
4885 or if we are initializing the structure to mostly zeros,
4886 clear the whole structure first. Don't do this if TARGET is a
4887 register whose mode size isn't equal to SIZE since clear_storage
4888 can't handle this case. */
4889 else if (! cleared && size > 0
4890 && ((list_length (CONSTRUCTOR_ELTS (exp))
4891 != fields_length (type))
4892 || mostly_zeros_p (exp))
4893 && (GET_CODE (target) != REG
4894 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4895 == size)))
4896 {
4897 clear_storage (target, GEN_INT (size));
4898 cleared = 1;
4899 }
4900
4901 if (! cleared)
4902 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4903
4904 /* Store each element of the constructor into
4905 the corresponding field of TARGET. */
4906
4907 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4908 {
4909 tree field = TREE_PURPOSE (elt);
4910 tree value = TREE_VALUE (elt);
4911 enum machine_mode mode;
4912 HOST_WIDE_INT bitsize;
4913 HOST_WIDE_INT bitpos = 0;
4914 tree offset;
4915 rtx to_rtx = target;
4916
4917 /* Just ignore missing fields.
4918 We cleared the whole structure, above,
4919 if any fields are missing. */
4920 if (field == 0)
4921 continue;
4922
4923 if (cleared && is_zeros_p (value))
4924 continue;
4925
4926 if (host_integerp (DECL_SIZE (field), 1))
4927 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4928 else
4929 bitsize = -1;
4930
4931 mode = DECL_MODE (field);
4932 if (DECL_BIT_FIELD (field))
4933 mode = VOIDmode;
4934
4935 offset = DECL_FIELD_OFFSET (field);
4936 if (host_integerp (offset, 0)
4937 && host_integerp (bit_position (field), 0))
4938 {
4939 bitpos = int_bit_position (field);
4940 offset = 0;
4941 }
4942 else
4943 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4944
4945 if (offset)
4946 {
4947 rtx offset_rtx;
4948
4949 if (contains_placeholder_p (offset))
4950 offset = build (WITH_RECORD_EXPR, sizetype,
4951 offset, make_tree (TREE_TYPE (exp), target));
4952
4953 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4954 if (GET_CODE (to_rtx) != MEM)
4955 abort ();
4956
4957 #ifdef POINTERS_EXTEND_UNSIGNED
4958 if (GET_MODE (offset_rtx) != Pmode)
4959 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4960 #else
4961 if (GET_MODE (offset_rtx) != ptr_mode)
4962 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4963 #endif
4964
4965 to_rtx = offset_address (to_rtx, offset_rtx,
4966 highest_pow2_factor (offset));
4967 }
4968
4969 if (TREE_READONLY (field))
4970 {
4971 if (GET_CODE (to_rtx) == MEM)
4972 to_rtx = copy_rtx (to_rtx);
4973
4974 RTX_UNCHANGING_P (to_rtx) = 1;
4975 }
4976
4977 #ifdef WORD_REGISTER_OPERATIONS
4978 /* If this initializes a field that is smaller than a word, at the
4979 start of a word, try to widen it to a full word.
4980 This special case allows us to output C++ member function
4981 initializations in a form that the optimizers can understand. */
4982 if (GET_CODE (target) == REG
4983 && bitsize < BITS_PER_WORD
4984 && bitpos % BITS_PER_WORD == 0
4985 && GET_MODE_CLASS (mode) == MODE_INT
4986 && TREE_CODE (value) == INTEGER_CST
4987 && exp_size >= 0
4988 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4989 {
4990 tree type = TREE_TYPE (value);
4991
4992 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4993 {
4994 type = (*lang_hooks.types.type_for_size)
4995 (BITS_PER_WORD, TREE_UNSIGNED (type));
4996 value = convert (type, value);
4997 }
4998
4999 if (BYTES_BIG_ENDIAN)
5000 value
5001 = fold (build (LSHIFT_EXPR, type, value,
5002 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5003 bitsize = BITS_PER_WORD;
5004 mode = word_mode;
5005 }
5006 #endif
5007
5008 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5009 && DECL_NONADDRESSABLE_P (field))
5010 {
5011 to_rtx = copy_rtx (to_rtx);
5012 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5013 }
5014
5015 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5016 value, type, cleared,
5017 get_alias_set (TREE_TYPE (field)));
5018 }
5019 }
5020 else if (TREE_CODE (type) == ARRAY_TYPE
5021 || TREE_CODE (type) == VECTOR_TYPE)
5022 {
5023 tree elt;
5024 int i;
5025 int need_to_clear;
5026 tree domain = TYPE_DOMAIN (type);
5027 tree elttype = TREE_TYPE (type);
5028 int const_bounds_p;
5029 HOST_WIDE_INT minelt = 0;
5030 HOST_WIDE_INT maxelt = 0;
5031
5032 /* Vectors are like arrays, but the domain is stored via an array
5033 type indirectly. */
5034 if (TREE_CODE (type) == VECTOR_TYPE)
5035 {
5036 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5037 the same field as TYPE_DOMAIN, we are not guaranteed that
5038 it always will. */
5039 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5040 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5041 }
5042
5043 const_bounds_p = (TYPE_MIN_VALUE (domain)
5044 && TYPE_MAX_VALUE (domain)
5045 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5046 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5047
5048 /* If we have constant bounds for the range of the type, get them. */
5049 if (const_bounds_p)
5050 {
5051 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5052 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5053 }
5054
5055 /* If the constructor has fewer elements than the array,
5056 clear the whole array first. Similarly if this is
5057 a static constructor of a non-BLKmode object. */
5058 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5059 need_to_clear = 1;
5060 else
5061 {
5062 HOST_WIDE_INT count = 0, zero_count = 0;
5063 need_to_clear = ! const_bounds_p;
5064
5065 /* This loop is a more accurate version of the loop in
5066 mostly_zeros_p (it handles RANGE_EXPR in an index).
5067 It is also needed to check for missing elements. */
5068 for (elt = CONSTRUCTOR_ELTS (exp);
5069 elt != NULL_TREE && ! need_to_clear;
5070 elt = TREE_CHAIN (elt))
5071 {
5072 tree index = TREE_PURPOSE (elt);
5073 HOST_WIDE_INT this_node_count;
5074
5075 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5076 {
5077 tree lo_index = TREE_OPERAND (index, 0);
5078 tree hi_index = TREE_OPERAND (index, 1);
5079
5080 if (! host_integerp (lo_index, 1)
5081 || ! host_integerp (hi_index, 1))
5082 {
5083 need_to_clear = 1;
5084 break;
5085 }
5086
5087 this_node_count = (tree_low_cst (hi_index, 1)
5088 - tree_low_cst (lo_index, 1) + 1);
5089 }
5090 else
5091 this_node_count = 1;
5092
5093 count += this_node_count;
5094 if (mostly_zeros_p (TREE_VALUE (elt)))
5095 zero_count += this_node_count;
5096 }
5097
5098 /* Clear the entire array first if there are any missing elements,
5099 or if the incidence of zero elements is >= 75%. */
5100 if (! need_to_clear
5101 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5102 need_to_clear = 1;
5103 }
5104
5105 if (need_to_clear && size > 0)
5106 {
5107 if (! cleared)
5108 {
5109 if (REG_P (target))
5110 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5111 else
5112 clear_storage (target, GEN_INT (size));
5113 }
5114 cleared = 1;
5115 }
5116 else if (REG_P (target))
5117 /* Inform later passes that the old value is dead. */
5118 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5119
5120 /* Store each element of the constructor into
5121 the corresponding element of TARGET, determined
5122 by counting the elements. */
5123 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5124 elt;
5125 elt = TREE_CHAIN (elt), i++)
5126 {
5127 enum machine_mode mode;
5128 HOST_WIDE_INT bitsize;
5129 HOST_WIDE_INT bitpos;
5130 int unsignedp;
5131 tree value = TREE_VALUE (elt);
5132 tree index = TREE_PURPOSE (elt);
5133 rtx xtarget = target;
5134
5135 if (cleared && is_zeros_p (value))
5136 continue;
5137
5138 unsignedp = TREE_UNSIGNED (elttype);
5139 mode = TYPE_MODE (elttype);
5140 if (mode == BLKmode)
5141 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5142 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5143 : -1);
5144 else
5145 bitsize = GET_MODE_BITSIZE (mode);
5146
5147 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5148 {
5149 tree lo_index = TREE_OPERAND (index, 0);
5150 tree hi_index = TREE_OPERAND (index, 1);
5151 rtx index_r, pos_rtx, loop_end;
5152 struct nesting *loop;
5153 HOST_WIDE_INT lo, hi, count;
5154 tree position;
5155
5156 /* If the range is constant and "small", unroll the loop. */
5157 if (const_bounds_p
5158 && host_integerp (lo_index, 0)
5159 && host_integerp (hi_index, 0)
5160 && (lo = tree_low_cst (lo_index, 0),
5161 hi = tree_low_cst (hi_index, 0),
5162 count = hi - lo + 1,
5163 (GET_CODE (target) != MEM
5164 || count <= 2
5165 || (host_integerp (TYPE_SIZE (elttype), 1)
5166 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5167 <= 40 * 8)))))
5168 {
5169 lo -= minelt; hi -= minelt;
5170 for (; lo <= hi; lo++)
5171 {
5172 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5173
5174 if (GET_CODE (target) == MEM
5175 && !MEM_KEEP_ALIAS_SET_P (target)
5176 && TREE_CODE (type) == ARRAY_TYPE
5177 && TYPE_NONALIASED_COMPONENT (type))
5178 {
5179 target = copy_rtx (target);
5180 MEM_KEEP_ALIAS_SET_P (target) = 1;
5181 }
5182
5183 store_constructor_field
5184 (target, bitsize, bitpos, mode, value, type, cleared,
5185 get_alias_set (elttype));
5186 }
5187 }
5188 else
5189 {
5190 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5191 loop_end = gen_label_rtx ();
5192
5193 unsignedp = TREE_UNSIGNED (domain);
5194
5195 index = build_decl (VAR_DECL, NULL_TREE, domain);
5196
5197 index_r
5198 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5199 &unsignedp, 0));
5200 SET_DECL_RTL (index, index_r);
5201 if (TREE_CODE (value) == SAVE_EXPR
5202 && SAVE_EXPR_RTL (value) == 0)
5203 {
5204 /* Make sure value gets expanded once before the
5205 loop. */
5206 expand_expr (value, const0_rtx, VOIDmode, 0);
5207 emit_queue ();
5208 }
5209 store_expr (lo_index, index_r, 0);
5210 loop = expand_start_loop (0);
5211
5212 /* Assign value to element index. */
5213 position
5214 = convert (ssizetype,
5215 fold (build (MINUS_EXPR, TREE_TYPE (index),
5216 index, TYPE_MIN_VALUE (domain))));
5217 position = size_binop (MULT_EXPR, position,
5218 convert (ssizetype,
5219 TYPE_SIZE_UNIT (elttype)));
5220
5221 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5222 xtarget = offset_address (target, pos_rtx,
5223 highest_pow2_factor (position));
5224 xtarget = adjust_address (xtarget, mode, 0);
5225 if (TREE_CODE (value) == CONSTRUCTOR)
5226 store_constructor (value, xtarget, cleared,
5227 bitsize / BITS_PER_UNIT);
5228 else
5229 store_expr (value, xtarget, 0);
5230
5231 expand_exit_loop_if_false (loop,
5232 build (LT_EXPR, integer_type_node,
5233 index, hi_index));
5234
5235 expand_increment (build (PREINCREMENT_EXPR,
5236 TREE_TYPE (index),
5237 index, integer_one_node), 0, 0);
5238 expand_end_loop ();
5239 emit_label (loop_end);
5240 }
5241 }
5242 else if ((index != 0 && ! host_integerp (index, 0))
5243 || ! host_integerp (TYPE_SIZE (elttype), 1))
5244 {
5245 tree position;
5246
5247 if (index == 0)
5248 index = ssize_int (1);
5249
5250 if (minelt)
5251 index = convert (ssizetype,
5252 fold (build (MINUS_EXPR, index,
5253 TYPE_MIN_VALUE (domain))));
5254
5255 position = size_binop (MULT_EXPR, index,
5256 convert (ssizetype,
5257 TYPE_SIZE_UNIT (elttype)));
5258 xtarget = offset_address (target,
5259 expand_expr (position, 0, VOIDmode, 0),
5260 highest_pow2_factor (position));
5261 xtarget = adjust_address (xtarget, mode, 0);
5262 store_expr (value, xtarget, 0);
5263 }
5264 else
5265 {
5266 if (index != 0)
5267 bitpos = ((tree_low_cst (index, 0) - minelt)
5268 * tree_low_cst (TYPE_SIZE (elttype), 1));
5269 else
5270 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5271
5272 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5273 && TREE_CODE (type) == ARRAY_TYPE
5274 && TYPE_NONALIASED_COMPONENT (type))
5275 {
5276 target = copy_rtx (target);
5277 MEM_KEEP_ALIAS_SET_P (target) = 1;
5278 }
5279
5280 store_constructor_field (target, bitsize, bitpos, mode, value,
5281 type, cleared, get_alias_set (elttype));
5282
5283 }
5284 }
5285 }
5286
5287 /* Set constructor assignments. */
5288 else if (TREE_CODE (type) == SET_TYPE)
5289 {
5290 tree elt = CONSTRUCTOR_ELTS (exp);
5291 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5292 tree domain = TYPE_DOMAIN (type);
5293 tree domain_min, domain_max, bitlength;
5294
5295 /* The default implementation strategy is to extract the constant
5296 parts of the constructor, use that to initialize the target,
5297 and then "or" in whatever non-constant ranges we need in addition.
5298
5299 If a large set is all zero or all ones, it is
5300 probably better to set it using memset (if available) or bzero.
5301 Also, if a large set has just a single range, it may be
5302 better to first clear the whole set (using
5303 bzero/memset) and then set the bits we want. */
5304
5305 /* Check for all zeros. */
5306 if (elt == NULL_TREE && size > 0)
5307 {
5308 if (!cleared)
5309 clear_storage (target, GEN_INT (size));
5310 return;
5311 }
5312
5313 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5314 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5315 bitlength = size_binop (PLUS_EXPR,
5316 size_diffop (domain_max, domain_min),
5317 ssize_int (1));
5318
5319 nbits = tree_low_cst (bitlength, 1);
5320
5321 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5322 are "complicated" (more than one range), initialize (the
5323 constant parts) by copying from a constant. */
5324 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5325 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5326 {
5327 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5328 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5329 char *bit_buffer = (char *) alloca (nbits);
5330 HOST_WIDE_INT word = 0;
5331 unsigned int bit_pos = 0;
5332 unsigned int ibit = 0;
5333 unsigned int offset = 0; /* In bytes from beginning of set. */
5334
5335 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5336 for (;;)
5337 {
5338 if (bit_buffer[ibit])
5339 {
5340 if (BYTES_BIG_ENDIAN)
5341 word |= (1 << (set_word_size - 1 - bit_pos));
5342 else
5343 word |= 1 << bit_pos;
5344 }
5345
5346 bit_pos++; ibit++;
5347 if (bit_pos >= set_word_size || ibit == nbits)
5348 {
5349 if (word != 0 || ! cleared)
5350 {
5351 rtx datum = GEN_INT (word);
5352 rtx to_rtx;
5353
5354 /* The assumption here is that it is safe to use
5355 XEXP if the set is multi-word, but not if
5356 it's single-word. */
5357 if (GET_CODE (target) == MEM)
5358 to_rtx = adjust_address (target, mode, offset);
5359 else if (offset == 0)
5360 to_rtx = target;
5361 else
5362 abort ();
5363 emit_move_insn (to_rtx, datum);
5364 }
5365
5366 if (ibit == nbits)
5367 break;
5368 word = 0;
5369 bit_pos = 0;
5370 offset += set_word_size / BITS_PER_UNIT;
5371 }
5372 }
5373 }
5374 else if (!cleared)
5375 /* Don't bother clearing storage if the set is all ones. */
5376 if (TREE_CHAIN (elt) != NULL_TREE
5377 || (TREE_PURPOSE (elt) == NULL_TREE
5378 ? nbits != 1
5379 : ( ! host_integerp (TREE_VALUE (elt), 0)
5380 || ! host_integerp (TREE_PURPOSE (elt), 0)
5381 || (tree_low_cst (TREE_VALUE (elt), 0)
5382 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5383 != (HOST_WIDE_INT) nbits))))
5384 clear_storage (target, expr_size (exp));
5385
5386 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5387 {
5388 /* Start of range of element or NULL. */
5389 tree startbit = TREE_PURPOSE (elt);
5390 /* End of range of element, or element value. */
5391 tree endbit = TREE_VALUE (elt);
5392 HOST_WIDE_INT startb, endb;
5393 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5394
5395 bitlength_rtx = expand_expr (bitlength,
5396 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5397
5398 /* Handle non-range tuple element like [ expr ]. */
5399 if (startbit == NULL_TREE)
5400 {
5401 startbit = save_expr (endbit);
5402 endbit = startbit;
5403 }
5404
5405 startbit = convert (sizetype, startbit);
5406 endbit = convert (sizetype, endbit);
5407 if (! integer_zerop (domain_min))
5408 {
5409 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5410 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5411 }
5412 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5413 EXPAND_CONST_ADDRESS);
5414 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5415 EXPAND_CONST_ADDRESS);
5416
5417 if (REG_P (target))
5418 {
5419 targetx
5420 = assign_temp
5421 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5422 (GET_MODE (target), 0),
5423 TYPE_QUAL_CONST)),
5424 0, 1, 1);
5425 emit_move_insn (targetx, target);
5426 }
5427
5428 else if (GET_CODE (target) == MEM)
5429 targetx = target;
5430 else
5431 abort ();
5432
5433 /* Optimization: If startbit and endbit are constants divisible
5434 by BITS_PER_UNIT, call memset instead. */
5435 if (TARGET_MEM_FUNCTIONS
5436 && TREE_CODE (startbit) == INTEGER_CST
5437 && TREE_CODE (endbit) == INTEGER_CST
5438 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5439 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5440 {
5441 emit_library_call (memset_libfunc, LCT_NORMAL,
5442 VOIDmode, 3,
5443 plus_constant (XEXP (targetx, 0),
5444 startb / BITS_PER_UNIT),
5445 Pmode,
5446 constm1_rtx, TYPE_MODE (integer_type_node),
5447 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5448 TYPE_MODE (sizetype));
5449 }
5450 else
5451 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5452 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5453 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5454 startbit_rtx, TYPE_MODE (sizetype),
5455 endbit_rtx, TYPE_MODE (sizetype));
5456
5457 if (REG_P (target))
5458 emit_move_insn (target, targetx);
5459 }
5460 }
5461
5462 else
5463 abort ();
5464 }
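
/* Editor's sketch, not part of expr.c: a self-contained illustration of the
   two strategies used above for SET_TYPE constructors -- packing individual
   bits into host words with the proper endianness, and falling back to a
   plain memset when the bit range happens to be byte-aligned.  All names
   below (pack_bits, set_byte_aligned_range) are hypothetical.  */

#include <limits.h>
#include <string.h>

/* Pack NBITS bits from BIT_BUFFER (one byte per bit, as produced by
   get_set_constructor_bits) into WORDS, which the caller has zeroed and
   sized to hold at least NBITS bits.  */
static void
pack_bits (const char *bit_buffer, unsigned nbits, unsigned long *words,
           int big_endian)
{
  const unsigned word_bits = sizeof (unsigned long) * CHAR_BIT;
  unsigned i;

  for (i = 0; i < nbits; i++)
    if (bit_buffer[i])
      {
        unsigned pos = i % word_bits;
        if (big_endian)
          words[i / word_bits] |= 1UL << (word_bits - 1 - pos);
        else
          words[i / word_bits] |= 1UL << pos;
      }
}

/* When STARTB and ENDB are both multiples of CHAR_BIT, the range
   [STARTB, ENDB) covers whole bytes and can be set in one call, which is
   what the TARGET_MEM_FUNCTIONS branch above does via memset with -1.  */
static void
set_byte_aligned_range (unsigned char *base, unsigned startb, unsigned endb)
{
  memset (base + startb / CHAR_BIT, 0xff, (endb - startb) / CHAR_BIT);
}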
5465
5466 /* Store the value of EXP (an expression tree)
5467 into a subfield of TARGET which has mode MODE and occupies
5468 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5469 If MODE is VOIDmode, it means that we are storing into a bit-field.
5470
5471 If VALUE_MODE is VOIDmode, return nothing in particular.
5472 UNSIGNEDP is not used in this case.
5473
5474 Otherwise, return an rtx for the value stored. This rtx
5475 has mode VALUE_MODE if that is convenient to do.
5476 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5477
5478 TYPE is the type of the underlying object.
5479
5480 ALIAS_SET is the alias set for the destination. This value will
5481 (in general) be different from that for TARGET, since TARGET is a
5482 reference to the containing structure. */
5483
5484 static rtx
5485 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5486 alias_set)
5487 rtx target;
5488 HOST_WIDE_INT bitsize;
5489 HOST_WIDE_INT bitpos;
5490 enum machine_mode mode;
5491 tree exp;
5492 enum machine_mode value_mode;
5493 int unsignedp;
5494 tree type;
5495 int alias_set;
5496 {
5497 HOST_WIDE_INT width_mask = 0;
5498
5499 if (TREE_CODE (exp) == ERROR_MARK)
5500 return const0_rtx;
5501
5502 /* If we have nothing to store, do nothing unless the expression has
5503 side-effects. */
5504 if (bitsize == 0)
5505 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5506 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5507 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5508
5509 /* If we are storing into an unaligned field of an aligned union that is
5510 in a register, we may have the mode of TARGET being an integer mode but
5511 MODE == BLKmode. In that case, get an aligned object whose size and
5512 alignment are the same as TARGET and store TARGET into it (we can avoid
5513 the store if the field being stored is the entire width of TARGET). Then
5514 call ourselves recursively to store the field into a BLKmode version of
5515 that object. Finally, load from the object into TARGET. This is not
5516 very efficient in general, but should only be slightly more expensive
5517 than the otherwise-required unaligned accesses. Perhaps this can be
5518 cleaned up later. */
5519
5520 if (mode == BLKmode
5521 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5522 {
5523 rtx object
5524 = assign_temp
5525 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5526 0, 1, 1);
5527 rtx blk_object = adjust_address (object, BLKmode, 0);
5528
5529 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5530 emit_move_insn (object, target);
5531
5532 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5533 alias_set);
5534
5535 emit_move_insn (target, object);
5536
5537 /* We want to return the BLKmode version of the data. */
5538 return blk_object;
5539 }
5540
5541 if (GET_CODE (target) == CONCAT)
5542 {
5543 /* We're storing into a struct containing a single __complex. */
5544
5545 if (bitpos != 0)
5546 abort ();
5547 return store_expr (exp, target, 0);
5548 }
5549
5550 /* If the structure is in a register or if the component
5551 is a bit field, we cannot use addressing to access it.
5552 Use bit-field techniques or SUBREG to store in it. */
5553
5554 if (mode == VOIDmode
5555 || (mode != BLKmode && ! direct_store[(int) mode]
5556 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5557 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5558 || GET_CODE (target) == REG
5559 || GET_CODE (target) == SUBREG
5560 /* If the field isn't aligned enough to store as an ordinary memref,
5561 store it as a bit field. */
5562 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5563 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5564 || bitpos % GET_MODE_ALIGNMENT (mode)))
5565 /* If the RHS and field both have constant size and the size of the
5566 RHS isn't the same as the size of the bitfield, we must use bitfield
5567 operations. */
5568 || (bitsize >= 0
5569 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5570 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5571 {
5572 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5573
5574 /* If BITSIZE is narrower than the size of the type of EXP
5575 we will be narrowing TEMP. Normally, what's wanted are the
5576 low-order bits. However, if EXP's type is a record and this is a
5577 big-endian machine, we want the upper BITSIZE bits. */
5578 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5579 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5580 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5581 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5582 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5583 - bitsize),
5584 temp, 1);
5585
5586 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5587 MODE. */
5588 if (mode != VOIDmode && mode != BLKmode
5589 && mode != TYPE_MODE (TREE_TYPE (exp)))
5590 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5591
5592 /* If the modes of TARGET and TEMP are both BLKmode, both
5593 must be in memory and BITPOS must be aligned on a byte
5594 boundary. If so, we simply do a block copy. */
5595 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5596 {
5597 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5598 || bitpos % BITS_PER_UNIT != 0)
5599 abort ();
5600
5601 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5602 emit_block_move (target, temp,
5603 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5604 / BITS_PER_UNIT),
5605 BLOCK_OP_NORMAL);
5606
5607 return value_mode == VOIDmode ? const0_rtx : target;
5608 }
5609
5610 /* Store the value in the bitfield. */
5611 store_bit_field (target, bitsize, bitpos, mode, temp,
5612 int_size_in_bytes (type));
5613
5614 if (value_mode != VOIDmode)
5615 {
5616 /* The caller wants an rtx for the value.
5617 If possible, avoid refetching from the bitfield itself. */
5618 if (width_mask != 0
5619 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5620 {
5621 tree count;
5622 enum machine_mode tmode;
5623
5624 tmode = GET_MODE (temp);
5625 if (tmode == VOIDmode)
5626 tmode = value_mode;
5627
5628 if (unsignedp)
5629 return expand_and (tmode, temp,
5630 gen_int_mode (width_mask, tmode),
5631 NULL_RTX);
5632
5633 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5634 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5635 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5636 }
5637
5638 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5639 NULL_RTX, value_mode, VOIDmode,
5640 int_size_in_bytes (type));
5641 }
5642 return const0_rtx;
5643 }
5644 else
5645 {
5646 rtx addr = XEXP (target, 0);
5647 rtx to_rtx = target;
5648
5649 /* If a value is wanted, it must be the lhs,
5650 so make the address stable for multiple uses. */
5651
5652 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5653 && ! CONSTANT_ADDRESS_P (addr)
5654 /* A frame-pointer reference is already stable. */
5655 && ! (GET_CODE (addr) == PLUS
5656 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5657 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5658 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5659 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5660
5661 /* Now build a reference to just the desired component. */
5662
5663 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5664
5665 if (to_rtx == target)
5666 to_rtx = copy_rtx (to_rtx);
5667
5668 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5669 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5670 set_mem_alias_set (to_rtx, alias_set);
5671
5672 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5673 }
5674 }
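
/* Editor's sketch, not part of expr.c: what storing into and re-reading a
   bit-field amounts to on a single host word, mirroring the WIDTH_MASK and
   the shift-up/arithmetic-shift-down trick used above when the caller asks
   for the stored value back.  Hypothetical names; assumes BITPOS + BITSIZE
   does not exceed the word width and, for the signed case, that right
   shifts of signed values are arithmetic.  */

#include <limits.h>

/* Store the low BITSIZE bits of VALUE into *WORD at bit position BITPOS.  */
static void
store_bits (unsigned long *word, unsigned bitpos, unsigned bitsize,
            unsigned long value)
{
  const unsigned bits = sizeof (unsigned long) * CHAR_BIT;
  unsigned long mask = bitsize < bits ? (1UL << bitsize) - 1 : ~0UL;

  *word = (*word & ~(mask << bitpos)) | ((value & mask) << bitpos);
}

/* Recover the just-stored value without refetching from memory: mask for an
   unsigned field, or shift left and arithmetically back right to
   sign-extend, as the expand_shift pair above does.  */
static long
recover_bits (unsigned long value, unsigned bitsize, int unsignedp)
{
  const unsigned bits = sizeof (unsigned long) * CHAR_BIT;
  unsigned long mask = bitsize < bits ? (1UL << bitsize) - 1 : ~0UL;

  if (unsignedp)
    return (long) (value & mask);
  return ((long) (value << (bits - bitsize))) >> (bits - bitsize);
}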
5675 \f
5676 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5677 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5678 codes and find the ultimate containing object, which we return.
5679
5680 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5681 bit position, and *PUNSIGNEDP to the signedness of the field.
5682 If the position of the field is variable, we store a tree
5683 giving the variable offset (in units) in *POFFSET.
5684 This offset is in addition to the bit position.
5685 If the position is not variable, we store 0 in *POFFSET.
5686
5687 If any of the extraction expressions is volatile,
5688 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5689
5690 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5691 is a mode that can be used to access the field. In that case, *PBITSIZE
5692 is redundant.
5693
5694 If the field describes a variable-sized object, *PMODE is set to
5695 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5696 this case, but the address of the object can be found. */
5697
5698 tree
5699 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5700 punsignedp, pvolatilep)
5701 tree exp;
5702 HOST_WIDE_INT *pbitsize;
5703 HOST_WIDE_INT *pbitpos;
5704 tree *poffset;
5705 enum machine_mode *pmode;
5706 int *punsignedp;
5707 int *pvolatilep;
5708 {
5709 tree size_tree = 0;
5710 enum machine_mode mode = VOIDmode;
5711 tree offset = size_zero_node;
5712 tree bit_offset = bitsize_zero_node;
5713 tree placeholder_ptr = 0;
5714 tree tem;
5715
5716 /* First get the mode, signedness, and size. We do this from just the
5717 outermost expression. */
5718 if (TREE_CODE (exp) == COMPONENT_REF)
5719 {
5720 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5721 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5722 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5723
5724 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5725 }
5726 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5727 {
5728 size_tree = TREE_OPERAND (exp, 1);
5729 *punsignedp = TREE_UNSIGNED (exp);
5730 }
5731 else
5732 {
5733 mode = TYPE_MODE (TREE_TYPE (exp));
5734 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5735
5736 if (mode == BLKmode)
5737 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5738 else
5739 *pbitsize = GET_MODE_BITSIZE (mode);
5740 }
5741
5742 if (size_tree != 0)
5743 {
5744 if (! host_integerp (size_tree, 1))
5745 mode = BLKmode, *pbitsize = -1;
5746 else
5747 *pbitsize = tree_low_cst (size_tree, 1);
5748 }
5749
5750 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5751 and find the ultimate containing object. */
5752 while (1)
5753 {
5754 if (TREE_CODE (exp) == BIT_FIELD_REF)
5755 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5756 else if (TREE_CODE (exp) == COMPONENT_REF)
5757 {
5758 tree field = TREE_OPERAND (exp, 1);
5759 tree this_offset = DECL_FIELD_OFFSET (field);
5760
5761 /* If this field hasn't been filled in yet, don't go
5762 past it. This should only happen when folding expressions
5763 made during type construction. */
5764 if (this_offset == 0)
5765 break;
5766 else if (! TREE_CONSTANT (this_offset)
5767 && contains_placeholder_p (this_offset))
5768 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5769
5770 offset = size_binop (PLUS_EXPR, offset, this_offset);
5771 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5772 DECL_FIELD_BIT_OFFSET (field));
5773
5774 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5775 }
5776
5777 else if (TREE_CODE (exp) == ARRAY_REF
5778 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5779 {
5780 tree index = TREE_OPERAND (exp, 1);
5781 tree array = TREE_OPERAND (exp, 0);
5782 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5783 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5784 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5785
5786 /* We assume all arrays have sizes that are a multiple of a byte.
5787 First subtract the lower bound, if any, in the type of the
5788 index, then convert to sizetype and multiply by the size of the
5789 array element. */
5790 if (low_bound != 0 && ! integer_zerop (low_bound))
5791 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5792 index, low_bound));
5793
5794 /* If the index has a self-referential type, pass it to a
5795 WITH_RECORD_EXPR; if the component size is self-referential,
5796 pass our component to one. */
5797 if (! TREE_CONSTANT (index)
5798 && contains_placeholder_p (index))
5799 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5800 if (! TREE_CONSTANT (unit_size)
5801 && contains_placeholder_p (unit_size))
5802 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5803
5804 offset = size_binop (PLUS_EXPR, offset,
5805 size_binop (MULT_EXPR,
5806 convert (sizetype, index),
5807 unit_size));
5808 }
5809
5810 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5811 {
5812 tree new = find_placeholder (exp, &placeholder_ptr);
5813
5814 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5815 We might have been called from tree optimization where we
5816 haven't set up an object yet. */
5817 if (new == 0)
5818 break;
5819 else
5820 exp = new;
5821
5822 continue;
5823 }
5824 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5825 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5826 && ! ((TREE_CODE (exp) == NOP_EXPR
5827 || TREE_CODE (exp) == CONVERT_EXPR)
5828 && (TYPE_MODE (TREE_TYPE (exp))
5829 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5830 break;
5831
5832 /* If any reference in the chain is volatile, the effect is volatile. */
5833 if (TREE_THIS_VOLATILE (exp))
5834 *pvolatilep = 1;
5835
5836 exp = TREE_OPERAND (exp, 0);
5837 }
5838
5839 /* If OFFSET is constant, see if we can return the whole thing as a
5840 constant bit position. Otherwise, split it up. */
5841 if (host_integerp (offset, 0)
5842 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5843 bitsize_unit_node))
5844 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5845 && host_integerp (tem, 0))
5846 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5847 else
5848 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5849
5850 *pmode = mode;
5851 return exp;
5852 }
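
/* Editor's sketch, not part of expr.c: for a nested reference such as
   s.a[i].b, get_inner_reference effectively accumulates a constant bit
   position plus a variable byte offset.  The hypothetical helper below
   shows the same accumulation in standard C: the COMPONENT_REF parts
   contribute offsetof terms, the ARRAY_REF contributes index * unit size.  */

#include <stddef.h>

struct inner_s { int x; int b; };
struct outer_s { char pad; struct inner_s a[10]; };

/* Byte offset of s.a[i].b from the start of s, summed the same way the
   loop above sums DECL_FIELD_OFFSET and index * unit_size.  */
static size_t
byte_offset_of_member (size_t i)
{
  return offsetof (struct outer_s, a)
         + i * sizeof (struct inner_s)
         + offsetof (struct inner_s, b);
}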
5853
5854 /* Return 1 if T is an expression that get_inner_reference handles. */
5855
5856 int
5857 handled_component_p (t)
5858 tree t;
5859 {
5860 switch (TREE_CODE (t))
5861 {
5862 case BIT_FIELD_REF:
5863 case COMPONENT_REF:
5864 case ARRAY_REF:
5865 case ARRAY_RANGE_REF:
5866 case NON_LVALUE_EXPR:
5867 case VIEW_CONVERT_EXPR:
5868 return 1;
5869
5870 case NOP_EXPR:
5871 case CONVERT_EXPR:
5872 return (TYPE_MODE (TREE_TYPE (t))
5873 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5874
5875 default:
5876 return 0;
5877 }
5878 }
5879 \f
5880 /* Given an rtx VALUE that may contain additions and multiplications, return
5881 an equivalent value that just refers to a register, memory, or constant.
5882 This is done by generating instructions to perform the arithmetic and
5883 returning a pseudo-register containing the value.
5884
5885 The returned value may be a REG, SUBREG, MEM or constant. */
5886
5887 rtx
5888 force_operand (value, target)
5889 rtx value, target;
5890 {
5891 rtx op1, op2;
5892 /* Use subtarget as the target for operand 0 of a binary operation. */
5893 rtx subtarget = get_subtarget (target);
5894 enum rtx_code code = GET_CODE (value);
5895
5896 /* Check for a PIC address load. */
5897 if ((code == PLUS || code == MINUS)
5898 && XEXP (value, 0) == pic_offset_table_rtx
5899 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5900 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5901 || GET_CODE (XEXP (value, 1)) == CONST))
5902 {
5903 if (!subtarget)
5904 subtarget = gen_reg_rtx (GET_MODE (value));
5905 emit_move_insn (subtarget, value);
5906 return subtarget;
5907 }
5908
5909 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5910 {
5911 if (!target)
5912 target = gen_reg_rtx (GET_MODE (value));
5913 convert_move (target, force_operand (XEXP (value, 0), NULL),
5914 code == ZERO_EXTEND);
5915 return target;
5916 }
5917
5918 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5919 {
5920 op2 = XEXP (value, 1);
5921 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5922 subtarget = 0;
5923 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5924 {
5925 code = PLUS;
5926 op2 = negate_rtx (GET_MODE (value), op2);
5927 }
5928
5929 /* Check for an addition with OP2 a constant integer and our first
5930 operand a PLUS of a virtual register and something else. In that
5931 case, we want to emit the sum of the virtual register and the
5932 constant first and then add the other value. This allows virtual
5933 register instantiation to simply modify the constant rather than
5934 creating another one around this addition. */
5935 if (code == PLUS && GET_CODE (op2) == CONST_INT
5936 && GET_CODE (XEXP (value, 0)) == PLUS
5937 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5938 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5939 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5940 {
5941 rtx temp = expand_simple_binop (GET_MODE (value), code,
5942 XEXP (XEXP (value, 0), 0), op2,
5943 subtarget, 0, OPTAB_LIB_WIDEN);
5944 return expand_simple_binop (GET_MODE (value), code, temp,
5945 force_operand (XEXP (XEXP (value,
5946 0), 1), 0),
5947 target, 0, OPTAB_LIB_WIDEN);
5948 }
5949
5950 op1 = force_operand (XEXP (value, 0), subtarget);
5951 op2 = force_operand (op2, NULL_RTX);
5952 switch (code)
5953 {
5954 case MULT:
5955 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5956 case DIV:
5957 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5958 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5959 target, 1, OPTAB_LIB_WIDEN);
5960 else
5961 return expand_divmod (0,
5962 FLOAT_MODE_P (GET_MODE (value))
5963 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5964 GET_MODE (value), op1, op2, target, 0);
5965 break;
5966 case MOD:
5967 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5968 target, 0);
5969 break;
5970 case UDIV:
5971 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5972 target, 1);
5973 break;
5974 case UMOD:
5975 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5976 target, 1);
5977 break;
5978 case ASHIFTRT:
5979 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5980 target, 0, OPTAB_LIB_WIDEN);
5981 break;
5982 default:
5983 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5984 target, 1, OPTAB_LIB_WIDEN);
5985 }
5986 }
5987 if (GET_RTX_CLASS (code) == '1')
5988 {
5989 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5990 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5991 }
5992
5993 #ifdef INSN_SCHEDULING
5994 /* On machines that have insn scheduling, we want all memory references to be
5995 explicit, so we need to deal with such paradoxical SUBREGs. */
5996 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5997 && (GET_MODE_SIZE (GET_MODE (value))
5998 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5999 value
6000 = simplify_gen_subreg (GET_MODE (value),
6001 force_reg (GET_MODE (SUBREG_REG (value)),
6002 force_operand (SUBREG_REG (value),
6003 NULL_RTX)),
6004 GET_MODE (SUBREG_REG (value)),
6005 SUBREG_BYTE (value));
6006 #endif
6007
6008 return value;
6009 }
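
/* Editor's sketch, not part of expr.c: the two algebraic rewrites
   force_operand applies above, shown on plain wrapping unsigned arithmetic.
   A subtraction of a constant becomes an addition of its negation, and
   (v + x) + c is reassociated to (v + c) + x so that the constant ends up
   next to the (virtual) register.  Hypothetical name.  */

#include <assert.h>

static void
force_operand_rewrites_demo (unsigned v, unsigned x, unsigned c)
{
  /* MINUS with a CONST_INT becomes PLUS of the negated constant.  */
  assert (v - c == v + (0u - c));

  /* ((v + x) + c) is computed as ((v + c) + x).  */
  assert ((v + x) + c == (v + c) + x);
}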
6010 \f
6011 /* Subroutine of expand_expr: return nonzero iff there is no way that
6012 EXP can reference X, which is being modified. TOP_P is nonzero if this
6013 call is going to be used to determine whether we need a temporary
6014 for EXP, as opposed to a recursive call to this function.
6015
6016 It is always safe for this routine to return zero since it merely
6017 searches for optimization opportunities. */
6018
6019 int
6020 safe_from_p (x, exp, top_p)
6021 rtx x;
6022 tree exp;
6023 int top_p;
6024 {
6025 rtx exp_rtl = 0;
6026 int i, nops;
6027 static tree save_expr_list;
6028
6029 if (x == 0
6030 /* If EXP has varying size, we MUST use a target since we currently
6031 have no way of allocating temporaries of variable size
6032 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6033 So we assume here that something at a higher level has prevented a
6034 clash. This is somewhat bogus, but the best we can do. Only
6035 do this when X is BLKmode and when we are at the top level. */
6036 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6037 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6038 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6039 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6040 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6041 != INTEGER_CST)
6042 && GET_MODE (x) == BLKmode)
6043 /* If X is in the outgoing argument area, it is always safe. */
6044 || (GET_CODE (x) == MEM
6045 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6046 || (GET_CODE (XEXP (x, 0)) == PLUS
6047 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6048 return 1;
6049
6050 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6051 find the underlying pseudo. */
6052 if (GET_CODE (x) == SUBREG)
6053 {
6054 x = SUBREG_REG (x);
6055 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6056 return 0;
6057 }
6058
6059 /* A SAVE_EXPR might appear many times in the expression passed to the
6060 top-level safe_from_p call, and if it has a complex subexpression,
6061 examining it multiple times could result in a combinatorial explosion.
6062 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6063 with optimization took about 28 minutes to compile -- even though it was
6064 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6065 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6066 we have processed. Note that the only test of top_p was above. */
6067
6068 if (top_p)
6069 {
6070 int rtn;
6071 tree t;
6072
6073 save_expr_list = 0;
6074
6075 rtn = safe_from_p (x, exp, 0);
6076
6077 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6078 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6079
6080 return rtn;
6081 }
6082
6083 /* Now look at our tree code and possibly recurse. */
6084 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6085 {
6086 case 'd':
6087 exp_rtl = DECL_RTL_IF_SET (exp);
6088 break;
6089
6090 case 'c':
6091 return 1;
6092
6093 case 'x':
6094 if (TREE_CODE (exp) == TREE_LIST)
6095 return ((TREE_VALUE (exp) == 0
6096 || safe_from_p (x, TREE_VALUE (exp), 0))
6097 && (TREE_CHAIN (exp) == 0
6098 || safe_from_p (x, TREE_CHAIN (exp), 0)));
6099 else if (TREE_CODE (exp) == ERROR_MARK)
6100 return 1; /* An already-visited SAVE_EXPR? */
6101 else
6102 return 0;
6103
6104 case '1':
6105 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6106
6107 case '2':
6108 case '<':
6109 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6110 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6111
6112 case 'e':
6113 case 'r':
6114 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6115 the expression. If it is set, we conflict iff we are that rtx or
6116 both are in memory. Otherwise, we check all operands of the
6117 expression recursively. */
6118
6119 switch (TREE_CODE (exp))
6120 {
6121 case ADDR_EXPR:
6122 /* If the operand is static or we are static, we can't conflict.
6123 Likewise if we don't conflict with the operand at all. */
6124 if (staticp (TREE_OPERAND (exp, 0))
6125 || TREE_STATIC (exp)
6126 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6127 return 1;
6128
6129 /* Otherwise, the only way this can conflict is if we are taking
6130 the address of a DECL whose address is part of X, which is
6131 very rare. */
6132 exp = TREE_OPERAND (exp, 0);
6133 if (DECL_P (exp))
6134 {
6135 if (!DECL_RTL_SET_P (exp)
6136 || GET_CODE (DECL_RTL (exp)) != MEM)
6137 return 0;
6138 else
6139 exp_rtl = XEXP (DECL_RTL (exp), 0);
6140 }
6141 break;
6142
6143 case INDIRECT_REF:
6144 if (GET_CODE (x) == MEM
6145 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6146 get_alias_set (exp)))
6147 return 0;
6148 break;
6149
6150 case CALL_EXPR:
6151 /* Assume that the call will clobber all hard registers and
6152 all of memory. */
6153 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6154 || GET_CODE (x) == MEM)
6155 return 0;
6156 break;
6157
6158 case RTL_EXPR:
6159 /* If a sequence exists, we would have to scan every instruction
6160 in the sequence to see if it was safe. This is probably not
6161 worthwhile. */
6162 if (RTL_EXPR_SEQUENCE (exp))
6163 return 0;
6164
6165 exp_rtl = RTL_EXPR_RTL (exp);
6166 break;
6167
6168 case WITH_CLEANUP_EXPR:
6169 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6170 break;
6171
6172 case CLEANUP_POINT_EXPR:
6173 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6174
6175 case SAVE_EXPR:
6176 exp_rtl = SAVE_EXPR_RTL (exp);
6177 if (exp_rtl)
6178 break;
6179
6180 /* If we've already scanned this, don't do it again. Otherwise,
6181 mark it as scanned and record it so the flag can be cleared
6182 when we're done. */
6183 if (TREE_PRIVATE (exp))
6184 return 1;
6185
6186 TREE_PRIVATE (exp) = 1;
6187 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6188 {
6189 TREE_PRIVATE (exp) = 0;
6190 return 0;
6191 }
6192
6193 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6194 return 1;
6195
6196 case BIND_EXPR:
6197 /* The only operand we look at is operand 1. The rest aren't
6198 part of the expression. */
6199 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6200
6201 case METHOD_CALL_EXPR:
6202 /* This takes an rtx argument, but shouldn't appear here. */
6203 abort ();
6204
6205 default:
6206 break;
6207 }
6208
6209 /* If we have an rtx, we do not need to scan our operands. */
6210 if (exp_rtl)
6211 break;
6212
6213 nops = first_rtl_op (TREE_CODE (exp));
6214 for (i = 0; i < nops; i++)
6215 if (TREE_OPERAND (exp, i) != 0
6216 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6217 return 0;
6218
6219 /* If this is a language-specific tree code, it may require
6220 special handling. */
6221 if ((unsigned int) TREE_CODE (exp)
6222 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6223 && !(*lang_hooks.safe_from_p) (x, exp))
6224 return 0;
6225 }
6226
6227 /* If we have an rtl, find any enclosed object. Then see if we conflict
6228 with it. */
6229 if (exp_rtl)
6230 {
6231 if (GET_CODE (exp_rtl) == SUBREG)
6232 {
6233 exp_rtl = SUBREG_REG (exp_rtl);
6234 if (GET_CODE (exp_rtl) == REG
6235 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6236 return 0;
6237 }
6238
6239 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6240 are memory and they conflict. */
6241 return ! (rtx_equal_p (x, exp_rtl)
6242 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6243 && true_dependence (exp_rtl, VOIDmode, x,
6244 rtx_addr_varies_p)));
6245 }
6246
6247 /* If we reach here, it is safe. */
6248 return 1;
6249 }
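
/* Editor's sketch, not part of expr.c: why safe_from_p marks SAVE_EXPRs
   with TREE_PRIVATE.  A SAVE_EXPR shared many times turns the tree into a
   DAG; without a visited mark the same subexpression is re-examined once
   per path to it, which is exponential in the nesting depth.  The
   hypothetical walker below visits each shared node once and records it so
   the caller can clear the marks afterwards, as the save_expr_list cleanup
   above does.  */

struct dag_node
{
  struct dag_node *kid[2];
  int shared;			/* analogue of a SAVE_EXPR */
  int visited;			/* analogue of TREE_PRIVATE */
};

/* Count nodes reachable from N, visiting each shared node only once.
   MARKED must be large enough to hold every shared node.  */
static int
walk_once (struct dag_node *n, struct dag_node **marked, int *n_marked)
{
  int count, i;

  if (n == 0)
    return 0;
  if (n->shared)
    {
      if (n->visited)
        return 0;
      n->visited = 1;
      marked[(*n_marked)++] = n;
    }
  count = 1;
  for (i = 0; i < 2; i++)
    count += walk_once (n->kid[i], marked, n_marked);
  return count;
}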
6250
6251 /* Subroutine of expand_expr: return rtx if EXP is a
6252 variable or parameter; else return 0. */
6253
6254 static rtx
6255 var_rtx (exp)
6256 tree exp;
6257 {
6258 STRIP_NOPS (exp);
6259 switch (TREE_CODE (exp))
6260 {
6261 case PARM_DECL:
6262 case VAR_DECL:
6263 return DECL_RTL (exp);
6264 default:
6265 return 0;
6266 }
6267 }
6268
6269 #ifdef MAX_INTEGER_COMPUTATION_MODE
6270
6271 void
6272 check_max_integer_computation_mode (exp)
6273 tree exp;
6274 {
6275 enum tree_code code;
6276 enum machine_mode mode;
6277
6278 /* Strip any NOPs that don't change the mode. */
6279 STRIP_NOPS (exp);
6280 code = TREE_CODE (exp);
6281
6282 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6283 if (code == NOP_EXPR
6284 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6285 return;
6286
6287 /* First check the type of the overall operation. We need only look at
6288 unary, binary and relational operations. */
6289 if (TREE_CODE_CLASS (code) == '1'
6290 || TREE_CODE_CLASS (code) == '2'
6291 || TREE_CODE_CLASS (code) == '<')
6292 {
6293 mode = TYPE_MODE (TREE_TYPE (exp));
6294 if (GET_MODE_CLASS (mode) == MODE_INT
6295 && mode > MAX_INTEGER_COMPUTATION_MODE)
6296 internal_error ("unsupported wide integer operation");
6297 }
6298
6299 /* Check operand of a unary op. */
6300 if (TREE_CODE_CLASS (code) == '1')
6301 {
6302 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6303 if (GET_MODE_CLASS (mode) == MODE_INT
6304 && mode > MAX_INTEGER_COMPUTATION_MODE)
6305 internal_error ("unsupported wide integer operation");
6306 }
6307
6308 /* Check operands of a binary/comparison op. */
6309 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6310 {
6311 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6312 if (GET_MODE_CLASS (mode) == MODE_INT
6313 && mode > MAX_INTEGER_COMPUTATION_MODE)
6314 internal_error ("unsupported wide integer operation");
6315
6316 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6317 if (GET_MODE_CLASS (mode) == MODE_INT
6318 && mode > MAX_INTEGER_COMPUTATION_MODE)
6319 internal_error ("unsupported wide integer operation");
6320 }
6321 }
6322 #endif
6323 \f
6324 /* Return the highest power of two that EXP is known to be a multiple of.
6325 This is used in updating alignment of MEMs in array references. */
6326
6327 static HOST_WIDE_INT
6328 highest_pow2_factor (exp)
6329 tree exp;
6330 {
6331 HOST_WIDE_INT c0, c1;
6332
6333 switch (TREE_CODE (exp))
6334 {
6335 case INTEGER_CST:
6336 /* We can find the lowest bit that's a one. If the low
6337 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6338 We need to handle this case since we can find it in a COND_EXPR,
6339 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6340 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6341 later ICE. */
6342 if (TREE_CONSTANT_OVERFLOW (exp))
6343 return BIGGEST_ALIGNMENT;
6344 else
6345 {
6346 /* Note: tree_low_cst is intentionally not used here;
6347 we don't care about the upper bits. */
6348 c0 = TREE_INT_CST_LOW (exp);
6349 c0 &= -c0;
6350 return c0 ? c0 : BIGGEST_ALIGNMENT;
6351 }
6352 break;
6353
6354 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6355 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6356 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6357 return MIN (c0, c1);
6358
6359 case MULT_EXPR:
6360 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6361 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6362 return c0 * c1;
6363
6364 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6365 case CEIL_DIV_EXPR:
6366 if (integer_pow2p (TREE_OPERAND (exp, 1))
6367 && host_integerp (TREE_OPERAND (exp, 1), 1))
6368 {
6369 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6370 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6371 return MAX (1, c0 / c1);
6372 }
6373 break;
6374
6375 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6376 case SAVE_EXPR: case WITH_RECORD_EXPR:
6377 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6378
6379 case COMPOUND_EXPR:
6380 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6381
6382 case COND_EXPR:
6383 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6384 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6385 return MIN (c0, c1);
6386
6387 default:
6388 break;
6389 }
6390
6391 return 1;
6392 }
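
/* Editor's sketch, not part of expr.c: the bit trick and combination rules
   used above.  For a nonzero constant C, C & -C isolates its lowest set
   bit, which is the largest power of two dividing C.  If A is a multiple
   of 2^m and B a multiple of 2^n, then A + B is a multiple of 2^min(m,n)
   and A * B is a multiple of 2^(m+n), which is why PLUS_EXPR takes
   MIN (c0, c1) and MULT_EXPR returns c0 * c1.  Hypothetical name.  */

static unsigned long
lowest_pow2_divisor (unsigned long c)
{
  return c ? (c & -c) : 0;	/* e.g. 24 -> 8, 40 -> 8, 7 -> 1 */
}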
6393
6394 /* Similar, except that it is known that the expression must be a multiple
6395 of the alignment of TYPE. */
6396
6397 static HOST_WIDE_INT
6398 highest_pow2_factor_for_type (type, exp)
6399 tree type;
6400 tree exp;
6401 {
6402 HOST_WIDE_INT type_align, factor;
6403
6404 factor = highest_pow2_factor (exp);
6405 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6406 return MAX (factor, type_align);
6407 }
6408 \f
6409 /* Return an object on the placeholder list that matches EXP, a
6410 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6411 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6412 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6413 points to a starting position in the placeholder list (zero meaning the
6414 start of the list); on return, it is set to point to the placeholder
6415 list entry at which the object was found. */
6416
6417 tree
6418 find_placeholder (exp, plist)
6419 tree exp;
6420 tree *plist;
6421 {
6422 tree type = TREE_TYPE (exp);
6423 tree placeholder_expr;
6424
6425 for (placeholder_expr
6426 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6427 placeholder_expr != 0;
6428 placeholder_expr = TREE_CHAIN (placeholder_expr))
6429 {
6430 tree need_type = TYPE_MAIN_VARIANT (type);
6431 tree elt;
6432
6433 /* Find the outermost reference that is of the type we want. If none,
6434 see if any object has a type that is a pointer to the type we
6435 want. */
6436 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6437 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6438 || TREE_CODE (elt) == COND_EXPR)
6439 ? TREE_OPERAND (elt, 1)
6440 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6441 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6442 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6443 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6444 ? TREE_OPERAND (elt, 0) : 0))
6445 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6446 {
6447 if (plist)
6448 *plist = placeholder_expr;
6449 return elt;
6450 }
6451
6452 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6453 elt
6454 = ((TREE_CODE (elt) == COMPOUND_EXPR
6455 || TREE_CODE (elt) == COND_EXPR)
6456 ? TREE_OPERAND (elt, 1)
6457 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6458 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6459 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6460 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6461 ? TREE_OPERAND (elt, 0) : 0))
6462 if (POINTER_TYPE_P (TREE_TYPE (elt))
6463 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6464 == need_type))
6465 {
6466 if (plist)
6467 *plist = placeholder_expr;
6468 return build1 (INDIRECT_REF, need_type, elt);
6469 }
6470 }
6471
6472 return 0;
6473 }
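
/* Editor's sketch, not part of expr.c: the shape of the search that
   find_placeholder performs.  Each list entry carries an object tagged with
   a type; the first pass looks for an object of exactly the needed type,
   the second for one whose type is a pointer to it (around which the real
   code then builds an INDIRECT_REF).  Entirely hypothetical types and
   names.  */

enum obj_kind { K_PLAIN, K_POINTER };

struct tagged_obj
{
  enum obj_kind kind;
  int type_id;			/* for K_POINTER, the pointed-to type */
  void *object;
  struct tagged_obj *next;
};

static void *
find_by_type (struct tagged_obj *list, int need_type)
{
  struct tagged_obj *p;

  /* First pass: an object of the wanted type itself.  */
  for (p = list; p; p = p->next)
    if (p->kind == K_PLAIN && p->type_id == need_type)
      return p->object;

  /* Second pass: an object whose type points to the wanted type.  */
  for (p = list; p; p = p->next)
    if (p->kind == K_POINTER && p->type_id == need_type)
      return p->object;

  return 0;
}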
6474 \f
6475 /* expand_expr: generate code for computing expression EXP.
6476 An rtx for the computed value is returned. The value is never null.
6477 In the case of a void EXP, const0_rtx is returned.
6478
6479 The value may be stored in TARGET if TARGET is nonzero.
6480 TARGET is just a suggestion; callers must assume that
6481 the rtx returned may not be the same as TARGET.
6482
6483 If TARGET is CONST0_RTX, it means that the value will be ignored.
6484
6485 If TMODE is not VOIDmode, it suggests generating the
6486 result in mode TMODE. But this is done only when convenient.
6487 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6488 TMODE is just a suggestion; callers must assume that
6489 the rtx returned may not have mode TMODE.
6490
6491 Note that TARGET may have neither TMODE nor MODE. In that case, it
6492 probably will not be used.
6493
6494 If MODIFIER is EXPAND_SUM then when EXP is an addition
6495 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6496 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6497 products as above, or REG or MEM, or constant.
6498 Ordinarily in such cases we would output mul or add instructions
6499 and then return a pseudo reg containing the sum.
6500
6501 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6502 it also marks a label as absolutely required (it can't be dead).
6503 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6504 This is used for outputting expressions used in initializers.
6505
6506 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6507 with a constant address even if that address is not normally legitimate.
6508 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6509
6510 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6511 a call parameter. Such targets require special care as we haven't yet
6512 marked TARGET so that it's safe from being trashed by libcalls. We
6513 don't want to use TARGET for anything but the final result;
6514 intermediate values must go elsewhere. Additionally, calls to
6515 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6516
6517 rtx
6518 expand_expr (exp, target, tmode, modifier)
6519 tree exp;
6520 rtx target;
6521 enum machine_mode tmode;
6522 enum expand_modifier modifier;
6523 {
6524 rtx op0, op1, temp;
6525 tree type = TREE_TYPE (exp);
6526 int unsignedp = TREE_UNSIGNED (type);
6527 enum machine_mode mode;
6528 enum tree_code code = TREE_CODE (exp);
6529 optab this_optab;
6530 rtx subtarget, original_target;
6531 int ignore;
6532 tree context;
6533
6534 /* Handle ERROR_MARK before anybody tries to access its type. */
6535 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6536 {
6537 op0 = CONST0_RTX (tmode);
6538 if (op0 != 0)
6539 return op0;
6540 return const0_rtx;
6541 }
6542
6543 mode = TYPE_MODE (type);
6544 /* Use subtarget as the target for operand 0 of a binary operation. */
6545 subtarget = get_subtarget (target);
6546 original_target = target;
6547 ignore = (target == const0_rtx
6548 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6549 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6550 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6551 && TREE_CODE (type) == VOID_TYPE));
6552
6553 /* If we are going to ignore this result, we need only do something
6554 if there is a side-effect somewhere in the expression. If there
6555 is, short-circuit the most common cases here. Note that we must
6556 not call expand_expr with anything but const0_rtx in case this
6557 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6558
6559 if (ignore)
6560 {
6561 if (! TREE_SIDE_EFFECTS (exp))
6562 return const0_rtx;
6563
6564 /* Ensure we reference a volatile object even if value is ignored, but
6565 don't do this if all we are doing is taking its address. */
6566 if (TREE_THIS_VOLATILE (exp)
6567 && TREE_CODE (exp) != FUNCTION_DECL
6568 && mode != VOIDmode && mode != BLKmode
6569 && modifier != EXPAND_CONST_ADDRESS)
6570 {
6571 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6572 if (GET_CODE (temp) == MEM)
6573 temp = copy_to_reg (temp);
6574 return const0_rtx;
6575 }
6576
6577 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6578 || code == INDIRECT_REF || code == BUFFER_REF)
6579 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6580 modifier);
6581
6582 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6583 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6584 {
6585 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6586 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6587 return const0_rtx;
6588 }
6589 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6590 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6591 /* If the second operand has no side effects, just evaluate
6592 the first. */
6593 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6594 modifier);
6595 else if (code == BIT_FIELD_REF)
6596 {
6597 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6598 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6599 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6600 return const0_rtx;
6601 }
6602
6603 target = 0;
6604 }
6605
6606 #ifdef MAX_INTEGER_COMPUTATION_MODE
6607 /* Only check stuff here if the mode we want is different from the mode
6608 of the expression; if it's the same, check_max_integer_computation_mode
6609 will handle it. Do we really need to check this stuff at all? */
6610
6611 if (target
6612 && GET_MODE (target) != mode
6613 && TREE_CODE (exp) != INTEGER_CST
6614 && TREE_CODE (exp) != PARM_DECL
6615 && TREE_CODE (exp) != ARRAY_REF
6616 && TREE_CODE (exp) != ARRAY_RANGE_REF
6617 && TREE_CODE (exp) != COMPONENT_REF
6618 && TREE_CODE (exp) != BIT_FIELD_REF
6619 && TREE_CODE (exp) != INDIRECT_REF
6620 && TREE_CODE (exp) != CALL_EXPR
6621 && TREE_CODE (exp) != VAR_DECL
6622 && TREE_CODE (exp) != RTL_EXPR)
6623 {
6624 enum machine_mode mode = GET_MODE (target);
6625
6626 if (GET_MODE_CLASS (mode) == MODE_INT
6627 && mode > MAX_INTEGER_COMPUTATION_MODE)
6628 internal_error ("unsupported wide integer operation");
6629 }
6630
6631 if (tmode != mode
6632 && TREE_CODE (exp) != INTEGER_CST
6633 && TREE_CODE (exp) != PARM_DECL
6634 && TREE_CODE (exp) != ARRAY_REF
6635 && TREE_CODE (exp) != ARRAY_RANGE_REF
6636 && TREE_CODE (exp) != COMPONENT_REF
6637 && TREE_CODE (exp) != BIT_FIELD_REF
6638 && TREE_CODE (exp) != INDIRECT_REF
6639 && TREE_CODE (exp) != VAR_DECL
6640 && TREE_CODE (exp) != CALL_EXPR
6641 && TREE_CODE (exp) != RTL_EXPR
6642 && GET_MODE_CLASS (tmode) == MODE_INT
6643 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6644 internal_error ("unsupported wide integer operation");
6645
6646 check_max_integer_computation_mode (exp);
6647 #endif
6648
6649 /* If we will do cse, generate all results into pseudo registers
6650 since 1) that allows cse to find more things
6651 and 2) otherwise cse could produce an insn the machine
6652 cannot support. An exception is a CONSTRUCTOR into a multi-word
6653 MEM: that's much more likely to be most efficient into the MEM.
6654 Another is a CALL_EXPR which must return in memory. */
6655
6656 if (! cse_not_expected && mode != BLKmode && target
6657 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6658 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6659 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6660 target = 0;
6661
6662 switch (code)
6663 {
6664 case LABEL_DECL:
6665 {
6666 tree function = decl_function_context (exp);
6667 /* Handle using a label in a containing function. */
6668 if (function != current_function_decl
6669 && function != inline_function_decl && function != 0)
6670 {
6671 struct function *p = find_function_data (function);
6672 p->expr->x_forced_labels
6673 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6674 p->expr->x_forced_labels);
6675 }
6676 else
6677 {
6678 if (modifier == EXPAND_INITIALIZER)
6679 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6680 label_rtx (exp),
6681 forced_labels);
6682 }
6683
6684 temp = gen_rtx_MEM (FUNCTION_MODE,
6685 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6686 if (function != current_function_decl
6687 && function != inline_function_decl && function != 0)
6688 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6689 return temp;
6690 }
6691
6692 case PARM_DECL:
6693 if (!DECL_RTL_SET_P (exp))
6694 {
6695 error_with_decl (exp, "prior parameter's size depends on `%s'");
6696 return CONST0_RTX (mode);
6697 }
6698
6699 /* ... fall through ... */
6700
6701 case VAR_DECL:
6702 /* If a static var's type was incomplete when the decl was written,
6703 but the type is complete now, lay out the decl now. */
6704 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6705 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6706 {
6707 rtx value = DECL_RTL_IF_SET (exp);
6708
6709 layout_decl (exp, 0);
6710
6711 /* If the RTL was already set, update its mode and memory
6712 attributes. */
6713 if (value != 0)
6714 {
6715 PUT_MODE (value, DECL_MODE (exp));
6716 SET_DECL_RTL (exp, 0);
6717 set_mem_attributes (value, exp, 1);
6718 SET_DECL_RTL (exp, value);
6719 }
6720 }
6721
6722 /* ... fall through ... */
6723
6724 case FUNCTION_DECL:
6725 case RESULT_DECL:
6726 if (DECL_RTL (exp) == 0)
6727 abort ();
6728
6729 /* Ensure the variable is marked as used even if it doesn't go through
6730 a parser. If it hasn't been used yet, write out an external
6731 definition. */
6732 if (! TREE_USED (exp))
6733 {
6734 assemble_external (exp);
6735 TREE_USED (exp) = 1;
6736 }
6737
6738 /* Show we haven't gotten RTL for this yet. */
6739 temp = 0;
6740
6741 /* Handle variables inherited from containing functions. */
6742 context = decl_function_context (exp);
6743
6744 /* We treat inline_function_decl as an alias for the current function
6745 because that is the inline function whose vars, types, etc.
6746 are being merged into the current function.
6747 See expand_inline_function. */
6748
6749 if (context != 0 && context != current_function_decl
6750 && context != inline_function_decl
6751 /* If var is static, we don't need a static chain to access it. */
6752 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6753 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6754 {
6755 rtx addr;
6756
6757 /* Mark as non-local and addressable. */
6758 DECL_NONLOCAL (exp) = 1;
6759 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6760 abort ();
6761 (*lang_hooks.mark_addressable) (exp);
6762 if (GET_CODE (DECL_RTL (exp)) != MEM)
6763 abort ();
6764 addr = XEXP (DECL_RTL (exp), 0);
6765 if (GET_CODE (addr) == MEM)
6766 addr
6767 = replace_equiv_address (addr,
6768 fix_lexical_addr (XEXP (addr, 0), exp));
6769 else
6770 addr = fix_lexical_addr (addr, exp);
6771
6772 temp = replace_equiv_address (DECL_RTL (exp), addr);
6773 }
6774
6775 /* This is the case of an array whose size is to be determined
6776 from its initializer, while the initializer is still being parsed.
6777 See expand_decl. */
6778
6779 else if (GET_CODE (DECL_RTL (exp)) == MEM
6780 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6781 temp = validize_mem (DECL_RTL (exp));
6782
6783 /* If DECL_RTL is memory, we are in the normal case and either
6784 the address is not valid or it is not a register and -fforce-addr
6785 is specified, get the address into a register. */
6786
6787 else if (GET_CODE (DECL_RTL (exp)) == MEM
6788 && modifier != EXPAND_CONST_ADDRESS
6789 && modifier != EXPAND_SUM
6790 && modifier != EXPAND_INITIALIZER
6791 && (! memory_address_p (DECL_MODE (exp),
6792 XEXP (DECL_RTL (exp), 0))
6793 || (flag_force_addr
6794 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6795 temp = replace_equiv_address (DECL_RTL (exp),
6796 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6797
6798 /* If we got something, return it. But first, set the alignment
6799 if the address is a register. */
6800 if (temp != 0)
6801 {
6802 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6803 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6804
6805 return temp;
6806 }
6807
6808 /* If the mode of DECL_RTL does not match that of the decl, it
6809 must be a promoted value. We return a SUBREG of the wanted mode,
6810 but mark it so that we know that it was already extended. */
6811
6812 if (GET_CODE (DECL_RTL (exp)) == REG
6813 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6814 {
6815 /* Get the signedness used for this variable. Ensure we get the
6816 same mode we got when the variable was declared. */
6817 if (GET_MODE (DECL_RTL (exp))
6818 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6819 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6820 abort ();
6821
6822 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6823 SUBREG_PROMOTED_VAR_P (temp) = 1;
6824 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6825 return temp;
6826 }
6827
6828 return DECL_RTL (exp);
6829
6830 case INTEGER_CST:
6831 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6832 TREE_INT_CST_HIGH (exp), mode);
6833
6834 /* ??? If overflow is set, fold will have done an incomplete job,
6835 which can result in (plus xx (const_int 0)), which can get
6836 simplified by validate_replace_rtx during virtual register
6837 instantiation, which can result in unrecognizable insns.
6838 Avoid this by forcing all overflows into registers. */
6839 if (TREE_CONSTANT_OVERFLOW (exp)
6840 && modifier != EXPAND_INITIALIZER)
6841 temp = force_reg (mode, temp);
6842
6843 return temp;
6844
6845 case CONST_DECL:
6846 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6847
6848 case REAL_CST:
6849 /* If optimized, generate immediate CONST_DOUBLE
6850 which will be turned into memory by reload if necessary.
6851
6852 We used to force a register so that loop.c could see it. But
6853 this does not allow gen_* patterns to perform optimizations with
6854 the constants. It also produces two insns in cases like "x = 1.0;".
6855 On most machines, floating-point constants are not permitted in
6856 many insns, so we'd end up copying it to a register in any case.
6857
6858 Now, we do the copying in expand_binop, if appropriate. */
6859 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6860 TYPE_MODE (TREE_TYPE (exp)));
6861
6862 case COMPLEX_CST:
6863 case STRING_CST:
6864 if (! TREE_CST_RTL (exp))
6865 output_constant_def (exp, 1);
6866
6867 /* TREE_CST_RTL probably contains a constant address.
6868 On RISC machines where a constant address isn't valid,
6869 make some insns to get that address into a register. */
6870 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6871 && modifier != EXPAND_CONST_ADDRESS
6872 && modifier != EXPAND_INITIALIZER
6873 && modifier != EXPAND_SUM
6874 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6875 || (flag_force_addr
6876 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6877 return replace_equiv_address (TREE_CST_RTL (exp),
6878 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6879 return TREE_CST_RTL (exp);
6880
6881 case EXPR_WITH_FILE_LOCATION:
6882 {
6883 rtx to_return;
6884 const char *saved_input_filename = input_filename;
6885 int saved_lineno = lineno;
6886 input_filename = EXPR_WFL_FILENAME (exp);
6887 lineno = EXPR_WFL_LINENO (exp);
6888 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6889 emit_line_note (input_filename, lineno);
6890 /* Possibly avoid switching back and forth here. */
6891 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6892 input_filename = saved_input_filename;
6893 lineno = saved_lineno;
6894 return to_return;
6895 }
6896
6897 case SAVE_EXPR:
6898 context = decl_function_context (exp);
6899
6900 /* If this SAVE_EXPR was at global context, assume we are an
6901 initialization function and move it into our context. */
6902 if (context == 0)
6903 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6904
6905 /* We treat inline_function_decl as an alias for the current function
6906 because that is the inline function whose vars, types, etc.
6907 are being merged into the current function.
6908 See expand_inline_function. */
6909 if (context == current_function_decl || context == inline_function_decl)
6910 context = 0;
6911
6912 /* If this is non-local, handle it. */
6913 if (context)
6914 {
6915 /* The following call just exists to abort if the context is
6916 not of a containing function. */
6917 find_function_data (context);
6918
6919 temp = SAVE_EXPR_RTL (exp);
6920 if (temp && GET_CODE (temp) == REG)
6921 {
6922 put_var_into_stack (exp, /*rescan=*/true);
6923 temp = SAVE_EXPR_RTL (exp);
6924 }
6925 if (temp == 0 || GET_CODE (temp) != MEM)
6926 abort ();
6927 return
6928 replace_equiv_address (temp,
6929 fix_lexical_addr (XEXP (temp, 0), exp));
6930 }
6931 if (SAVE_EXPR_RTL (exp) == 0)
6932 {
6933 if (mode == VOIDmode)
6934 temp = const0_rtx;
6935 else
6936 temp = assign_temp (build_qualified_type (type,
6937 (TYPE_QUALS (type)
6938 | TYPE_QUAL_CONST)),
6939 3, 0, 0);
6940
6941 SAVE_EXPR_RTL (exp) = temp;
6942 if (!optimize && GET_CODE (temp) == REG)
6943 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6944 save_expr_regs);
6945
6946 /* If the mode of TEMP does not match that of the expression, it
6947 must be a promoted value. We pass store_expr a SUBREG of the
6948 wanted mode but mark it so that we know that it was already
6949 extended. */
6950
6951 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6952 {
6953 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6954 promote_mode (type, mode, &unsignedp, 0);
6955 SUBREG_PROMOTED_VAR_P (temp) = 1;
6956 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6957 }
6958
6959 if (temp == const0_rtx)
6960 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6961 else
6962 store_expr (TREE_OPERAND (exp, 0), temp,
6963 modifier == EXPAND_STACK_PARM ? 2 : 0);
6964
6965 TREE_USED (exp) = 1;
6966 }
6967
6968 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6969 must be a promoted value. We return a SUBREG of the wanted mode,
6970 but mark it so that we know that it was already extended. */
6971
6972 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6973 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6974 {
6975 /* Compute the signedness and make the proper SUBREG. */
6976 promote_mode (type, mode, &unsignedp, 0);
6977 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6978 SUBREG_PROMOTED_VAR_P (temp) = 1;
6979 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6980 return temp;
6981 }
6982
6983 return SAVE_EXPR_RTL (exp);
6984
6985 case UNSAVE_EXPR:
6986 {
6987 rtx temp;
6988 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6989 TREE_OPERAND (exp, 0)
6990 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6991 return temp;
6992 }
6993
6994 case PLACEHOLDER_EXPR:
6995 {
6996 tree old_list = placeholder_list;
6997 tree placeholder_expr = 0;
6998
6999 exp = find_placeholder (exp, &placeholder_expr);
7000 if (exp == 0)
7001 abort ();
7002
7003 placeholder_list = TREE_CHAIN (placeholder_expr);
7004 temp = expand_expr (exp, original_target, tmode, modifier);
7005 placeholder_list = old_list;
7006 return temp;
7007 }
7008
7009 case WITH_RECORD_EXPR:
7010 /* Put the object on the placeholder list, expand our first operand,
7011 and pop the list. */
7012 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7013 placeholder_list);
7014 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7015 modifier);
7016 placeholder_list = TREE_CHAIN (placeholder_list);
7017 return target;
7018
7019 case GOTO_EXPR:
7020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7021 expand_goto (TREE_OPERAND (exp, 0));
7022 else
7023 expand_computed_goto (TREE_OPERAND (exp, 0));
7024 return const0_rtx;
7025
7026 case EXIT_EXPR:
7027 expand_exit_loop_if_false (NULL,
7028 invert_truthvalue (TREE_OPERAND (exp, 0)));
7029 return const0_rtx;
7030
7031 case LABELED_BLOCK_EXPR:
7032 if (LABELED_BLOCK_BODY (exp))
7033 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7034 /* Should perhaps use expand_label, but this is simpler and safer. */
7035 do_pending_stack_adjust ();
7036 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7037 return const0_rtx;
7038
7039 case EXIT_BLOCK_EXPR:
7040 if (EXIT_BLOCK_RETURN (exp))
7041 sorry ("returned value in block_exit_expr");
7042 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7043 return const0_rtx;
7044
7045 case LOOP_EXPR:
7046 push_temp_slots ();
7047 expand_start_loop (1);
7048 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7049 expand_end_loop ();
7050 pop_temp_slots ();
7051
7052 return const0_rtx;
7053
7054 case BIND_EXPR:
7055 {
7056 tree vars = TREE_OPERAND (exp, 0);
7057
7058 /* Need to open a binding contour here because
7059 if there are any cleanups they must be contained here. */
7060 expand_start_bindings (2);
7061
7062 /* Mark the corresponding BLOCK for output in its proper place. */
7063 if (TREE_OPERAND (exp, 2) != 0
7064 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7065 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7066
7067 /* If VARS have not yet been expanded, expand them now. */
7068 while (vars)
7069 {
7070 if (!DECL_RTL_SET_P (vars))
7071 expand_decl (vars);
7072 expand_decl_init (vars);
7073 vars = TREE_CHAIN (vars);
7074 }
7075
7076 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7077
7078 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7079
7080 return temp;
7081 }
7082
7083 case RTL_EXPR:
7084 if (RTL_EXPR_SEQUENCE (exp))
7085 {
7086 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7087 abort ();
7088 emit_insn (RTL_EXPR_SEQUENCE (exp));
7089 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7090 }
7091 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7092 free_temps_for_rtl_expr (exp);
7093 return RTL_EXPR_RTL (exp);
7094
7095 case CONSTRUCTOR:
7096 /* If we don't need the result, just ensure we evaluate any
7097 subexpressions. */
7098 if (ignore)
7099 {
7100 tree elt;
7101
7102 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7103 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7104
7105 return const0_rtx;
7106 }
7107
7108 /* All elts simple constants => refer to a constant in memory. But
7109 if this is a non-BLKmode mode, let it store a field at a time
7110 since that should make a CONST_INT or CONST_DOUBLE when we
7111 fold. Likewise, if we have a target we can use, it is best to
7112 store directly into the target unless the type is large enough
7113 that memcpy will be used. If we are making an initializer and
7114 all operands are constant, put it in memory as well.
7115
7116 FIXME: Avoid trying to fill vector constructors piece-meal.
7117 Output them with output_constant_def below unless we're sure
7118 they're zeros. This should go away when vector initializers
7119 are treated like VECTOR_CST instead of arrays.
7120 */
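/* Illustrative note (annotation, not in the original source): a static
   aggregate with all-constant elements, e.g.
     static const int v[4] = { 1, 2, 3, 4 };
   typically takes the output_constant_def path below and is emitted as
   read-only data, while other constructors fall through to
   store_constructor, which fills the target one field at a time.  */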
7121 else if ((TREE_STATIC (exp)
7122 && ((mode == BLKmode
7123 && ! (target != 0 && safe_from_p (target, exp, 1)))
7124 || TREE_ADDRESSABLE (exp)
7125 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7126 && (! MOVE_BY_PIECES_P
7127 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7128 TYPE_ALIGN (type)))
7129 && ((TREE_CODE (type) == VECTOR_TYPE
7130 && !is_zeros_p (exp))
7131 || ! mostly_zeros_p (exp)))))
7132 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7133 {
7134 rtx constructor = output_constant_def (exp, 1);
7135
7136 if (modifier != EXPAND_CONST_ADDRESS
7137 && modifier != EXPAND_INITIALIZER
7138 && modifier != EXPAND_SUM)
7139 constructor = validize_mem (constructor);
7140
7141 return constructor;
7142 }
7143 else
7144 {
7145 /* Handle calls that pass values in multiple non-contiguous
7146 locations. The Irix 6 ABI has examples of this. */
7147 if (target == 0 || ! safe_from_p (target, exp, 1)
7148 || GET_CODE (target) == PARALLEL
7149 || modifier == EXPAND_STACK_PARM)
7150 target
7151 = assign_temp (build_qualified_type (type,
7152 (TYPE_QUALS (type)
7153 | (TREE_READONLY (exp)
7154 * TYPE_QUAL_CONST))),
7155 0, TREE_ADDRESSABLE (exp), 1);
7156
7157 store_constructor (exp, target, 0, int_expr_size (exp));
7158 return target;
7159 }
7160
7161 case INDIRECT_REF:
7162 {
7163 tree exp1 = TREE_OPERAND (exp, 0);
7164 tree index;
7165 tree string = string_constant (exp1, &index);
7166
7167 /* Try to optimize reads from const strings. */
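/* For example (illustrative): a read such as *("abc" + 1) can be folded
   here to the character constant 'b' instead of emitting a load.  */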
7168 if (string
7169 && TREE_CODE (string) == STRING_CST
7170 && TREE_CODE (index) == INTEGER_CST
7171 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7172 && GET_MODE_CLASS (mode) == MODE_INT
7173 && GET_MODE_SIZE (mode) == 1
7174 && modifier != EXPAND_WRITE)
7175 return gen_int_mode (TREE_STRING_POINTER (string)
7176 [TREE_INT_CST_LOW (index)], mode);
7177
7178 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7179 op0 = memory_address (mode, op0);
7180 temp = gen_rtx_MEM (mode, op0);
7181 set_mem_attributes (temp, exp, 0);
7182
7183 /* If we are writing to this object and its type is a record with
7184 readonly fields, we must mark it as readonly so it will
7185 conflict with readonly references to those fields. */
7186 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7187 RTX_UNCHANGING_P (temp) = 1;
7188
7189 return temp;
7190 }
7191
7192 case ARRAY_REF:
7193 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7194 abort ();
7195
7196 {
7197 tree array = TREE_OPERAND (exp, 0);
7198 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7199 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7200 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7201 HOST_WIDE_INT i;
7202
7203 /* Optimize the special-case of a zero lower bound.
7204
7205 We convert the low_bound to sizetype to avoid some problems
7206 with constant folding. (E.g. suppose the lower bound is 1,
7207 and its mode is QI. Without the conversion, (ARRAY
7208 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7209 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7210
7211 if (! integer_zerop (low_bound))
7212 index = size_diffop (index, convert (sizetype, low_bound));
7213
7214 /* Fold an expression like: "foo"[2].
7215 This is not done in fold so it won't happen inside &.
7216 Don't fold if this is for wide characters since it's too
7217 difficult to do correctly and this is a very rare case. */
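/* E.g. (illustrative) "foo"[2] is folded below to the integer constant
   for 'o' via gen_int_mode, with no memory reference emitted.  */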
7218
7219 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7220 && TREE_CODE (array) == STRING_CST
7221 && TREE_CODE (index) == INTEGER_CST
7222 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7223 && GET_MODE_CLASS (mode) == MODE_INT
7224 && GET_MODE_SIZE (mode) == 1)
7225 return gen_int_mode (TREE_STRING_POINTER (array)
7226 [TREE_INT_CST_LOW (index)], mode);
7227
7228 /* If this is a constant index into a constant array,
7229 just get the value from the array. Handle both the cases when
7230 we have an explicit constructor and when our operand is a variable
7231 that was declared const. */
7232
7233 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7234 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7235 && TREE_CODE (index) == INTEGER_CST
7236 && 0 > compare_tree_int (index,
7237 list_length (CONSTRUCTOR_ELTS
7238 (TREE_OPERAND (exp, 0)))))
7239 {
7240 tree elem;
7241
7242 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7243 i = TREE_INT_CST_LOW (index);
7244 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7245 ;
7246
7247 if (elem)
7248 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7249 modifier);
7250 }
7251
7252 else if (optimize >= 1
7253 && modifier != EXPAND_CONST_ADDRESS
7254 && modifier != EXPAND_INITIALIZER
7255 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7256 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7257 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7258 {
7259 if (TREE_CODE (index) == INTEGER_CST)
7260 {
7261 tree init = DECL_INITIAL (array);
7262
7263 if (TREE_CODE (init) == CONSTRUCTOR)
7264 {
7265 tree elem;
7266
7267 for (elem = CONSTRUCTOR_ELTS (init);
7268 (elem
7269 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7270 elem = TREE_CHAIN (elem))
7271 ;
7272
7273 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7274 return expand_expr (fold (TREE_VALUE (elem)), target,
7275 tmode, modifier);
7276 }
7277 else if (TREE_CODE (init) == STRING_CST
7278 && 0 > compare_tree_int (index,
7279 TREE_STRING_LENGTH (init)))
7280 {
7281 tree type = TREE_TYPE (TREE_TYPE (init));
7282 enum machine_mode mode = TYPE_MODE (type);
7283
7284 if (GET_MODE_CLASS (mode) == MODE_INT
7285 && GET_MODE_SIZE (mode) == 1)
7286 return gen_int_mode (TREE_STRING_POINTER (init)
7287 [TREE_INT_CST_LOW (index)], mode);
7288 }
7289 }
7290 }
7291 }
7292 /* Fall through. */
7293
7294 case COMPONENT_REF:
7295 case BIT_FIELD_REF:
7296 case ARRAY_RANGE_REF:
7297 /* If the operand is a CONSTRUCTOR, we can just extract the
7298 appropriate field if it is present. Don't do this if we have
7299 already written the data since we want to refer to that copy
7300 and varasm.c assumes that's what we'll do. */
7301 if (code == COMPONENT_REF
7302 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7303 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7304 {
7305 tree elt;
7306
7307 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7308 elt = TREE_CHAIN (elt))
7309 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7310 /* We can normally use the value of the field in the
7311 CONSTRUCTOR. However, if this is a bitfield in
7312 an integral mode that we can fit in a HOST_WIDE_INT,
7313 we must mask only the number of bits in the bitfield,
7314 since this is done implicitly by the constructor. If
7315 the bitfield does not meet either of those conditions,
7316 we can't do this optimization. */
7317 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7318 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7319 == MODE_INT)
7320 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7321 <= HOST_BITS_PER_WIDE_INT))))
7322 {
7323 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7324 && modifier == EXPAND_STACK_PARM)
7325 target = 0;
7326 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7327 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7328 {
7329 HOST_WIDE_INT bitsize
7330 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7331 enum machine_mode imode
7332 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7333
7334 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7335 {
7336 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7337 op0 = expand_and (imode, op0, op1, target);
7338 }
7339 else
7340 {
7341 tree count
7342 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7343 0);
7344
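/* Sign-extend the BITSIZE-bit value: shift it to the top of IMODE,
   then arithmetic-shift it back down.  */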
7345 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7346 target, 0);
7347 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7348 target, 0);
7349 }
7350 }
7351
7352 return op0;
7353 }
7354 }
7355
7356 {
7357 enum machine_mode mode1;
7358 HOST_WIDE_INT bitsize, bitpos;
7359 tree offset;
7360 int volatilep = 0;
7361 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7362 &mode1, &unsignedp, &volatilep);
7363 rtx orig_op0;
7364
7365 /* If we got back the original object, something is wrong. Perhaps
7366 we are evaluating an expression too early. In any event, don't
7367 infinitely recurse. */
7368 if (tem == exp)
7369 abort ();
7370
7371 /* If TEM's type is a union of variable size, pass TARGET to the inner
7372 computation, since it will need a temporary and TARGET is known
7373 to be usable as one. This occurs in unchecked conversion in Ada. */
7374
7375 orig_op0 = op0
7376 = expand_expr (tem,
7377 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7378 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7379 != INTEGER_CST)
7380 && modifier != EXPAND_STACK_PARM
7381 ? target : NULL_RTX),
7382 VOIDmode,
7383 (modifier == EXPAND_INITIALIZER
7384 || modifier == EXPAND_CONST_ADDRESS
7385 || modifier == EXPAND_STACK_PARM)
7386 ? modifier : EXPAND_NORMAL);
7387
7388 /* If this is a constant, put it into a register if it is a
7389 legitimate constant and OFFSET is 0, and into memory otherwise. */
7390 if (CONSTANT_P (op0))
7391 {
7392 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7393 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7394 && offset == 0)
7395 op0 = force_reg (mode, op0);
7396 else
7397 op0 = validize_mem (force_const_mem (mode, op0));
7398 }
7399
7400 if (offset != 0)
7401 {
7402 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7403 EXPAND_SUM);
7404
7405 /* If this object is in a register, put it into memory.
7406 This case can't occur in C, but can in Ada if we have
7407 unchecked conversion of an expression from a scalar type to
7408 an array or record type. */
7409 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7410 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7411 {
7412 /* If the operand is a SAVE_EXPR, we can deal with this by
7413 forcing the SAVE_EXPR into memory. */
7414 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7415 {
7416 put_var_into_stack (TREE_OPERAND (exp, 0),
7417 /*rescan=*/true);
7418 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7419 }
7420 else
7421 {
7422 tree nt
7423 = build_qualified_type (TREE_TYPE (tem),
7424 (TYPE_QUALS (TREE_TYPE (tem))
7425 | TYPE_QUAL_CONST));
7426 rtx memloc = assign_temp (nt, 1, 1, 1);
7427
7428 emit_move_insn (memloc, op0);
7429 op0 = memloc;
7430 }
7431 }
7432
7433 if (GET_CODE (op0) != MEM)
7434 abort ();
7435
7436 #ifdef POINTERS_EXTEND_UNSIGNED
7437 if (GET_MODE (offset_rtx) != Pmode)
7438 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7439 #else
7440 if (GET_MODE (offset_rtx) != ptr_mode)
7441 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7442 #endif
7443
7444 /* A constant address in OP0 can have VOIDmode; we must not try
7445 to call force_reg in that case, so avoid it. */
7446 if (GET_CODE (op0) == MEM
7447 && GET_MODE (op0) == BLKmode
7448 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7449 && bitsize != 0
7450 && (bitpos % bitsize) == 0
7451 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7452 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7453 {
7454 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7455 bitpos = 0;
7456 }
7457
7458 op0 = offset_address (op0, offset_rtx,
7459 highest_pow2_factor (offset));
7460 }
7461
7462 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7463 record its alignment as BIGGEST_ALIGNMENT. */
7464 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7465 && is_aligning_offset (offset, tem))
7466 set_mem_align (op0, BIGGEST_ALIGNMENT);
7467
7468 /* Don't forget about volatility even if this is a bitfield. */
7469 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7470 {
7471 if (op0 == orig_op0)
7472 op0 = copy_rtx (op0);
7473
7474 MEM_VOLATILE_P (op0) = 1;
7475 }
7476
7477 /* The following code doesn't handle CONCAT.
7478 Assume only bitpos == 0 can be used for CONCAT, due to
7479 one-element arrays having the same mode as their element. */
7480 if (GET_CODE (op0) == CONCAT)
7481 {
7482 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7483 abort ();
7484 return op0;
7485 }
7486
7487 /* In cases where an aligned union has an unaligned object
7488 as a field, we might be extracting a BLKmode value from
7489 an integer-mode (e.g., SImode) object. Handle this case
7490 by doing the extract into an object as wide as the field
7491 (which we know to be the width of a basic mode), then
7492 storing into memory, and changing the mode to BLKmode. */
7493 if (mode1 == VOIDmode
7494 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7495 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7496 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7497 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7498 && modifier != EXPAND_CONST_ADDRESS
7499 && modifier != EXPAND_INITIALIZER)
7500 /* If the field isn't aligned enough to fetch as a memref,
7501 fetch it as a bit field. */
7502 || (mode1 != BLKmode
7503 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7504 && ((TYPE_ALIGN (TREE_TYPE (tem))
7505 < GET_MODE_ALIGNMENT (mode))
7506 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7507 /* If the type and the field are a constant size and the
7508 size of the type isn't the same size as the bitfield,
7509 we must use bitfield operations. */
7510 || (bitsize >= 0
7511 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7512 == INTEGER_CST)
7513 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7514 bitsize)))
7515 {
7516 enum machine_mode ext_mode = mode;
7517
7518 if (ext_mode == BLKmode
7519 && ! (target != 0 && GET_CODE (op0) == MEM
7520 && GET_CODE (target) == MEM
7521 && bitpos % BITS_PER_UNIT == 0))
7522 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7523
7524 if (ext_mode == BLKmode)
7525 {
7526 /* In this case, BITPOS must start at a byte boundary and
7527 TARGET, if specified, must be a MEM. */
7528 if (GET_CODE (op0) != MEM
7529 || (target != 0 && GET_CODE (target) != MEM)
7530 || bitpos % BITS_PER_UNIT != 0)
7531 abort ();
7532
7533 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7534 if (target == 0)
7535 target = assign_temp (type, 0, 1, 1);
7536
7537 emit_block_move (target, op0,
7538 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7539 / BITS_PER_UNIT),
7540 (modifier == EXPAND_STACK_PARM
7541 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7542
7543 return target;
7544 }
7545
7546 op0 = validize_mem (op0);
7547
7548 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7549 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7550
7551 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7552 (modifier == EXPAND_STACK_PARM
7553 ? NULL_RTX : target),
7554 ext_mode, ext_mode,
7555 int_size_in_bytes (TREE_TYPE (tem)));
7556
7557 /* If the result is a record type and BITSIZE is narrower than
7558 the mode of OP0, an integral mode, and this is a big endian
7559 machine, we must put the field into the high-order bits. */
7560 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7561 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7562 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7563 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7564 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7565 - bitsize),
7566 op0, 1);
7567
7568 if (mode == BLKmode)
7569 {
7570 rtx new = assign_temp (build_qualified_type
7571 ((*lang_hooks.types.type_for_mode)
7572 (ext_mode, 0),
7573 TYPE_QUAL_CONST), 0, 1, 1);
7574
7575 emit_move_insn (new, op0);
7576 op0 = copy_rtx (new);
7577 PUT_MODE (op0, BLKmode);
7578 set_mem_attributes (op0, exp, 1);
7579 }
7580
7581 return op0;
7582 }
7583
7584 /* If the result is BLKmode, use that to access the object
7585 now as well. */
7586 if (mode == BLKmode)
7587 mode1 = BLKmode;
7588
7589 /* Get a reference to just this component. */
7590 if (modifier == EXPAND_CONST_ADDRESS
7591 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7592 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7593 else
7594 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7595
7596 if (op0 == orig_op0)
7597 op0 = copy_rtx (op0);
7598
7599 set_mem_attributes (op0, exp, 0);
7600 if (GET_CODE (XEXP (op0, 0)) == REG)
7601 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7602
7603 MEM_VOLATILE_P (op0) |= volatilep;
7604 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7605 || modifier == EXPAND_CONST_ADDRESS
7606 || modifier == EXPAND_INITIALIZER)
7607 return op0;
7608 else if (target == 0)
7609 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7610
7611 convert_move (target, op0, unsignedp);
7612 return target;
7613 }
7614
7615 case VTABLE_REF:
7616 {
7617 rtx insn, before = get_last_insn (), vtbl_ref;
7618
7619 /* Evaluate the interior expression. */
7620 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7621 tmode, modifier);
7622
7623 /* Get or create an instruction off which to hang a note. */
7624 if (REG_P (subtarget))
7625 {
7626 target = subtarget;
7627 insn = get_last_insn ();
7628 if (insn == before)
7629 abort ();
7630 if (! INSN_P (insn))
7631 insn = prev_nonnote_insn (insn);
7632 }
7633 else
7634 {
7635 target = gen_reg_rtx (GET_MODE (subtarget));
7636 insn = emit_move_insn (target, subtarget);
7637 }
7638
7639 /* Collect the data for the note. */
7640 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7641 vtbl_ref = plus_constant (vtbl_ref,
7642 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7643 /* Discard the initial CONST that was added. */
7644 vtbl_ref = XEXP (vtbl_ref, 0);
7645
7646 REG_NOTES (insn)
7647 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7648
7649 return target;
7650 }
7651
7652 /* Intended for a reference to a buffer of a file-object in Pascal.
7653 But it's not certain that a special tree code will really be
7654 necessary for these. INDIRECT_REF might work for them. */
7655 case BUFFER_REF:
7656 abort ();
7657
7658 case IN_EXPR:
7659 {
7660 /* Pascal set IN expression.
7661
7662 Algorithm:
7663 rlo = set_low - (set_low%bits_per_word);
7664 the_word = set [ (index - rlo)/bits_per_word ];
7665 bit_index = index % bits_per_word;
7666 bitmask = 1 << bit_index;
7667 return !!(the_word & bitmask); */
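/* Worked instance of the algorithm above (illustrative only, assuming
   8 bits per word): for set_low = 8 and index = 11,
     rlo = 8, the_word = set[(11 - 8) / 8] = set[0],
     bit_index = 3, bitmask = 1 << 3,
   so the result is bit 3 of the first byte of the set.  */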
7668
7669 tree set = TREE_OPERAND (exp, 0);
7670 tree index = TREE_OPERAND (exp, 1);
7671 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7672 tree set_type = TREE_TYPE (set);
7673 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7674 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7675 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7676 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7677 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7678 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7679 rtx setaddr = XEXP (setval, 0);
7680 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7681 rtx rlow;
7682 rtx diff, quo, rem, addr, bit, result;
7683
7684 /* If domain is empty, answer is no. Likewise if index is constant
7685 and out of bounds. */
7686 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7687 && TREE_CODE (set_low_bound) == INTEGER_CST
7688 && tree_int_cst_lt (set_high_bound, set_low_bound))
7689 || (TREE_CODE (index) == INTEGER_CST
7690 && TREE_CODE (set_low_bound) == INTEGER_CST
7691 && tree_int_cst_lt (index, set_low_bound))
7692 || (TREE_CODE (set_high_bound) == INTEGER_CST
7693 && TREE_CODE (index) == INTEGER_CST
7694 && tree_int_cst_lt (set_high_bound, index))))
7695 return const0_rtx;
7696
7697 if (target == 0)
7698 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7699
7700 /* If we get here, we have to generate the code for both cases
7701 (in range and out of range). */
7702
7703 op0 = gen_label_rtx ();
7704 op1 = gen_label_rtx ();
7705
7706 if (! (GET_CODE (index_val) == CONST_INT
7707 && GET_CODE (lo_r) == CONST_INT))
7708 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7709 GET_MODE (index_val), iunsignedp, op1);
7710
7711 if (! (GET_CODE (index_val) == CONST_INT
7712 && GET_CODE (hi_r) == CONST_INT))
7713 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7714 GET_MODE (index_val), iunsignedp, op1);
7715
7716 /* Calculate the element number of bit zero in the first word
7717 of the set. */
7718 if (GET_CODE (lo_r) == CONST_INT)
7719 rlow = GEN_INT (INTVAL (lo_r)
7720 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7721 else
7722 rlow = expand_binop (index_mode, and_optab, lo_r,
7723 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7724 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7725
7726 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7727 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7728
7729 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7730 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7731 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7732 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7733
7734 addr = memory_address (byte_mode,
7735 expand_binop (index_mode, add_optab, diff,
7736 setaddr, NULL_RTX, iunsignedp,
7737 OPTAB_LIB_WIDEN));
7738
7739 /* Extract the bit we want to examine. */
7740 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7741 gen_rtx_MEM (byte_mode, addr),
7742 make_tree (TREE_TYPE (index), rem),
7743 NULL_RTX, 1);
7744 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7745 GET_MODE (target) == byte_mode ? target : 0,
7746 1, OPTAB_LIB_WIDEN);
7747
7748 if (result != target)
7749 convert_move (target, result, 1);
7750
7751 /* Output the code to handle the out-of-range case. */
7752 emit_jump (op0);
7753 emit_label (op1);
7754 emit_move_insn (target, const0_rtx);
7755 emit_label (op0);
7756 return target;
7757 }
7758
7759 case WITH_CLEANUP_EXPR:
7760 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7761 {
7762 WITH_CLEANUP_EXPR_RTL (exp)
7763 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7764 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7765 CLEANUP_EH_ONLY (exp));
7766
7767 /* That's it for this cleanup. */
7768 TREE_OPERAND (exp, 1) = 0;
7769 }
7770 return WITH_CLEANUP_EXPR_RTL (exp);
7771
7772 case CLEANUP_POINT_EXPR:
7773 {
7774 /* Start a new binding layer that will keep track of all cleanup
7775 actions to be performed. */
7776 expand_start_bindings (2);
7777
7778 target_temp_slot_level = temp_slot_level;
7779
7780 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7781 /* If we're going to use this value, load it up now. */
7782 if (! ignore)
7783 op0 = force_not_mem (op0);
7784 preserve_temp_slots (op0);
7785 expand_end_bindings (NULL_TREE, 0, 0);
7786 }
7787 return op0;
7788
7789 case CALL_EXPR:
7790 /* Check for a built-in function. */
7791 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7792 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7793 == FUNCTION_DECL)
7794 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7795 {
7796 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7797 == BUILT_IN_FRONTEND)
7798 return (*lang_hooks.expand_expr) (exp, original_target,
7799 tmode, modifier);
7800 else
7801 return expand_builtin (exp, target, subtarget, tmode, ignore);
7802 }
7803
7804 return expand_call (exp, target, ignore);
7805
7806 case NON_LVALUE_EXPR:
7807 case NOP_EXPR:
7808 case CONVERT_EXPR:
7809 case REFERENCE_EXPR:
7810 if (TREE_OPERAND (exp, 0) == error_mark_node)
7811 return const0_rtx;
7812
7813 if (TREE_CODE (type) == UNION_TYPE)
7814 {
7815 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7816
7817 /* If both input and output are BLKmode, this conversion isn't doing
7818 anything except possibly changing memory attributes. */
7819 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7820 {
7821 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7822 modifier);
7823
7824 result = copy_rtx (result);
7825 set_mem_attributes (result, exp, 0);
7826 return result;
7827 }
7828
7829 if (target == 0)
7830 target = assign_temp (type, 0, 1, 1);
7831
7832 if (GET_CODE (target) == MEM)
7833 /* Store data into beginning of memory target. */
7834 store_expr (TREE_OPERAND (exp, 0),
7835 adjust_address (target, TYPE_MODE (valtype), 0),
7836 modifier == EXPAND_STACK_PARM ? 2 : 0);
7837
7838 else if (GET_CODE (target) == REG)
7839 /* Store this field into a union of the proper type. */
7840 store_field (target,
7841 MIN ((int_size_in_bytes (TREE_TYPE
7842 (TREE_OPERAND (exp, 0)))
7843 * BITS_PER_UNIT),
7844 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7845 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7846 VOIDmode, 0, type, 0);
7847 else
7848 abort ();
7849
7850 /* Return the entire union. */
7851 return target;
7852 }
7853
7854 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7855 {
7856 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7857 modifier);
7858
7859 /* If the signedness of the conversion differs and OP0 is
7860 a promoted SUBREG, clear that indication since we now
7861 have to do the proper extension. */
7862 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7863 && GET_CODE (op0) == SUBREG)
7864 SUBREG_PROMOTED_VAR_P (op0) = 0;
7865
7866 return op0;
7867 }
7868
7869 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7870 if (GET_MODE (op0) == mode)
7871 return op0;
7872
7873 /* If OP0 is a constant, just convert it into the proper mode. */
7874 if (CONSTANT_P (op0))
7875 {
7876 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7877 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7878
7879 if (modifier == EXPAND_INITIALIZER)
7880 return simplify_gen_subreg (mode, op0, inner_mode,
7881 subreg_lowpart_offset (mode,
7882 inner_mode));
7883 else
7884 return convert_modes (mode, inner_mode, op0,
7885 TREE_UNSIGNED (inner_type));
7886 }
7887
7888 if (modifier == EXPAND_INITIALIZER)
7889 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7890
7891 if (target == 0)
7892 return
7893 convert_to_mode (mode, op0,
7894 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7895 else
7896 convert_move (target, op0,
7897 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7898 return target;
7899
7900 case VIEW_CONVERT_EXPR:
7901 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7902
7903 /* If the input and output modes are both the same, we are done.
7904 Otherwise, if neither mode is BLKmode and both are within a word, we
7905 can use gen_lowpart. If neither is true, make sure the operand is
7906 in memory and convert the MEM to the new mode. */
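/* Illustrative case (annotation): reinterpreting a 32-bit float as a
   32-bit integer (e.g. an Ada unchecked conversion) has both modes
   non-BLKmode and within a word, so gen_lowpart handles it without a
   trip through memory.  */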
7907 if (TYPE_MODE (type) == GET_MODE (op0))
7908 ;
7909 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7910 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7911 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7912 op0 = gen_lowpart (TYPE_MODE (type), op0);
7913 else if (GET_CODE (op0) != MEM)
7914 {
7915 /* If the operand is not a MEM, force it into memory. Since we
7916 are going to be changing the mode of the MEM, don't call
7917 force_const_mem for constants because we don't allow pool
7918 constants to change mode. */
7919 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7920
7921 if (TREE_ADDRESSABLE (exp))
7922 abort ();
7923
7924 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7925 target
7926 = assign_stack_temp_for_type
7927 (TYPE_MODE (inner_type),
7928 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7929
7930 emit_move_insn (target, op0);
7931 op0 = target;
7932 }
7933
7934 /* At this point, OP0 is in the correct mode. If the output type is such
7935 that the operand is known to be aligned, indicate that it is.
7936 Otherwise, we need only be concerned about alignment for non-BLKmode
7937 results. */
7938 if (GET_CODE (op0) == MEM)
7939 {
7940 op0 = copy_rtx (op0);
7941
7942 if (TYPE_ALIGN_OK (type))
7943 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7944 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7945 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7946 {
7947 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7948 HOST_WIDE_INT temp_size
7949 = MAX (int_size_in_bytes (inner_type),
7950 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7951 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7952 temp_size, 0, type);
7953 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7954
7955 if (TREE_ADDRESSABLE (exp))
7956 abort ();
7957
7958 if (GET_MODE (op0) == BLKmode)
7959 emit_block_move (new_with_op0_mode, op0,
7960 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7961 (modifier == EXPAND_STACK_PARM
7962 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7963 else
7964 emit_move_insn (new_with_op0_mode, op0);
7965
7966 op0 = new;
7967 }
7968
7969 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7970 }
7971
7972 return op0;
7973
7974 case PLUS_EXPR:
7975 this_optab = ! unsignedp && flag_trapv
7976 && (GET_MODE_CLASS (mode) == MODE_INT)
7977 ? addv_optab : add_optab;
7978
7979 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7980 something else, make sure we add the register to the constant and
7981 then to the other thing. This case can occur during strength
7982 reduction and doing it this way will produce better code if the
7983 frame pointer or argument pointer is eliminated.
7984
7985 fold-const.c will ensure that the constant is always in the inner
7986 PLUS_EXPR, so the only case we need to do anything about is if
7987 sp, ap, or fp is our second argument, in which case we must swap
7988 the innermost first argument and our second argument. */
7989
7990 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7991 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7992 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7993 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7994 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7995 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7996 {
7997 tree t = TREE_OPERAND (exp, 1);
7998
7999 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8000 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8001 }
8002
8003 /* If the result is to be ptr_mode and we are adding an integer to
8004 something, we might be forming a constant. So try to use
8005 plus_constant. If it produces a sum and we can't accept it,
8006 use force_operand. This allows P = &ARR[const] to generate
8007 efficient code on machines where a SYMBOL_REF is not a valid
8008 address.
8009
8010 If this is an EXPAND_SUM call, always return the sum. */
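/* Illustrative case (annotation, assuming 4-byte elements): for
   P = &ARR[10] the address is ARR + 40; plus_constant folds the 40
   into the symbolic sum, and if the result is not a valid address,
   force_operand below fixes it up.  */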
8011 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8012 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8013 {
8014 if (modifier == EXPAND_STACK_PARM)
8015 target = 0;
8016 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8017 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8018 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8019 {
8020 rtx constant_part;
8021
8022 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8023 EXPAND_SUM);
8024 /* Use immed_double_const to ensure that the constant is
8025 truncated according to the mode of OP1, then sign extended
8026 to a HOST_WIDE_INT. Using the constant directly can result
8027 in non-canonical RTL in a 64x32 cross compile. */
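/* E.g. (illustrative) an SImode constant with all 32 bits set must
   become the canonical CONST_INT -1 on a host with 64-bit
   HOST_WIDE_INT, not the value 0xffffffff.  */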
8028 constant_part
8029 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8030 (HOST_WIDE_INT) 0,
8031 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8032 op1 = plus_constant (op1, INTVAL (constant_part));
8033 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8034 op1 = force_operand (op1, target);
8035 return op1;
8036 }
8037
8038 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8039 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8040 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8041 {
8042 rtx constant_part;
8043
8044 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8045 (modifier == EXPAND_INITIALIZER
8046 ? EXPAND_INITIALIZER : EXPAND_SUM));
8047 if (! CONSTANT_P (op0))
8048 {
8049 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8050 VOIDmode, modifier);
8051 /* Don't go to both_summands if modifier
8052 says it's not right to return a PLUS. */
8053 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8054 goto binop2;
8055 goto both_summands;
8056 }
8057 /* Use immed_double_const to ensure that the constant is
8058 truncated according to the mode of OP1, then sign extended
8059 to a HOST_WIDE_INT. Using the constant directly can result
8060 in non-canonical RTL in a 64x32 cross compile. */
8061 constant_part
8062 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8063 (HOST_WIDE_INT) 0,
8064 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8065 op0 = plus_constant (op0, INTVAL (constant_part));
8066 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8067 op0 = force_operand (op0, target);
8068 return op0;
8069 }
8070 }
8071
8072 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8073 subtarget = 0;
8074
8075 /* No sense saving up arithmetic to be done
8076 if it's all in the wrong mode to form part of an address.
8077 And force_operand won't know whether to sign-extend or
8078 zero-extend. */
8079 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8080 || mode != ptr_mode)
8081 {
8082 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8083 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8084 if (op0 == const0_rtx)
8085 return op1;
8086 if (op1 == const0_rtx)
8087 return op0;
8088 goto binop2;
8089 }
8090
8091 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8092 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8093
8094 /* We come here from MINUS_EXPR when the second operand is a
8095 constant. */
8096 both_summands:
8097 /* Make sure any term that's a sum with a constant comes last. */
8098 if (GET_CODE (op0) == PLUS
8099 && CONSTANT_P (XEXP (op0, 1)))
8100 {
8101 temp = op0;
8102 op0 = op1;
8103 op1 = temp;
8104 }
8105 /* If adding to a sum including a constant,
8106 associate it to put the constant outside. */
8107 if (GET_CODE (op1) == PLUS
8108 && CONSTANT_P (XEXP (op1, 1)))
8109 {
8110 rtx constant_term = const0_rtx;
8111
8112 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8113 if (temp != 0)
8114 op0 = temp;
8115 /* Ensure that MULT comes first if there is one. */
8116 else if (GET_CODE (op0) == MULT)
8117 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8118 else
8119 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8120
8121 /* Let's also eliminate constants from op0 if possible. */
8122 op0 = eliminate_constant_term (op0, &constant_term);
8123
8124 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8125 their sum should be a constant. Form it into OP1, since the
8126 result we want will then be OP0 + OP1. */
8127
8128 temp = simplify_binary_operation (PLUS, mode, constant_term,
8129 XEXP (op1, 1));
8130 if (temp != 0)
8131 op1 = temp;
8132 else
8133 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8134 }
8135
8136 /* Put a constant term last and put a multiplication first. */
8137 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8138 temp = op1, op1 = op0, op0 = temp;
8139
8140 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8141 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8142
8143 case MINUS_EXPR:
8144 /* For initializers, we are allowed to return a MINUS of two
8145 symbolic constants; handle here all cases where both operands
8146 are constant. */
8149 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8150 && really_constant_p (TREE_OPERAND (exp, 0))
8151 && really_constant_p (TREE_OPERAND (exp, 1)))
8152 {
8153 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8154 modifier);
8155 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8156 modifier);
8157
8158 /* If the last operand is a CONST_INT, use plus_constant of
8159 the negated constant. Else make the MINUS. */
8160 if (GET_CODE (op1) == CONST_INT)
8161 return plus_constant (op0, - INTVAL (op1));
8162 else
8163 return gen_rtx_MINUS (mode, op0, op1);
8164 }
8165
8166 this_optab = ! unsignedp && flag_trapv
8167 && (GET_MODE_CLASS(mode) == MODE_INT)
8168 ? subv_optab : sub_optab;
8169
8170 /* No sense saving up arithmetic to be done
8171 if it's all in the wrong mode to form part of an address.
8172 And force_operand won't know whether to sign-extend or
8173 zero-extend. */
8174 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8175 || mode != ptr_mode)
8176 goto binop;
8177
8178 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8179 subtarget = 0;
8180
8181 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8182 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8183
8184 /* Convert A - const to A + (-const). */
8185 if (GET_CODE (op1) == CONST_INT)
8186 {
8187 op1 = negate_rtx (mode, op1);
8188 goto both_summands;
8189 }
8190
8191 goto binop2;
8192
8193 case MULT_EXPR:
8194 /* If first operand is constant, swap them.
8195 Thus the following special case checks need only
8196 check the second operand. */
8197 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8198 {
8199 tree t1 = TREE_OPERAND (exp, 0);
8200 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8201 TREE_OPERAND (exp, 1) = t1;
8202 }
8203
8204 /* Attempt to return something suitable for generating an
8205 indexed address, for machines that support that. */
8206
8207 if (modifier == EXPAND_SUM && mode == ptr_mode
8208 && host_integerp (TREE_OPERAND (exp, 1), 0))
8209 {
8210 tree exp1 = TREE_OPERAND (exp, 1);
8211
8212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8213 EXPAND_SUM);
8214
8215 /* If we knew for certain that this is arithmetic for an array
8216 reference, and we knew the bounds of the array, then we could
8217 apply the distributive law across (PLUS X C) for constant C.
8218 Without such knowledge, we risk overflowing the computation
8219 when both X and C are large, but X+C isn't. */
8220 /* ??? Could perhaps special-case EXP being unsigned and C being
8221 positive. In that case we are certain that X+C is no smaller
8222 than X and so the transformed expression will overflow iff the
8223 original would have. */
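/* Illustrative instance of the risk above: with 32-bit signed
   arithmetic, X = 2^30 and C = -2^30 + 1 give a small X + C, so
   (X + C) * 4 is fine, yet distributing it as X*4 + C*4 would
   overflow in each product.  */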
8224
8225 if (GET_CODE (op0) != REG)
8226 op0 = force_operand (op0, NULL_RTX);
8227 if (GET_CODE (op0) != REG)
8228 op0 = copy_to_mode_reg (mode, op0);
8229
8230 return gen_rtx_MULT (mode, op0,
8231 gen_int_mode (tree_low_cst (exp1, 0),
8232 TYPE_MODE (TREE_TYPE (exp1))));
8233 }
8234
8235 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8236 subtarget = 0;
8237
8238 if (modifier == EXPAND_STACK_PARM)
8239 target = 0;
8240
8241 /* Check for multiplying things that have been extended
8242 from a narrower type. If this machine supports multiplying
8243 in that narrower type with a result in the desired type,
8244 do it that way, and avoid the explicit type-conversion. */
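/* E.g. (illustrative) (int) a * (int) b, with A and B of a narrower
   type such as short, can use the target's widening multiply pattern
   directly instead of widening both operands first.  */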
8245 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8246 && TREE_CODE (type) == INTEGER_TYPE
8247 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8248 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8249 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8250 && int_fits_type_p (TREE_OPERAND (exp, 1),
8251 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8252 /* Don't use a widening multiply if a shift will do. */
8253 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8254 > HOST_BITS_PER_WIDE_INT)
8255 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8256 ||
8257 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8258 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8259 ==
8260 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8261 /* If both operands are extended, they must either both
8262 be zero-extended or both be sign-extended. */
8263 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8264 ==
8265 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8266 {
8267 enum machine_mode innermode
8268 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8269 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8270 ? smul_widen_optab : umul_widen_optab);
8271 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8272 ? umul_widen_optab : smul_widen_optab);
8273 if (mode == GET_MODE_WIDER_MODE (innermode))
8274 {
8275 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8276 {
8277 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8278 NULL_RTX, VOIDmode, 0);
8279 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8280 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8281 VOIDmode, 0);
8282 else
8283 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8284 NULL_RTX, VOIDmode, 0);
8285 goto binop2;
8286 }
8287 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8288 && innermode == word_mode)
8289 {
8290 rtx htem;
8291 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8292 NULL_RTX, VOIDmode, 0);
8293 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8294 op1 = convert_modes (innermode, mode,
8295 expand_expr (TREE_OPERAND (exp, 1),
8296 NULL_RTX, VOIDmode, 0),
8297 unsignedp);
8298 else
8299 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8300 NULL_RTX, VOIDmode, 0);
8301 temp = expand_binop (mode, other_optab, op0, op1, target,
8302 unsignedp, OPTAB_LIB_WIDEN);
8303 htem = expand_mult_highpart_adjust (innermode,
8304 gen_highpart (innermode, temp),
8305 op0, op1,
8306 gen_highpart (innermode, temp),
8307 unsignedp);
8308 emit_move_insn (gen_highpart (innermode, temp), htem);
8309 return temp;
8310 }
8311 }
8312 }
8313 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8314 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8315 return expand_mult (mode, op0, op1, target, unsignedp);
8316
8317 case TRUNC_DIV_EXPR:
8318 case FLOOR_DIV_EXPR:
8319 case CEIL_DIV_EXPR:
8320 case ROUND_DIV_EXPR:
8321 case EXACT_DIV_EXPR:
8322 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8323 subtarget = 0;
8324 if (modifier == EXPAND_STACK_PARM)
8325 target = 0;
8326 /* Possible optimization: compute the dividend with EXPAND_SUM;
8327 then, if the divisor is constant, optimize the case where some
8328 terms of the dividend have coefficients divisible by it. */
8329 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8330 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8331 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8332
8333 case RDIV_EXPR:
8334 /* Emit a/b as a*(1/b); later passes may CSE the reciprocal and save an
8335 expensive divide, and if not, combine rebuilds the original computation.
8336 (Done only under flag_unsafe_math_optimizations, since rounding can differ.) */
8337 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8338 && TREE_CODE (type) == REAL_TYPE
8339 && !real_onep (TREE_OPERAND (exp, 0)))
8340 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8341 build (RDIV_EXPR, type,
8342 build_real (type, dconst1),
8343 TREE_OPERAND (exp, 1))),
8344 target, tmode, modifier);
8345 this_optab = sdiv_optab;
8346 goto binop;
8347
8348 case TRUNC_MOD_EXPR:
8349 case FLOOR_MOD_EXPR:
8350 case CEIL_MOD_EXPR:
8351 case ROUND_MOD_EXPR:
8352 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8353 subtarget = 0;
8354 if (modifier == EXPAND_STACK_PARM)
8355 target = 0;
8356 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8357 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8358 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8359
8360 case FIX_ROUND_EXPR:
8361 case FIX_FLOOR_EXPR:
8362 case FIX_CEIL_EXPR:
8363 abort (); /* Not used for C. */
8364
8365 case FIX_TRUNC_EXPR:
8366 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8367 if (target == 0 || modifier == EXPAND_STACK_PARM)
8368 target = gen_reg_rtx (mode);
8369 expand_fix (target, op0, unsignedp);
8370 return target;
8371
8372 case FLOAT_EXPR:
8373 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8374 if (target == 0 || modifier == EXPAND_STACK_PARM)
8375 target = gen_reg_rtx (mode);
8376 /* expand_float can't figure out what to do if FROM has VOIDmode.
8377 So give it the correct mode. With -O, cse will optimize this. */
8378 if (GET_MODE (op0) == VOIDmode)
8379 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8380 op0);
8381 expand_float (target, op0,
8382 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8383 return target;
8384
8385 case NEGATE_EXPR:
8386 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8387 if (modifier == EXPAND_STACK_PARM)
8388 target = 0;
8389 temp = expand_unop (mode,
8390 ! unsignedp && flag_trapv
8391 && (GET_MODE_CLASS(mode) == MODE_INT)
8392 ? negv_optab : neg_optab, op0, target, 0);
8393 if (temp == 0)
8394 abort ();
8395 return temp;
8396
8397 case ABS_EXPR:
8398 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8399 if (modifier == EXPAND_STACK_PARM)
8400 target = 0;
8401
8402 /* Handle complex values specially. */
8403 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8404 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8405 return expand_complex_abs (mode, op0, target, unsignedp);
8406
8407 /* Unsigned abs is simply the operand. Testing here means we don't
8408 risk generating incorrect code below. */
8409 if (TREE_UNSIGNED (type))
8410 return op0;
8411
8412 return expand_abs (mode, op0, target, unsignedp,
8413 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8414
8415 case MAX_EXPR:
8416 case MIN_EXPR:
8417 target = original_target;
8418 if (target == 0
8419 || modifier == EXPAND_STACK_PARM
8420 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8421 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8422 || GET_MODE (target) != mode
8423 || (GET_CODE (target) == REG
8424 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8425 target = gen_reg_rtx (mode);
8426 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8427 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8428
8429 /* First try to do it with a special MIN or MAX instruction.
8430 If that does not win, use a conditional jump to select the proper
8431 value. */
8432 this_optab = (TREE_UNSIGNED (type)
8433 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8434 : (code == MIN_EXPR ? smin_optab : smax_optab));
8435
8436 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8437 OPTAB_WIDEN);
8438 if (temp != 0)
8439 return temp;
8440
8441 /* At this point, a MEM target is no longer useful; we will get better
8442 code without it. */
8443
8444 if (GET_CODE (target) == MEM)
8445 target = gen_reg_rtx (mode);
8446
8447 if (target != op0)
8448 emit_move_insn (target, op0);
8449
8450 op0 = gen_label_rtx ();
8451
8452 /* If this mode is an integer too wide to compare properly,
8453 compare word by word. Rely on cse to optimize constant cases. */
8454 if (GET_MODE_CLASS (mode) == MODE_INT
8455 && ! can_compare_p (GE, mode, ccp_jump))
8456 {
8457 if (code == MAX_EXPR)
8458 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8459 target, op1, NULL_RTX, op0);
8460 else
8461 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8462 op1, target, NULL_RTX, op0);
8463 }
8464 else
8465 {
8466 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8467 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8468 unsignedp, mode, NULL_RTX, NULL_RTX,
8469 op0);
8470 }
8471 emit_move_insn (target, op1);
8472 emit_label (op0);
8473 return target;
8474
8475 case BIT_NOT_EXPR:
8476 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8477 if (modifier == EXPAND_STACK_PARM)
8478 target = 0;
8479 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8480 if (temp == 0)
8481 abort ();
8482 return temp;
8483
8484 case FFS_EXPR:
8485 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8486 if (modifier == EXPAND_STACK_PARM)
8487 target = 0;
8488 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8489 if (temp == 0)
8490 abort ();
8491 return temp;
8492
8493 case CLZ_EXPR:
8494 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8495 temp = expand_unop (mode, clz_optab, op0, target, 1);
8496 if (temp == 0)
8497 abort ();
8498 return temp;
8499
8500 case CTZ_EXPR:
8501 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8502 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8503 if (temp == 0)
8504 abort ();
8505 return temp;
8506
8507 case POPCOUNT_EXPR:
8508 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8509 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8510 if (temp == 0)
8511 abort ();
8512 return temp;
8513
8514 case PARITY_EXPR:
8515 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8516 temp = expand_unop (mode, parity_optab, op0, target, 1);
8517 if (temp == 0)
8518 abort ();
8519 return temp;
8520
8521 /* ??? Can optimize bitwise operations with one arg constant.
8522 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8523 and (a bitwise1 b) bitwise2 b (etc)
8524 but that is probably not worthwhile. */
8525
8526 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8527 boolean values when we want in all cases to compute both of them. In
8528 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8529 as actual zero-or-1 values and then bitwise anding. In cases where
8530 there cannot be any side effects, better code would be made by
8531 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8532 how to recognize those cases. */
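/* Illustrative contrast: TRUTH_AND_EXPR evaluates both operands
   unconditionally, as in (a != 0) & (b != 0), whereas TRUTH_ANDIF_EXPR
   (&&) may skip the second operand entirely.  */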
8533
8534 case TRUTH_AND_EXPR:
8535 case BIT_AND_EXPR:
8536 this_optab = and_optab;
8537 goto binop;
8538
8539 case TRUTH_OR_EXPR:
8540 case BIT_IOR_EXPR:
8541 this_optab = ior_optab;
8542 goto binop;
8543
8544 case TRUTH_XOR_EXPR:
8545 case BIT_XOR_EXPR:
8546 this_optab = xor_optab;
8547 goto binop;
8548
8549 case LSHIFT_EXPR:
8550 case RSHIFT_EXPR:
8551 case LROTATE_EXPR:
8552 case RROTATE_EXPR:
8553 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8554 subtarget = 0;
8555 if (modifier == EXPAND_STACK_PARM)
8556 target = 0;
8557 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8558 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8559 unsignedp);
8560
8561 /* Could determine the answer when only additive constants differ. Also,
8562 the addition of one can be handled by changing the condition. */
8563 case LT_EXPR:
8564 case LE_EXPR:
8565 case GT_EXPR:
8566 case GE_EXPR:
8567 case EQ_EXPR:
8568 case NE_EXPR:
8569 case UNORDERED_EXPR:
8570 case ORDERED_EXPR:
8571 case UNLT_EXPR:
8572 case UNLE_EXPR:
8573 case UNGT_EXPR:
8574 case UNGE_EXPR:
8575 case UNEQ_EXPR:
8576 temp = do_store_flag (exp,
8577 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8578 tmode != VOIDmode ? tmode : mode, 0);
8579 if (temp != 0)
8580 return temp;
8581
8582 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8583 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8584 && original_target
8585 && GET_CODE (original_target) == REG
8586 && (GET_MODE (original_target)
8587 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8588 {
8589 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8590 VOIDmode, 0);
8591
8592 /* If temp is constant, we can just compute the result. */
8593 if (GET_CODE (temp) == CONST_INT)
8594 {
8595 if (INTVAL (temp) != 0)
8596 emit_move_insn (target, const1_rtx);
8597 else
8598 emit_move_insn (target, const0_rtx);
8599
8600 return target;
8601 }
8602
8603 if (temp != original_target)
8604 {
8605 enum machine_mode mode1 = GET_MODE (temp);
8606 if (mode1 == VOIDmode)
8607 mode1 = tmode != VOIDmode ? tmode : mode;
8608
8609 temp = copy_to_mode_reg (mode1, temp);
8610 }
8611
8612 op1 = gen_label_rtx ();
8613 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8614 GET_MODE (temp), unsignedp, op1);
8615 emit_move_insn (temp, const1_rtx);
8616 emit_label (op1);
8617 return temp;
8618 }
8619
8620 /* If no set-flag instruction, must generate a conditional
8621 store into a temporary variable. Drop through
8622 and handle this like && and ||. */
8623
8624 case TRUTH_ANDIF_EXPR:
8625 case TRUTH_ORIF_EXPR:
8626 if (! ignore
8627 && (target == 0
8628 || modifier == EXPAND_STACK_PARM
8629 || ! safe_from_p (target, exp, 1)
8630 /* Make sure we don't have a hard reg (such as function's return
8631 value) live across basic blocks, if not optimizing. */
8632 || (!optimize && GET_CODE (target) == REG
8633 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8634 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8635
8636 if (target)
8637 emit_clr_insn (target);
8638
8639 op1 = gen_label_rtx ();
8640 jumpifnot (exp, op1);
8641
8642 if (target)
8643 emit_0_to_1_insn (target);
8644
8645 emit_label (op1);
8646 return ignore ? const0_rtx : target;
8647
8648 case TRUTH_NOT_EXPR:
8649 if (modifier == EXPAND_STACK_PARM)
8650 target = 0;
8651 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8652 /* The parser is careful to generate TRUTH_NOT_EXPR
8653 only with operands that are always zero or one. */
8654 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8655 target, 1, OPTAB_LIB_WIDEN);
8656 if (temp == 0)
8657 abort ();
8658 return temp;
8659
8660 case COMPOUND_EXPR:
8661 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8662 emit_queue ();
8663 return expand_expr (TREE_OPERAND (exp, 1),
8664 (ignore ? const0_rtx : target),
8665 VOIDmode, modifier);
8666
8667 case COND_EXPR:
8668 /* If we would have a "singleton" (see below) were it not for a
8669 conversion in each arm, bring that conversion back out. */
8670 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8671 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8672 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8673 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8674 {
8675 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8676 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8677
8678 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8679 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8680 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8681 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8682 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8683 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8684 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8685 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8686 return expand_expr (build1 (NOP_EXPR, type,
8687 build (COND_EXPR, TREE_TYPE (iftrue),
8688 TREE_OPERAND (exp, 0),
8689 iftrue, iffalse)),
8690 target, tmode, modifier);
8691 }
8692
8693 {
8694 /* Note that COND_EXPRs whose type is a structure or union
8695 are required to be constructed to contain assignments of
8696 a temporary variable, so that we can evaluate them here
8697 for side effect only. If type is void, we must do likewise. */
8698
8699 /* If an arm of the branch requires a cleanup,
8700 only that cleanup is performed. */
8701
8702 tree singleton = 0;
8703 tree binary_op = 0, unary_op = 0;
8704
8705 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8706 convert it to our mode, if necessary. */
8707 if (integer_onep (TREE_OPERAND (exp, 1))
8708 && integer_zerop (TREE_OPERAND (exp, 2))
8709 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8710 {
8711 if (ignore)
8712 {
8713 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8714 modifier);
8715 return const0_rtx;
8716 }
8717
8718 if (modifier == EXPAND_STACK_PARM)
8719 target = 0;
8720 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8721 if (GET_MODE (op0) == mode)
8722 return op0;
8723
8724 if (target == 0)
8725 target = gen_reg_rtx (mode);
8726 convert_move (target, op0, unsignedp);
8727 return target;
8728 }
8729
8730 /* Check for X ? A + B : A. If we have this, we can copy A to the
8731 output and conditionally add B. Similarly for unary operations.
8732 Don't do this if X has side-effects because those side effects
8733 might affect A or B and the "?" operation is a sequence point in
8734 ANSI. (operand_equal_p tests for side effects.) */
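/* For example (hypothetical source): in

     r = x ? a + b : a;

   "a" is the singleton and the addition is the binary operation, so
   the expansion below can store "a" into the result unconditionally
   and perform the addition into that same location only when X is
   true.  */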
8735
8736 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8737 && operand_equal_p (TREE_OPERAND (exp, 2),
8738 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8739 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8740 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8741 && operand_equal_p (TREE_OPERAND (exp, 1),
8742 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8743 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8744 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8745 && operand_equal_p (TREE_OPERAND (exp, 2),
8746 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8747 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8748 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8749 && operand_equal_p (TREE_OPERAND (exp, 1),
8750 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8751 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8752
8753 /* If we are not to produce a result, we have no target. Otherwise,
8754 if a target was specified use it; it will not be used as an
8755 intermediate target unless it is safe. If no target, use a
8756 temporary. */
8757
8758 if (ignore)
8759 temp = 0;
8760 else if (modifier == EXPAND_STACK_PARM)
8761 temp = assign_temp (type, 0, 0, 1);
8762 else if (original_target
8763 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8764 || (singleton && GET_CODE (original_target) == REG
8765 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8766 && original_target == var_rtx (singleton)))
8767 && GET_MODE (original_target) == mode
8768 #ifdef HAVE_conditional_move
8769 && (! can_conditionally_move_p (mode)
8770 || GET_CODE (original_target) == REG
8771 || TREE_ADDRESSABLE (type))
8772 #endif
8773 && (GET_CODE (original_target) != MEM
8774 || TREE_ADDRESSABLE (type)))
8775 temp = original_target;
8776 else if (TREE_ADDRESSABLE (type))
8777 abort ();
8778 else
8779 temp = assign_temp (type, 0, 0, 1);
8780
8781 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8782 do the test of X as a store-flag operation, do this as
8783 A + ((X != 0) << log C). Similarly for other simple binary
8784 operators. Only do for C == 1 if BRANCH_COST is low. */
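/* Concretely (hypothetical source), with BRANCH_COST >= 3,

     r = x ? a + 4 : a;

   can be expanded as

     r = a + ((x != 0) << 2);

   with no branch at all; and as noted below, "x ? a : a + 1" is
   handled as "a + (x == 0)".  */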
8785 if (temp && singleton && binary_op
8786 && (TREE_CODE (binary_op) == PLUS_EXPR
8787 || TREE_CODE (binary_op) == MINUS_EXPR
8788 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8789 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8790 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8791 : integer_onep (TREE_OPERAND (binary_op, 1)))
8792 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8793 {
8794 rtx result;
8795 tree cond;
8796 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8797 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8798 ? addv_optab : add_optab)
8799 : TREE_CODE (binary_op) == MINUS_EXPR
8800 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8801 ? subv_optab : sub_optab)
8802 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8803 : xor_optab);
8804
8805 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8806 if (singleton == TREE_OPERAND (exp, 1))
8807 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8808 else
8809 cond = TREE_OPERAND (exp, 0);
8810
8811 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8812 ? temp : NULL_RTX),
8813 mode, BRANCH_COST <= 1);
8814
8815 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8816 result = expand_shift (LSHIFT_EXPR, mode, result,
8817 build_int_2 (tree_log2
8818 (TREE_OPERAND
8819 (binary_op, 1)),
8820 0),
8821 (safe_from_p (temp, singleton, 1)
8822 ? temp : NULL_RTX), 0);
8823
8824 if (result)
8825 {
8826 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8827 return expand_binop (mode, boptab, op1, result, temp,
8828 unsignedp, OPTAB_LIB_WIDEN);
8829 }
8830 }
8831
8832 do_pending_stack_adjust ();
8833 NO_DEFER_POP;
8834 op0 = gen_label_rtx ();
8835
8836 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8837 {
8838 if (temp != 0)
8839 {
8840 /* If the target conflicts with the other operand of the
8841 binary op, we can't use it. Also, we can't use the target
8842 if it is a hard register, because evaluating the condition
8843 might clobber it. */
8844 if ((binary_op
8845 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8846 || (GET_CODE (temp) == REG
8847 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8848 temp = gen_reg_rtx (mode);
8849 store_expr (singleton, temp,
8850 modifier == EXPAND_STACK_PARM ? 2 : 0);
8851 }
8852 else
8853 expand_expr (singleton,
8854 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8855 if (singleton == TREE_OPERAND (exp, 1))
8856 jumpif (TREE_OPERAND (exp, 0), op0);
8857 else
8858 jumpifnot (TREE_OPERAND (exp, 0), op0);
8859
8860 start_cleanup_deferral ();
8861 if (binary_op && temp == 0)
8862 /* Just touch the other operand. */
8863 expand_expr (TREE_OPERAND (binary_op, 1),
8864 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8865 else if (binary_op)
8866 store_expr (build (TREE_CODE (binary_op), type,
8867 make_tree (type, temp),
8868 TREE_OPERAND (binary_op, 1)),
8869 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8870 else
8871 store_expr (build1 (TREE_CODE (unary_op), type,
8872 make_tree (type, temp)),
8873 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8874 op1 = op0;
8875 }
8876 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8877 comparison operator. If we have one of these cases, set the
8878 output to A, branch on A (cse will merge these two references),
8879 then set the output to FOO. */
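/* For instance (hypothetical source), for

     r = (x != 0) ? x : y;

   the sequence is roughly: r = x; if (x != 0) goto done; r = y;
   done: -- the two references to x are expected to be merged by
   cse.  */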
8880 else if (temp
8881 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8882 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8883 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8884 TREE_OPERAND (exp, 1), 0)
8885 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8886 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8887 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8888 {
8889 if (GET_CODE (temp) == REG
8890 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8891 temp = gen_reg_rtx (mode);
8892 store_expr (TREE_OPERAND (exp, 1), temp,
8893 modifier == EXPAND_STACK_PARM ? 2 : 0);
8894 jumpif (TREE_OPERAND (exp, 0), op0);
8895
8896 start_cleanup_deferral ();
8897 store_expr (TREE_OPERAND (exp, 2), temp,
8898 modifier == EXPAND_STACK_PARM ? 2 : 0);
8899 op1 = op0;
8900 }
8901 else if (temp
8902 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8903 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8905 TREE_OPERAND (exp, 2), 0)
8906 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8907 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8908 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8909 {
8910 if (GET_CODE (temp) == REG
8911 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8912 temp = gen_reg_rtx (mode);
8913 store_expr (TREE_OPERAND (exp, 2), temp,
8914 modifier == EXPAND_STACK_PARM ? 2 : 0);
8915 jumpifnot (TREE_OPERAND (exp, 0), op0);
8916
8917 start_cleanup_deferral ();
8918 store_expr (TREE_OPERAND (exp, 1), temp,
8919 modifier == EXPAND_STACK_PARM ? 2 : 0);
8920 op1 = op0;
8921 }
8922 else
8923 {
8924 op1 = gen_label_rtx ();
8925 jumpifnot (TREE_OPERAND (exp, 0), op0);
8926
8927 start_cleanup_deferral ();
8928
8929 /* One branch of the cond can be void, if it never returns. For
8930 example A ? throw : E. */
8931 if (temp != 0
8932 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8933 store_expr (TREE_OPERAND (exp, 1), temp,
8934 modifier == EXPAND_STACK_PARM ? 2 : 0);
8935 else
8936 expand_expr (TREE_OPERAND (exp, 1),
8937 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8938 end_cleanup_deferral ();
8939 emit_queue ();
8940 emit_jump_insn (gen_jump (op1));
8941 emit_barrier ();
8942 emit_label (op0);
8943 start_cleanup_deferral ();
8944 if (temp != 0
8945 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8946 store_expr (TREE_OPERAND (exp, 2), temp,
8947 modifier == EXPAND_STACK_PARM ? 2 : 0);
8948 else
8949 expand_expr (TREE_OPERAND (exp, 2),
8950 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8951 }
8952
8953 end_cleanup_deferral ();
8954
8955 emit_queue ();
8956 emit_label (op1);
8957 OK_DEFER_POP;
8958
8959 return temp;
8960 }
8961
8962 case TARGET_EXPR:
8963 {
8964 /* Something needs to be initialized, but we didn't know
8965 where that thing was when building the tree. For example,
8966 it could be the return value of a function, or a parameter
8967 to a function that is constructed on the stack, or a temporary
8968 variable which must be passed by reference.
8969
8970 We guarantee that the expression will either be constructed
8971 or copied into our original target. */
8972
8973 tree slot = TREE_OPERAND (exp, 0);
8974 tree cleanups = NULL_TREE;
8975 tree exp1;
8976
8977 if (TREE_CODE (slot) != VAR_DECL)
8978 abort ();
8979
8980 if (! ignore)
8981 target = original_target;
8982
8983 /* Set this here so that if we get a target that refers to a
8984 register variable that's already been used, put_reg_into_stack
8985 knows that it should fix up those uses. */
8986 TREE_USED (slot) = 1;
8987
8988 if (target == 0)
8989 {
8990 if (DECL_RTL_SET_P (slot))
8991 {
8992 target = DECL_RTL (slot);
8993 /* If we have already expanded the slot, don't do
8994 it again. (mrs) */
8995 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8996 return target;
8997 }
8998 else
8999 {
9000 target = assign_temp (type, 2, 0, 1);
9001 /* All temp slots at this level must not conflict. */
9002 preserve_temp_slots (target);
9003 SET_DECL_RTL (slot, target);
9004 if (TREE_ADDRESSABLE (slot))
9005 put_var_into_stack (slot, /*rescan=*/false);
9006
9007 /* Since SLOT is not known to the called function
9008 to belong to its stack frame, we must build an explicit
9009 cleanup. This case occurs when we must build up a reference
9010 to pass the reference as an argument. In this case,
9011 it is very likely that such a reference need not be
9012 built here. */
9013
9014 if (TREE_OPERAND (exp, 2) == 0)
9015 TREE_OPERAND (exp, 2)
9016 = (*lang_hooks.maybe_build_cleanup) (slot);
9017 cleanups = TREE_OPERAND (exp, 2);
9018 }
9019 }
9020 else
9021 {
9022 /* This case does occur, when expanding a parameter which
9023 needs to be constructed on the stack. The target
9024 is the actual stack address that we want to initialize.
9025 The function we call will perform the cleanup in this case. */
9026
9027 /* If we have already assigned it space, use that space,
9028 not the target that we were passed in, as our target
9029 parameter is only a hint. */
9030 if (DECL_RTL_SET_P (slot))
9031 {
9032 target = DECL_RTL (slot);
9033 /* If we have already expanded the slot, don't do
9034 it again. (mrs) */
9035 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9036 return target;
9037 }
9038 else
9039 {
9040 SET_DECL_RTL (slot, target);
9041 /* If we must have an addressable slot, then make sure that
9042 the RTL that we just stored in slot is OK. */
9043 if (TREE_ADDRESSABLE (slot))
9044 put_var_into_stack (slot, /*rescan=*/true);
9045 }
9046 }
9047
9048 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9049 /* Mark it as expanded. */
9050 TREE_OPERAND (exp, 1) = NULL_TREE;
9051
9052 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9053
9054 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9055
9056 return target;
9057 }
9058
9059 case INIT_EXPR:
9060 {
9061 tree lhs = TREE_OPERAND (exp, 0);
9062 tree rhs = TREE_OPERAND (exp, 1);
9063
9064 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9065 return temp;
9066 }
9067
9068 case MODIFY_EXPR:
9069 {
9070 /* If lhs is complex, expand calls in rhs before computing it.
9071 That's so we don't compute a pointer and save it over a
9072 call. If lhs is simple, compute it first so we can give it
9073 as a target if the rhs is just a call. This avoids an
9074 extra temp and copy, and prevents a partial subsumption
9075 that makes bad code. Actually we could treat
9076 component_ref's of vars like vars. */
9077
9078 tree lhs = TREE_OPERAND (exp, 0);
9079 tree rhs = TREE_OPERAND (exp, 1);
9080
9081 temp = 0;
9082
9083 /* Check for |= or &= of a bitfield of size one into another bitfield
9084 of size 1. In this case, (unless we need the result of the
9085 assignment) we can do this more efficiently with a
9086 test followed by an assignment, if necessary.
9087
9088 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9089 things change so we do, this code should be enhanced to
9090 support it. */
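/* As a concrete (hypothetical) example, with one-bit bitfields s.a
   and s.b and the result unused,

     s.a |= s.b;   becomes roughly   if (s.b) s.a = 1;
     s.a &= s.b;   becomes roughly   if (! s.b) s.a = 0;

   i.e. a jump around a store of a constant instead of a
   read-modify-write of the destination bitfield.  */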
9091 if (ignore
9092 && TREE_CODE (lhs) == COMPONENT_REF
9093 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9094 || TREE_CODE (rhs) == BIT_AND_EXPR)
9095 && TREE_OPERAND (rhs, 0) == lhs
9096 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9097 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9098 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9099 {
9100 rtx label = gen_label_rtx ();
9101
9102 do_jump (TREE_OPERAND (rhs, 1),
9103 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9104 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9105 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9106 (TREE_CODE (rhs) == BIT_IOR_EXPR
9107 ? integer_one_node
9108 : integer_zero_node)),
9109 0, 0);
9110 do_pending_stack_adjust ();
9111 emit_label (label);
9112 return const0_rtx;
9113 }
9114
9115 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9116
9117 return temp;
9118 }
9119
9120 case RETURN_EXPR:
9121 if (!TREE_OPERAND (exp, 0))
9122 expand_null_return ();
9123 else
9124 expand_return (TREE_OPERAND (exp, 0));
9125 return const0_rtx;
9126
9127 case PREINCREMENT_EXPR:
9128 case PREDECREMENT_EXPR:
9129 return expand_increment (exp, 0, ignore);
9130
9131 case POSTINCREMENT_EXPR:
9132 case POSTDECREMENT_EXPR:
9133 /* Faster to treat as pre-increment if result is not used. */
9134 return expand_increment (exp, ! ignore, ignore);
9135
9136 case ADDR_EXPR:
9137 if (modifier == EXPAND_STACK_PARM)
9138 target = 0;
9139 /* Are we taking the address of a nested function? */
9140 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9141 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9142 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9143 && ! TREE_STATIC (exp))
9144 {
9145 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9146 op0 = force_operand (op0, target);
9147 }
9148 /* If we are taking the address of something erroneous, just
9149 return a zero. */
9150 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9151 return const0_rtx;
9152 /* If we are taking the address of a constant and are at the
9153 top level, we have to use output_constant_def since we can't
9154 call force_const_mem at top level. */
9155 else if (cfun == 0
9156 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9157 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9158 == 'c')))
9159 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9160 else
9161 {
9162 /* We make sure to pass const0_rtx down if we came in with
9163 ignore set, to avoid running the cleanups twice. */
9164 op0 = expand_expr (TREE_OPERAND (exp, 0),
9165 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9166 (modifier == EXPAND_INITIALIZER
9167 ? modifier : EXPAND_CONST_ADDRESS));
9168
9169 /* If we are going to ignore the result, OP0 will have been set
9170 to const0_rtx, so just return it. Don't get confused and
9171 think we are taking the address of the constant. */
9172 if (ignore)
9173 return op0;
9174
9175 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9176 clever and returns a REG when given a MEM. */
9177 op0 = protect_from_queue (op0, 1);
9178
9179 /* We would like the object in memory. If it is a constant, we can
9180 have it be statically allocated into memory. For a non-constant,
9181 we need to allocate some memory and store the value into it. */
9182
9183 if (CONSTANT_P (op0))
9184 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9185 op0);
9186 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9187 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9188 || GET_CODE (op0) == PARALLEL)
9189 {
9190 /* If the operand is a SAVE_EXPR, we can deal with this by
9191 forcing the SAVE_EXPR into memory. */
9192 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9193 {
9194 put_var_into_stack (TREE_OPERAND (exp, 0),
9195 /*rescan=*/true);
9196 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9197 }
9198 else
9199 {
9200 /* If this object is in a register, it can't be BLKmode. */
9201 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9202 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9203
9204 if (GET_CODE (op0) == PARALLEL)
9205 /* Handle calls that pass values in multiple
9206 non-contiguous locations. The Irix 6 ABI has examples
9207 of this. */
9208 emit_group_store (memloc, op0,
9209 int_size_in_bytes (inner_type));
9210 else
9211 emit_move_insn (memloc, op0);
9212
9213 op0 = memloc;
9214 }
9215 }
9216
9217 if (GET_CODE (op0) != MEM)
9218 abort ();
9219
9220 mark_temp_addr_taken (op0);
9221 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9222 {
9223 op0 = XEXP (op0, 0);
9224 #ifdef POINTERS_EXTEND_UNSIGNED
9225 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9226 && mode == ptr_mode)
9227 op0 = convert_memory_address (ptr_mode, op0);
9228 #endif
9229 return op0;
9230 }
9231
9232 /* If OP0 is not aligned at least as much as the type requires, we
9233 need to make a temporary, copy OP0 to it, and take the address of
9234 the temporary. We want to use the alignment of the type, not of
9235 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9236 the test for BLKmode means that can't happen. The test for
9237 BLKmode is because we never make mis-aligned MEMs with
9238 non-BLKmode.
9239
9240 We don't need to do this at all if the machine doesn't have
9241 strict alignment. */
9242 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9243 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9244 > MEM_ALIGN (op0))
9245 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9246 {
9247 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9248 rtx new;
9249
9250 if (TYPE_ALIGN_OK (inner_type))
9251 abort ();
9252
9253 if (TREE_ADDRESSABLE (inner_type))
9254 {
9255 /* We can't make a bitwise copy of this object, so fail. */
9256 error ("cannot take the address of an unaligned member");
9257 return const0_rtx;
9258 }
9259
9260 new = assign_stack_temp_for_type
9261 (TYPE_MODE (inner_type),
9262 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9263 : int_size_in_bytes (inner_type),
9264 1, build_qualified_type (inner_type,
9265 (TYPE_QUALS (inner_type)
9266 | TYPE_QUAL_CONST)));
9267
9268 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9269 (modifier == EXPAND_STACK_PARM
9270 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9271
9272 op0 = new;
9273 }
9274
9275 op0 = force_operand (XEXP (op0, 0), target);
9276 }
9277
9278 if (flag_force_addr
9279 && GET_CODE (op0) != REG
9280 && modifier != EXPAND_CONST_ADDRESS
9281 && modifier != EXPAND_INITIALIZER
9282 && modifier != EXPAND_SUM)
9283 op0 = force_reg (Pmode, op0);
9284
9285 if (GET_CODE (op0) == REG
9286 && ! REG_USERVAR_P (op0))
9287 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9288
9289 #ifdef POINTERS_EXTEND_UNSIGNED
9290 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9291 && mode == ptr_mode)
9292 op0 = convert_memory_address (ptr_mode, op0);
9293 #endif
9294
9295 return op0;
9296
9297 case ENTRY_VALUE_EXPR:
9298 abort ();
9299
9300 /* COMPLEX type for Extended Pascal & Fortran */
9301 case COMPLEX_EXPR:
9302 {
9303 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9304 rtx insns;
9305
9306 /* Get the rtx code of the operands. */
9307 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9308 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9309
9310 if (! target)
9311 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9312
9313 start_sequence ();
9314
9315 /* Move the real (op0) and imaginary (op1) parts to their location. */
9316 emit_move_insn (gen_realpart (mode, target), op0);
9317 emit_move_insn (gen_imagpart (mode, target), op1);
9318
9319 insns = get_insns ();
9320 end_sequence ();
9321
9322 /* Complex construction should appear as a single unit. */
9323 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9324 each with a separate pseudo as destination.
9325 It's not correct for flow to treat them as a unit. */
9326 if (GET_CODE (target) != CONCAT)
9327 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9328 else
9329 emit_insn (insns);
9330
9331 return target;
9332 }
9333
9334 case REALPART_EXPR:
9335 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9336 return gen_realpart (mode, op0);
9337
9338 case IMAGPART_EXPR:
9339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9340 return gen_imagpart (mode, op0);
9341
9342 case CONJ_EXPR:
9343 {
9344 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9345 rtx imag_t;
9346 rtx insns;
9347
9348 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9349
9350 if (! target)
9351 target = gen_reg_rtx (mode);
9352
9353 start_sequence ();
9354
9355 /* Store the realpart and the negated imagpart to target. */
9356 emit_move_insn (gen_realpart (partmode, target),
9357 gen_realpart (partmode, op0));
9358
9359 imag_t = gen_imagpart (partmode, target);
9360 temp = expand_unop (partmode,
9361 ! unsignedp && flag_trapv
9362 && (GET_MODE_CLASS(partmode) == MODE_INT)
9363 ? negv_optab : neg_optab,
9364 gen_imagpart (partmode, op0), imag_t, 0);
9365 if (temp != imag_t)
9366 emit_move_insn (imag_t, temp);
9367
9368 insns = get_insns ();
9369 end_sequence ();
9370
9371 /* Conjugate should appear as a single unit.
9372 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9373 each with a separate pseudo as destination.
9374 It's not correct for flow to treat them as a unit. */
9375 if (GET_CODE (target) != CONCAT)
9376 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9377 else
9378 emit_insn (insns);
9379
9380 return target;
9381 }
9382
9383 case TRY_CATCH_EXPR:
9384 {
9385 tree handler = TREE_OPERAND (exp, 1);
9386
9387 expand_eh_region_start ();
9388
9389 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9390
9391 expand_eh_region_end_cleanup (handler);
9392
9393 return op0;
9394 }
9395
9396 case TRY_FINALLY_EXPR:
9397 {
9398 tree try_block = TREE_OPERAND (exp, 0);
9399 tree finally_block = TREE_OPERAND (exp, 1);
9400
9401 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9402 {
9403 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9404 is not sufficient, so we cannot expand the block twice.
9405 So we play games with GOTO_SUBROUTINE_EXPR to let us
9406 expand the thing only once. */
9407 /* When not optimizing, we go ahead with this form since
9408 (1) user breakpoints operate more predictably without
9409 code duplication, and
9410 (2) we're not running any of the global optimizers
9411 that would explode in time/space with the highly
9412 connected CFG created by the indirect branching. */
9413
9414 rtx finally_label = gen_label_rtx ();
9415 rtx done_label = gen_label_rtx ();
9416 rtx return_link = gen_reg_rtx (Pmode);
9417 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9418 (tree) finally_label, (tree) return_link);
9419 TREE_SIDE_EFFECTS (cleanup) = 1;
9420
9421 /* Start a new binding layer that will keep track of all cleanup
9422 actions to be performed. */
9423 expand_start_bindings (2);
9424 target_temp_slot_level = temp_slot_level;
9425
9426 expand_decl_cleanup (NULL_TREE, cleanup);
9427 op0 = expand_expr (try_block, target, tmode, modifier);
9428
9429 preserve_temp_slots (op0);
9430 expand_end_bindings (NULL_TREE, 0, 0);
9431 emit_jump (done_label);
9432 emit_label (finally_label);
9433 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9434 emit_indirect_jump (return_link);
9435 emit_label (done_label);
9436 }
9437 else
9438 {
9439 expand_start_bindings (2);
9440 target_temp_slot_level = temp_slot_level;
9441
9442 expand_decl_cleanup (NULL_TREE, finally_block);
9443 op0 = expand_expr (try_block, target, tmode, modifier);
9444
9445 preserve_temp_slots (op0);
9446 expand_end_bindings (NULL_TREE, 0, 0);
9447 }
9448
9449 return op0;
9450 }
9451
9452 case GOTO_SUBROUTINE_EXPR:
9453 {
9454 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9455 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9456 rtx return_address = gen_label_rtx ();
9457 emit_move_insn (return_link,
9458 gen_rtx_LABEL_REF (Pmode, return_address));
9459 emit_jump (subr);
9460 emit_label (return_address);
9461 return const0_rtx;
9462 }
9463
9464 case VA_ARG_EXPR:
9465 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9466
9467 case EXC_PTR_EXPR:
9468 return get_exception_pointer (cfun);
9469
9470 case FDESC_EXPR:
9471 /* Function descriptors are not valid except for as
9472 initialization constants, and should not be expanded. */
9473 abort ();
9474
9475 default:
9476 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9477 }
9478
9479 /* Here to do an ordinary binary operator, generating an instruction
9480 from the optab already placed in `this_optab'. */
9481 binop:
9482 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9483 subtarget = 0;
9484 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9485 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9486 binop2:
9487 if (modifier == EXPAND_STACK_PARM)
9488 target = 0;
9489 temp = expand_binop (mode, this_optab, op0, op1, target,
9490 unsignedp, OPTAB_LIB_WIDEN);
9491 if (temp == 0)
9492 abort ();
9493 return temp;
9494 }
9495 \f
9496 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9497 when applied to the address of EXP produces an address known to be
9498 aligned more than BIGGEST_ALIGNMENT. */
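/* In other words, OFFSET is expected to have the shape of a
   hypothetical source-level over-alignment computation such as

     (- (size_t) &buf) & (ALIGN - 1)

   for some power-of-2 ALIGN exceeding BIGGEST_ALIGNMENT; adding such
   an offset to the address of EXP rounds it up to a multiple of
   ALIGN.  */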
9499
9500 static int
9501 is_aligning_offset (offset, exp)
9502 tree offset;
9503 tree exp;
9504 {
9505 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9506 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9507 || TREE_CODE (offset) == NOP_EXPR
9508 || TREE_CODE (offset) == CONVERT_EXPR
9509 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9510 offset = TREE_OPERAND (offset, 0);
9511
9512 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9513 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9514 if (TREE_CODE (offset) != BIT_AND_EXPR
9515 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9516 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9517 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9518 return 0;
9519
9520 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9521 It must be NEGATE_EXPR. Then strip any more conversions. */
9522 offset = TREE_OPERAND (offset, 0);
9523 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9524 || TREE_CODE (offset) == NOP_EXPR
9525 || TREE_CODE (offset) == CONVERT_EXPR)
9526 offset = TREE_OPERAND (offset, 0);
9527
9528 if (TREE_CODE (offset) != NEGATE_EXPR)
9529 return 0;
9530
9531 offset = TREE_OPERAND (offset, 0);
9532 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9533 || TREE_CODE (offset) == NOP_EXPR
9534 || TREE_CODE (offset) == CONVERT_EXPR)
9535 offset = TREE_OPERAND (offset, 0);
9536
9537 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9538 whose type is the same as EXP. */
9539 return (TREE_CODE (offset) == ADDR_EXPR
9540 && (TREE_OPERAND (offset, 0) == exp
9541 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9542 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9543 == TREE_TYPE (exp)))));
9544 }
9545 \f
9546 /* Return the tree node if an ARG corresponds to a string constant or zero
9547 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9548 in bytes within the string that ARG is accessing. The type of the
9549 offset will be `sizetype'. */
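/* For example (hypothetical argument): for "hello" + 2 -- a
   PLUS_EXPR of the ADDR_EXPR of a STRING_CST and the constant 2 --
   this returns the STRING_CST "hello" and sets *PTR_OFFSET to 2;
   for a plain string literal the offset is zero.  */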
9550
9551 tree
9552 string_constant (arg, ptr_offset)
9553 tree arg;
9554 tree *ptr_offset;
9555 {
9556 STRIP_NOPS (arg);
9557
9558 if (TREE_CODE (arg) == ADDR_EXPR
9559 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9560 {
9561 *ptr_offset = size_zero_node;
9562 return TREE_OPERAND (arg, 0);
9563 }
9564 else if (TREE_CODE (arg) == PLUS_EXPR)
9565 {
9566 tree arg0 = TREE_OPERAND (arg, 0);
9567 tree arg1 = TREE_OPERAND (arg, 1);
9568
9569 STRIP_NOPS (arg0);
9570 STRIP_NOPS (arg1);
9571
9572 if (TREE_CODE (arg0) == ADDR_EXPR
9573 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9574 {
9575 *ptr_offset = convert (sizetype, arg1);
9576 return TREE_OPERAND (arg0, 0);
9577 }
9578 else if (TREE_CODE (arg1) == ADDR_EXPR
9579 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9580 {
9581 *ptr_offset = convert (sizetype, arg0);
9582 return TREE_OPERAND (arg1, 0);
9583 }
9584 }
9585
9586 return 0;
9587 }
9588 \f
9589 /* Expand code for a post- or pre- increment or decrement
9590 and return the RTX for the result.
9591 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
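/* Roughly (hypothetical source): "b = ++a;" is expanded as
   "a = a + 1; b = a;", whereas "b = a++;" must yield the old value,
   either by queueing the increment or by copying "a" before adding
   1, so the assignment sees the pre-increment value.  */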
9592
9593 static rtx
9594 expand_increment (exp, post, ignore)
9595 tree exp;
9596 int post, ignore;
9597 {
9598 rtx op0, op1;
9599 rtx temp, value;
9600 tree incremented = TREE_OPERAND (exp, 0);
9601 optab this_optab = add_optab;
9602 int icode;
9603 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9604 int op0_is_copy = 0;
9605 int single_insn = 0;
9606 /* 1 means we can't store into OP0 directly,
9607 because it is a subreg narrower than a word,
9608 and we don't dare clobber the rest of the word. */
9609 int bad_subreg = 0;
9610
9611 /* Stabilize any component ref that might need to be
9612 evaluated more than once below. */
9613 if (!post
9614 || TREE_CODE (incremented) == BIT_FIELD_REF
9615 || (TREE_CODE (incremented) == COMPONENT_REF
9616 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9617 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9618 incremented = stabilize_reference (incremented);
9619 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9620 ones into save exprs so that they don't accidentally get evaluated
9621 more than once by the code below. */
9622 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9623 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9624 incremented = save_expr (incremented);
9625
9626 /* Compute the operands as RTX.
9627 Note whether OP0 is the actual lvalue or a copy of it:
9628 I believe it is a copy iff it is a register or subreg
9629 and insns were generated in computing it. */
9630
9631 temp = get_last_insn ();
9632 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9633
9634 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9635 in place but instead must do sign- or zero-extension during assignment,
9636 so we copy it into a new register and let the code below use it as
9637 a copy.
9638
9639 Note that we can safely modify this SUBREG since it is known not to be
9640 shared (it was made by the expand_expr call above). */
9641
9642 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9643 {
9644 if (post)
9645 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9646 else
9647 bad_subreg = 1;
9648 }
9649 else if (GET_CODE (op0) == SUBREG
9650 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9651 {
9652 /* We cannot increment this SUBREG in place. If we are
9653 post-incrementing, get a copy of the old value. Otherwise,
9654 just mark that we cannot increment in place. */
9655 if (post)
9656 op0 = copy_to_reg (op0);
9657 else
9658 bad_subreg = 1;
9659 }
9660
9661 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9662 && temp != get_last_insn ());
9663 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9664
9665 /* Decide whether incrementing or decrementing. */
9666 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9667 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9668 this_optab = sub_optab;
9669
9670 /* Convert decrement by a constant into a negative increment. */
9671 if (this_optab == sub_optab
9672 && GET_CODE (op1) == CONST_INT)
9673 {
9674 op1 = GEN_INT (-INTVAL (op1));
9675 this_optab = add_optab;
9676 }
9677
9678 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9679 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9680
9681 /* For a preincrement, see if we can do this with a single instruction. */
9682 if (!post)
9683 {
9684 icode = (int) this_optab->handlers[(int) mode].insn_code;
9685 if (icode != (int) CODE_FOR_nothing
9686 /* Make sure that OP0 is valid for operands 0 and 1
9687 of the insn we want to queue. */
9688 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9689 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9690 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9691 single_insn = 1;
9692 }
9693
9694 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9695 then we cannot just increment OP0. We must therefore contrive to
9696 increment the original value. Then, for postincrement, we can return
9697 OP0 since it is a copy of the old value. For preincrement, expand here
9698 unless we can do it with a single insn.
9699
9700 Likewise if storing directly into OP0 would clobber high bits
9701 we need to preserve (bad_subreg). */
9702 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9703 {
9704 /* This is the easiest way to increment the value wherever it is.
9705 Problems with multiple evaluation of INCREMENTED are prevented
9706 because either (1) it is a component_ref or preincrement,
9707 in which case it was stabilized above, or (2) it is an array_ref
9708 with constant index in an array in a register, which is
9709 safe to reevaluate. */
9710 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9711 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9712 ? MINUS_EXPR : PLUS_EXPR),
9713 TREE_TYPE (exp),
9714 incremented,
9715 TREE_OPERAND (exp, 1));
9716
9717 while (TREE_CODE (incremented) == NOP_EXPR
9718 || TREE_CODE (incremented) == CONVERT_EXPR)
9719 {
9720 newexp = convert (TREE_TYPE (incremented), newexp);
9721 incremented = TREE_OPERAND (incremented, 0);
9722 }
9723
9724 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9725 return post ? op0 : temp;
9726 }
9727
9728 if (post)
9729 {
9730 /* We have a true reference to the value in OP0.
9731 If there is an insn to add or subtract in this mode, queue it.
9732 Queueing the increment insn avoids the register shuffling
9733 that often results if we must increment now and first save
9734 the old value for subsequent use. */
9735
9736 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9737 op0 = stabilize (op0);
9738 #endif
9739
9740 icode = (int) this_optab->handlers[(int) mode].insn_code;
9741 if (icode != (int) CODE_FOR_nothing
9742 /* Make sure that OP0 is valid for operands 0 and 1
9743 of the insn we want to queue. */
9744 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9745 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9746 {
9747 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9748 op1 = force_reg (mode, op1);
9749
9750 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9751 }
9752 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9753 {
9754 rtx addr = (general_operand (XEXP (op0, 0), mode)
9755 ? force_reg (Pmode, XEXP (op0, 0))
9756 : copy_to_reg (XEXP (op0, 0)));
9757 rtx temp, result;
9758
9759 op0 = replace_equiv_address (op0, addr);
9760 temp = force_reg (GET_MODE (op0), op0);
9761 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9762 op1 = force_reg (mode, op1);
9763
9764 /* The increment queue is LIFO, thus we have to `queue'
9765 the instructions in reverse order. */
9766 enqueue_insn (op0, gen_move_insn (op0, temp));
9767 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9768 return result;
9769 }
9770 }
9771
9772 /* Preincrement, or we can't increment with one simple insn. */
9773 if (post)
9774 /* Save a copy of the value before inc or dec, to return it later. */
9775 temp = value = copy_to_reg (op0);
9776 else
9777 /* Arrange to return the incremented value. */
9778 /* Copy the rtx because expand_binop will protect from the queue,
9779 and the results of that would be invalid for us to return
9780 if our caller does emit_queue before using our result. */
9781 temp = copy_rtx (value = op0);
9782
9783 /* Increment however we can. */
9784 op1 = expand_binop (mode, this_optab, value, op1, op0,
9785 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9786
9787 /* Make sure the value is stored into OP0. */
9788 if (op1 != op0)
9789 emit_move_insn (op0, op1);
9790
9791 return temp;
9792 }
9793 \f
9794 /* Generate code to calculate EXP using a store-flag instruction
9795 and return an rtx for the result. EXP is either a comparison
9796 or a TRUTH_NOT_EXPR whose operand is a comparison.
9797
9798 If TARGET is nonzero, store the result there if convenient.
9799
9800 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9801 cheap.
9802
9803 Return zero if there is no suitable set-flag instruction
9804 available on this machine.
9805
9806 Once expand_expr has been called on the arguments of the comparison,
9807 we are committed to doing the store flag, since it is not safe to
9808 re-evaluate the expression. We emit the store-flag insn by calling
9809 emit_store_flag, but only expand the arguments if we have a reason
9810 to believe that emit_store_flag will be successful. If we think that
9811 it will, but it isn't, we have to simulate the store-flag with a
9812 set/jump/set sequence. */
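/* The simulated sequence, for a hypothetical "r = (a < b)" on a
   machine with no usable scc instruction, is roughly

     r = 1;  if (a < b) goto done;  r = 0;  done:

   with the two constants swapped when the result must be
   inverted.  */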
9813
9814 static rtx
9815 do_store_flag (exp, target, mode, only_cheap)
9816 tree exp;
9817 rtx target;
9818 enum machine_mode mode;
9819 int only_cheap;
9820 {
9821 enum rtx_code code;
9822 tree arg0, arg1, type;
9823 tree tem;
9824 enum machine_mode operand_mode;
9825 int invert = 0;
9826 int unsignedp;
9827 rtx op0, op1;
9828 enum insn_code icode;
9829 rtx subtarget = target;
9830 rtx result, label;
9831
9832 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9833 result at the end. We can't simply invert the test since it would
9834 have already been inverted if it were valid. This case occurs for
9835 some floating-point comparisons. */
9836
9837 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9838 invert = 1, exp = TREE_OPERAND (exp, 0);
9839
9840 arg0 = TREE_OPERAND (exp, 0);
9841 arg1 = TREE_OPERAND (exp, 1);
9842
9843 /* Don't crash if the comparison was erroneous. */
9844 if (arg0 == error_mark_node || arg1 == error_mark_node)
9845 return const0_rtx;
9846
9847 type = TREE_TYPE (arg0);
9848 operand_mode = TYPE_MODE (type);
9849 unsignedp = TREE_UNSIGNED (type);
9850
9851 /* We won't bother with BLKmode store-flag operations because it would mean
9852 passing a lot of information to emit_store_flag. */
9853 if (operand_mode == BLKmode)
9854 return 0;
9855
9856 /* We won't bother with store-flag operations involving function pointers
9857 when function pointers must be canonicalized before comparisons. */
9858 #ifdef HAVE_canonicalize_funcptr_for_compare
9859 if (HAVE_canonicalize_funcptr_for_compare
9860 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9861 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9862 == FUNCTION_TYPE))
9863 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9864 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9865 == FUNCTION_TYPE))))
9866 return 0;
9867 #endif
9868
9869 STRIP_NOPS (arg0);
9870 STRIP_NOPS (arg1);
9871
9872 /* Get the rtx comparison code to use. We know that EXP is a comparison
9873 operation of some type. Some comparisons against 1 and -1 can be
9874 converted to comparisons with zero. Do so here so that the tests
9875 below will be aware that we have a comparison with zero. These
9876 tests will not catch constants in the first operand, but constants
9877 are rarely passed as the first operand. */
9878
9879 switch (TREE_CODE (exp))
9880 {
9881 case EQ_EXPR:
9882 code = EQ;
9883 break;
9884 case NE_EXPR:
9885 code = NE;
9886 break;
9887 case LT_EXPR:
9888 if (integer_onep (arg1))
9889 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9890 else
9891 code = unsignedp ? LTU : LT;
9892 break;
9893 case LE_EXPR:
9894 if (! unsignedp && integer_all_onesp (arg1))
9895 arg1 = integer_zero_node, code = LT;
9896 else
9897 code = unsignedp ? LEU : LE;
9898 break;
9899 case GT_EXPR:
9900 if (! unsignedp && integer_all_onesp (arg1))
9901 arg1 = integer_zero_node, code = GE;
9902 else
9903 code = unsignedp ? GTU : GT;
9904 break;
9905 case GE_EXPR:
9906 if (integer_onep (arg1))
9907 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9908 else
9909 code = unsignedp ? GEU : GE;
9910 break;
9911
9912 case UNORDERED_EXPR:
9913 code = UNORDERED;
9914 break;
9915 case ORDERED_EXPR:
9916 code = ORDERED;
9917 break;
9918 case UNLT_EXPR:
9919 code = UNLT;
9920 break;
9921 case UNLE_EXPR:
9922 code = UNLE;
9923 break;
9924 case UNGT_EXPR:
9925 code = UNGT;
9926 break;
9927 case UNGE_EXPR:
9928 code = UNGE;
9929 break;
9930 case UNEQ_EXPR:
9931 code = UNEQ;
9932 break;
9933
9934 default:
9935 abort ();
9936 }
9937
9938 /* Put a constant second. */
9939 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9940 {
9941 tem = arg0; arg0 = arg1; arg1 = tem;
9942 code = swap_condition (code);
9943 }
9944
9945 /* If this is an equality or inequality test of a single bit, we can
9946 do this by shifting the bit being tested to the low-order bit and
9947 masking the result with the constant 1. If the condition was EQ,
9948 we xor it with 1. This does not require an scc insn and is faster
9949 than an scc insn even if we have it. */
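/* E.g. (hypothetical source): "(x & 8) != 0" becomes roughly
   "(x >> 3) & 1", and "(x & 8) == 0" additionally xors that value
   with 1; the AND is emitted last (and omitted when the tested bit
   is the most significant bit of the type) so that it can combine
   with later operations.  */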
9950
9951 if ((code == NE || code == EQ)
9952 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9953 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9954 {
9955 tree inner = TREE_OPERAND (arg0, 0);
9956 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9957 int ops_unsignedp;
9958
9959 /* If INNER is a right shift of a constant and it plus BITNUM does
9960 not overflow, adjust BITNUM and INNER. */
9961
9962 if (TREE_CODE (inner) == RSHIFT_EXPR
9963 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9964 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9965 && bitnum < TYPE_PRECISION (type)
9966 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9967 bitnum - TYPE_PRECISION (type)))
9968 {
9969 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9970 inner = TREE_OPERAND (inner, 0);
9971 }
9972
9973 /* If we are going to be able to omit the AND below, we must do our
9974 operations as unsigned. If we must use the AND, we have a choice.
9975 Normally unsigned is faster, but for some machines signed is. */
9976 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9977 #ifdef LOAD_EXTEND_OP
9978 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9979 #else
9980 : 1
9981 #endif
9982 );
9983
9984 if (! get_subtarget (subtarget)
9985 || GET_MODE (subtarget) != operand_mode
9986 || ! safe_from_p (subtarget, inner, 1))
9987 subtarget = 0;
9988
9989 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9990
9991 if (bitnum != 0)
9992 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
9993 size_int (bitnum), subtarget, ops_unsignedp);
9994
9995 if (GET_MODE (op0) != mode)
9996 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9997
9998 if ((code == EQ && ! invert) || (code == NE && invert))
9999 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10000 ops_unsignedp, OPTAB_LIB_WIDEN);
10001
10002 /* Put the AND last so it can combine with more things. */
10003 if (bitnum != TYPE_PRECISION (type) - 1)
10004 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10005
10006 return op0;
10007 }
10008
10009 /* Now see if we are likely to be able to do this. Return if not. */
10010 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10011 return 0;
10012
10013 icode = setcc_gen_code[(int) code];
10014 if (icode == CODE_FOR_nothing
10015 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10016 {
10017 /* We can only do this if it is one of the special cases that
10018 can be handled without an scc insn. */
10019 if ((code == LT && integer_zerop (arg1))
10020 || (! only_cheap && code == GE && integer_zerop (arg1)))
10021 ;
10022 else if (BRANCH_COST >= 0
10023 && ! only_cheap && (code == NE || code == EQ)
10024 && TREE_CODE (type) != REAL_TYPE
10025 && ((abs_optab->handlers[(int) operand_mode].insn_code
10026 != CODE_FOR_nothing)
10027 || (ffs_optab->handlers[(int) operand_mode].insn_code
10028 != CODE_FOR_nothing)))
10029 ;
10030 else
10031 return 0;
10032 }
10033
10034 if (! get_subtarget (target)
10035 || GET_MODE (subtarget) != operand_mode
10036 || ! safe_from_p (subtarget, arg1, 1))
10037 subtarget = 0;
10038
10039 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10040 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10041
10042 if (target == 0)
10043 target = gen_reg_rtx (mode);
10044
10045 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10046 because, if emit_store_flag does anything, it will succeed and
10047 OP0 and OP1 will not be used subsequently. */
10048
10049 result = emit_store_flag (target, code,
10050 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10051 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10052 operand_mode, unsignedp, 1);
10053
10054 if (result)
10055 {
10056 if (invert)
10057 result = expand_binop (mode, xor_optab, result, const1_rtx,
10058 result, 0, OPTAB_LIB_WIDEN);
10059 return result;
10060 }
10061
10062 /* If this failed, we have to do this with set/compare/jump/set code. */
10063 if (GET_CODE (target) != REG
10064 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10065 target = gen_reg_rtx (GET_MODE (target));
10066
10067 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10068 result = compare_from_rtx (op0, op1, code, unsignedp,
10069 operand_mode, NULL_RTX);
10070 if (GET_CODE (result) == CONST_INT)
10071 return (((result == const0_rtx && ! invert)
10072 || (result != const0_rtx && invert))
10073 ? const0_rtx : const1_rtx);
10074
10075 /* The code of RESULT may not match CODE if compare_from_rtx
10076 decided to swap its operands and reverse the original code.
10077
10078 We know that compare_from_rtx returns either a CONST_INT or
10079 a new comparison code, so it is safe to just extract the
10080 code from RESULT. */
10081 code = GET_CODE (result);
10082
10083 label = gen_label_rtx ();
10084 if (bcc_gen_fctn[(int) code] == 0)
10085 abort ();
10086
10087 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10088 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10089 emit_label (label);
10090
10091 return target;
10092 }
10093 \f
10094
10095 /* Stubs in case we haven't got a casesi insn. */
10096 #ifndef HAVE_casesi
10097 # define HAVE_casesi 0
10098 # define gen_casesi(a, b, c, d, e) (0)
10099 # define CODE_FOR_casesi CODE_FOR_nothing
10100 #endif
10101
10102 /* If the machine does not have a case insn that compares the bounds,
10103 this means extra overhead for dispatch tables, which raises the
10104 threshold for using them. */
10105 #ifndef CASE_VALUES_THRESHOLD
10106 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10107 #endif /* CASE_VALUES_THRESHOLD */
10108
10109 unsigned int
10110 case_values_threshold ()
10111 {
10112 return CASE_VALUES_THRESHOLD;
10113 }
10114
10115 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10116 0 otherwise (i.e. if there is no casesi instruction). */
10117 int
10118 try_casesi (index_type, index_expr, minval, range,
10119 table_label, default_label)
10120 tree index_type, index_expr, minval, range;
10121 rtx table_label ATTRIBUTE_UNUSED;
10122 rtx default_label;
10123 {
10124 enum machine_mode index_mode = SImode;
10125 int index_bits = GET_MODE_BITSIZE (index_mode);
10126 rtx op1, op2, index;
10127 enum machine_mode op_mode;
10128
10129 if (! HAVE_casesi)
10130 return 0;
10131
10132 /* Convert the index to SImode. */
10133 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10134 {
10135 enum machine_mode omode = TYPE_MODE (index_type);
10136 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10137
10138 /* We must handle the endpoints in the original mode. */
10139 index_expr = build (MINUS_EXPR, index_type,
10140 index_expr, minval);
10141 minval = integer_zero_node;
10142 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10143 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10144 omode, 1, default_label);
10145 /* Now we can safely truncate. */
10146 index = convert_to_mode (index_mode, index, 0);
10147 }
10148 else
10149 {
10150 if (TYPE_MODE (index_type) != index_mode)
10151 {
10152 index_expr = convert ((*lang_hooks.types.type_for_size)
10153 (index_bits, 0), index_expr);
10154 index_type = TREE_TYPE (index_expr);
10155 }
10156
10157 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10158 }
10159 emit_queue ();
10160 index = protect_from_queue (index, 0);
10161 do_pending_stack_adjust ();
10162
10163 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10164 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10165 (index, op_mode))
10166 index = copy_to_mode_reg (op_mode, index);
10167
10168 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10169
10170 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10171 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10172 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10173 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10174 (op1, op_mode))
10175 op1 = copy_to_mode_reg (op_mode, op1);
10176
10177 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10178
10179 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10180 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10181 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10182 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10183 (op2, op_mode))
10184 op2 = copy_to_mode_reg (op_mode, op2);
10185
10186 emit_jump_insn (gen_casesi (index, op1, op2,
10187 table_label, default_label));
10188 return 1;
10189 }
10190
10191 /* Attempt to generate a tablejump instruction; same concept. */
10192 #ifndef HAVE_tablejump
10193 #define HAVE_tablejump 0
10194 #define gen_tablejump(x, y) (0)
10195 #endif
10196
10197 /* Subroutine of the next function.
10198
10199 INDEX is the value being switched on, with the lowest value
10200 in the table already subtracted.
10201 MODE is its expected mode (needed if INDEX is constant).
10202 RANGE is the length of the jump table.
10203 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10204
10205 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10206 index value is out of range. */
10207
10208 static void
10209 do_tablejump (index, mode, range, table_label, default_label)
10210 rtx index, range, table_label, default_label;
10211 enum machine_mode mode;
10212 {
10213 rtx temp, vector;
10214
10215 if (INTVAL (range) > cfun->max_jumptable_ents)
10216 cfun->max_jumptable_ents = INTVAL (range);
10217
10218 /* Do an unsigned comparison (in the proper mode) between the index
10219 expression and the value which represents the length of the range.
10220 Since we just finished subtracting the lower bound of the range
10221 from the index expression, this comparison allows us to simultaneously
10222 check that the original index expression value is both greater than
10223 or equal to the minimum value of the range and less than or equal to
10224 the maximum value of the range. */
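/* E.g. (hypothetical switch) with case labels 10 through 14, the
   caller has already computed INDEX = i - 10, so the single unsigned
   test INDEX > 4 sends both i < 10 (which wrapped around to a huge
   unsigned value) and i > 14 to DEFAULT_LABEL.  */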
10225
10226 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10227 default_label);
10228
10229 /* If index is in range, it must fit in Pmode.
10230 Convert to Pmode so we can index with it. */
10231 if (mode != Pmode)
10232 index = convert_to_mode (Pmode, index, 1);
10233
10234 /* Don't let a MEM slip through, because then the INDEX that comes
10235 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10236 and break_out_memory_refs will go to work on it and mess it up. */
10237 #ifdef PIC_CASE_VECTOR_ADDRESS
10238 if (flag_pic && GET_CODE (index) != REG)
10239 index = copy_to_mode_reg (Pmode, index);
10240 #endif
10241
10242 /* If flag_force_addr were to affect this address
10243 it could interfere with the tricky assumptions made
10244 about addresses that contain label-refs,
10245 which may be valid only very near the tablejump itself. */
10246 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10247 GET_MODE_SIZE, because this indicates how large insns are. The other
10248 uses should all be Pmode, because they are addresses. This code
10249 could fail if addresses and insns are not the same size. */
10250 index = gen_rtx_PLUS (Pmode,
10251 gen_rtx_MULT (Pmode, index,
10252 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10253 gen_rtx_LABEL_REF (Pmode, table_label));
10254 #ifdef PIC_CASE_VECTOR_ADDRESS
10255 if (flag_pic)
10256 index = PIC_CASE_VECTOR_ADDRESS (index);
10257 else
10258 #endif
10259 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10260 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10261 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10262 RTX_UNCHANGING_P (vector) = 1;
10263 convert_move (temp, vector, 0);
10264
10265 emit_jump_insn (gen_tablejump (temp, table_label));
10266
10267 /* If we are generating PIC code or if the table is PC-relative, the
10268 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10269 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10270 emit_barrier ();
10271 }
10272
10273 int
10274 try_tablejump (index_type, index_expr, minval, range,
10275 table_label, default_label)
10276 tree index_type, index_expr, minval, range;
10277 rtx table_label, default_label;
10278 {
10279 rtx index;
10280
10281 if (! HAVE_tablejump)
10282 return 0;
10283
10284 index_expr = fold (build (MINUS_EXPR, index_type,
10285 convert (index_type, index_expr),
10286 convert (index_type, minval)));
10287 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10288 emit_queue ();
10289 index = protect_from_queue (index, 0);
10290 do_pending_stack_adjust ();
10291
10292 do_tablejump (index, TYPE_MODE (index_type),
10293 convert_modes (TYPE_MODE (index_type),
10294 TYPE_MODE (TREE_TYPE (range)),
10295 expand_expr (range, NULL_RTX,
10296 VOIDmode, 0),
10297 TREE_UNSIGNED (TREE_TYPE (range))),
10298 table_label, default_label);
10299 return 1;
10300 }
10301
10302 /* Nonzero if the mode is a valid vector mode for this architecture.
10303 This returns nonzero even if there is no hardware support for the
10304 vector mode, but we can emulate with narrower modes. */
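/* For instance (hypothetical target): V2SImode is considered valid
   even if the target defines no V2SI patterns, as long as SImode
   moves exist, since the generic code can fall back to a pair of
   SImode operations.  */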
10305
10306 int
10307 vector_mode_valid_p (mode)
10308 enum machine_mode mode;
10309 {
10310 enum mode_class class = GET_MODE_CLASS (mode);
10311 enum machine_mode innermode;
10312
10313 /* Doh! What's going on? */
10314 if (class != MODE_VECTOR_INT
10315 && class != MODE_VECTOR_FLOAT)
10316 return 0;
10317
10318 /* Hardware support. Woo hoo! */
10319 if (VECTOR_MODE_SUPPORTED_P (mode))
10320 return 1;
10321
10322 innermode = GET_MODE_INNER (mode);
10323
10324 /* We should probably return 1 if requesting V4DI and we have no DI,
10325 but do have V2DI; however, this case is probably very unlikely. */
10326
10327 /* If we have support for the inner mode, we can safely emulate it.
10328 We may not have V2DI, but we can emulate it with a pair of DIs. */
10329 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10330 }
10331
10332 #include "gt-expr.h"