1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
53
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
56
57 #ifdef PUSH_ROUNDING
58
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
64
65 #endif
66
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
74
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
79
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
87
88
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
96
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
99
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
103 {
104 rtx to;				/* Destination MEM, or 0 when pushing.  */
105 rtx to_addr;			/* Address of the destination.  */
106 int autinc_to;			/* Nonzero if TO_ADDR auto-increments.  */
107 int explicit_inc_to;		/* Sign of explicit increments emitted for TO_ADDR, or 0.  */
108 rtx from;				/* Source MEM.  */
109 rtx from_addr;			/* Address of the source.  */
110 int autinc_from;			/* Nonzero if FROM_ADDR auto-increments.  */
111 int explicit_inc_from;		/* Likewise, for FROM_ADDR.  */
112 unsigned HOST_WIDE_INT len;		/* Bytes remaining to move.  */
113 HOST_WIDE_INT offset;		/* Current offset into the blocks.  */
114 int reverse;			/* Nonzero to move from high addresses down to low.  */
115 };
116
117 /* This structure is used by store_by_pieces to describe the store to
118 be performed. */
119
120 struct store_by_pieces
121 {
122 rtx to;				/* Destination MEM.  */
123 rtx to_addr;			/* Address of the destination.  */
124 int autinc_to;			/* Nonzero if TO_ADDR auto-increments.  */
125 int explicit_inc_to;		/* Sign of explicit increments emitted for TO_ADDR, or 0.  */
126 unsigned HOST_WIDE_INT len;		/* Bytes remaining to store.  */
127 HOST_WIDE_INT offset;		/* Current offset into the block.  */
128 rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
129 void *constfundata;			/* Opaque argument passed to CONSTFUN.  */
130 int reverse;			/* Nonzero to store from high addresses down to low.  */
131 };
132
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((void *, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
169
170 static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
172 tree));
173
174 static int is_aligning_offset PARAMS ((tree, tree));
175 static rtx expand_increment PARAMS ((tree, int, int));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 static rtx const_vector_from_tree PARAMS ((tree));
182
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
186
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
189
190 /* Record for each mode whether we can float-extend from memory. */
191
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
193
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
196
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
205
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
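/* Illustrative example (not part of the original sources): on a
   32-bit target with MOVE_MAX == 4 and the default MOVE_RATIO of 15,
   an aligned 8-byte copy takes move_by_pieces_ninsns == 2 SImode
   moves, so MOVE_BY_PIECES_P is true and the copy is expanded inline;
   a 256-byte copy would take 64 moves and instead falls through to a
   movstr pattern or a memcpy libcall.  */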
212
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
215
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
224
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
231
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
238
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
241
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
244
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
246
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
250 \f
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
253
254 void
255 init_expr_once ()
256 {
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
262
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
268
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
272
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
276
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
279 {
280 int regno;
281
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
286
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
289
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
294 {
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
297
298 REGNO (reg) = regno;
299
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
304
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
309
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
314
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
319 }
320 }
321
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
323
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
326 {
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
330 {
331 enum insn_code ic;
332
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
336
337 PUT_MODE (mem, srcmode);
338
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
341 }
342 }
343 }
344
345 /* This is run at the start of compiling a function. */
346
347 void
348 init_expr ()
349 {
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
351
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
359 }
360
361 /* Small sanity check that the queue is empty at the end of a function. */
362
363 void
364 finish_expr_for_function ()
365 {
366 if (pending_chain)
367 abort ();
368 }
369 \f
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
372
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
376
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
379
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
383 {
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
387 }
388
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
395
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
399
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
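/* Illustrative sketch (not from the original sources): a caller that
   expands something like "a[i++]" might do roughly

     rtx old_i = protect_from_queue (i_rtx, 0);
     enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));

   where I_RTX is a hypothetical rtx for I; the address computation
   then uses the pre-increment value in OLD_I while the queued
   increment is emitted later by emit_queue.  */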
403
404 rtx
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
408 {
409 RTX_CODE code = GET_CODE (x);
410
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
416
417 if (code != QUEUED)
418 {
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
426 {
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
429
430 if (QUEUED_INSN (y))
431 {
432 rtx temp = gen_reg_rtx (GET_MODE (x));
433
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
437 }
438
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
442 }
443
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
447 {
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
450 {
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
453 }
454 }
455 else if (code == PLUS || code == MULT)
456 {
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
460 {
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
464 }
465 }
466 return x;
467 }
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
483 }
484
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
489
490 int
491 queued_subexp_p (x)
492 rtx x;
493 {
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
496 {
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
508 }
509 }
510
511 /* Perform all the pending incrementations. */
512
513 void
514 emit_queue ()
515 {
516 rtx p;
517 while ((p = pending_chain))
518 {
519 rtx body = QUEUED_BODY (p);
520
521 switch (GET_CODE (body))
522 {
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
532
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
538
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
542 }
543
544 pending_chain = QUEUED_NEXT (p);
545 }
546 }
547 \f
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
552
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
557 {
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
564
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
568
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
571
572 if (to_real != from_real)
573 abort ();
574
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
578
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
584
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
587
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
590 {
591 emit_move_insn (to, from);
592 return;
593 }
594
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
596 {
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
599
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
604
605 emit_move_insn (to, from);
606 return;
607 }
608
609 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
610 {
611 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
612 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
613 return;
614 }
615
616 if (to_real != from_real)
617 abort ();
618
619 if (to_real)
620 {
621 rtx value, insns;
622
623 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
624 {
625 /* Try converting directly if the insn is supported. */
626 if ((code = can_extend_p (to_mode, from_mode, 0))
627 != CODE_FOR_nothing)
628 {
629 emit_unop_insn (code, to, from, UNKNOWN);
630 return;
631 }
632 }
633
634 #ifdef HAVE_trunchfqf2
635 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
636 {
637 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
638 return;
639 }
640 #endif
641 #ifdef HAVE_trunctqfqf2
642 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
643 {
644 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
645 return;
646 }
647 #endif
648 #ifdef HAVE_truncsfqf2
649 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
650 {
651 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
652 return;
653 }
654 #endif
655 #ifdef HAVE_truncdfqf2
656 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
657 {
658 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
659 return;
660 }
661 #endif
662 #ifdef HAVE_truncxfqf2
663 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_trunctfqf2
670 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
671 {
672 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676
677 #ifdef HAVE_trunctqfhf2
678 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
679 {
680 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
681 return;
682 }
683 #endif
684 #ifdef HAVE_truncsfhf2
685 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
688 return;
689 }
690 #endif
691 #ifdef HAVE_truncdfhf2
692 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
695 return;
696 }
697 #endif
698 #ifdef HAVE_truncxfhf2
699 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
702 return;
703 }
704 #endif
705 #ifdef HAVE_trunctfhf2
706 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
707 {
708 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
709 return;
710 }
711 #endif
712
713 #ifdef HAVE_truncsftqf2
714 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
715 {
716 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
717 return;
718 }
719 #endif
720 #ifdef HAVE_truncdftqf2
721 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
722 {
723 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
724 return;
725 }
726 #endif
727 #ifdef HAVE_truncxftqf2
728 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
729 {
730 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
731 return;
732 }
733 #endif
734 #ifdef HAVE_trunctftqf2
735 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
736 {
737 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
738 return;
739 }
740 #endif
741
742 #ifdef HAVE_truncdfsf2
743 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
744 {
745 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
746 return;
747 }
748 #endif
749 #ifdef HAVE_truncxfsf2
750 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
751 {
752 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
753 return;
754 }
755 #endif
756 #ifdef HAVE_trunctfsf2
757 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
758 {
759 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
760 return;
761 }
762 #endif
763 #ifdef HAVE_truncxfdf2
764 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
765 {
766 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
767 return;
768 }
769 #endif
770 #ifdef HAVE_trunctfdf2
771 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
772 {
773 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
774 return;
775 }
776 #endif
777
778 libcall = (rtx) 0;
779 switch (from_mode)
780 {
781 case SFmode:
782 switch (to_mode)
783 {
784 case DFmode:
785 libcall = extendsfdf2_libfunc;
786 break;
787
788 case XFmode:
789 libcall = extendsfxf2_libfunc;
790 break;
791
792 case TFmode:
793 libcall = extendsftf2_libfunc;
794 break;
795
796 default:
797 break;
798 }
799 break;
800
801 case DFmode:
802 switch (to_mode)
803 {
804 case SFmode:
805 libcall = truncdfsf2_libfunc;
806 break;
807
808 case XFmode:
809 libcall = extenddfxf2_libfunc;
810 break;
811
812 case TFmode:
813 libcall = extenddftf2_libfunc;
814 break;
815
816 default:
817 break;
818 }
819 break;
820
821 case XFmode:
822 switch (to_mode)
823 {
824 case SFmode:
825 libcall = truncxfsf2_libfunc;
826 break;
827
828 case DFmode:
829 libcall = truncxfdf2_libfunc;
830 break;
831
832 default:
833 break;
834 }
835 break;
836
837 case TFmode:
838 switch (to_mode)
839 {
840 case SFmode:
841 libcall = trunctfsf2_libfunc;
842 break;
843
844 case DFmode:
845 libcall = trunctfdf2_libfunc;
846 break;
847
848 default:
849 break;
850 }
851 break;
852
853 default:
854 break;
855 }
856
857 if (libcall == (rtx) 0)
858 /* This conversion is not implemented yet. */
859 abort ();
860
861 start_sequence ();
862 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
863 1, from, from_mode);
864 insns = get_insns ();
865 end_sequence ();
866 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
867 from));
868 return;
869 }
870
871 /* Now both modes are integers. */
872
873 /* Handle expanding beyond a word. */
874 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
875 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
876 {
877 rtx insns;
878 rtx lowpart;
879 rtx fill_value;
880 rtx lowfrom;
881 int i;
882 enum machine_mode lowpart_mode;
883 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
884
885 /* Try converting directly if the insn is supported. */
886 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
887 != CODE_FOR_nothing)
888 {
889 /* If FROM is a SUBREG, put it into a register. Do this
890 so that we always generate the same set of insns for
891 better cse'ing; if an intermediate assignment occurred,
892 we won't be doing the operation directly on the SUBREG. */
893 if (optimize > 0 && GET_CODE (from) == SUBREG)
894 from = force_reg (from_mode, from);
895 emit_unop_insn (code, to, from, equiv_code);
896 return;
897 }
898 /* Next, try converting via full word. */
899 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
900 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
901 != CODE_FOR_nothing))
902 {
903 if (GET_CODE (to) == REG)
904 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
905 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
906 emit_unop_insn (code, to,
907 gen_lowpart (word_mode, to), equiv_code);
908 return;
909 }
910
911 /* No special multiword conversion insn; do it by hand. */
912 start_sequence ();
913
914 /* Since we will turn this into a no conflict block, we must ensure
915 that the source does not overlap the target. */
916
917 if (reg_overlap_mentioned_p (to, from))
918 from = force_reg (from_mode, from);
919
920 /* Get a copy of FROM widened to a word, if necessary. */
921 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
922 lowpart_mode = word_mode;
923 else
924 lowpart_mode = from_mode;
925
926 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
927
928 lowpart = gen_lowpart (lowpart_mode, to);
929 emit_move_insn (lowpart, lowfrom);
930
931 /* Compute the value to put in each remaining word. */
932 if (unsignedp)
933 fill_value = const0_rtx;
934 else
935 {
936 #ifdef HAVE_slt
937 if (HAVE_slt
938 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
939 && STORE_FLAG_VALUE == -1)
940 {
941 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
942 lowpart_mode, 0);
943 fill_value = gen_reg_rtx (word_mode);
944 emit_insn (gen_slt (fill_value));
945 }
946 else
947 #endif
948 {
949 fill_value
950 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
951 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
952 NULL_RTX, 0);
953 fill_value = convert_to_mode (word_mode, fill_value, 1);
954 }
955 }
956
957 /* Fill the remaining words. */
958 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
959 {
960 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
961 rtx subword = operand_subword (to, index, 1, to_mode);
962
963 if (subword == 0)
964 abort ();
965
966 if (fill_value != subword)
967 emit_move_insn (subword, fill_value);
968 }
969
970 insns = get_insns ();
971 end_sequence ();
972
973 emit_no_conflict_block (insns, to, from, NULL_RTX,
974 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
975 return;
976 }
977
978 /* Truncating multi-word to a word or less. */
979 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
980 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
981 {
982 if (!((GET_CODE (from) == MEM
983 && ! MEM_VOLATILE_P (from)
984 && direct_load[(int) to_mode]
985 && ! mode_dependent_address_p (XEXP (from, 0)))
986 || GET_CODE (from) == REG
987 || GET_CODE (from) == SUBREG))
988 from = force_reg (from_mode, from);
989 convert_move (to, gen_lowpart (word_mode, from), 0);
990 return;
991 }
992
993 /* Handle pointer conversion. */ /* SPEE 900220. */
994 if (to_mode == PQImode)
995 {
996 if (from_mode != QImode)
997 from = convert_to_mode (QImode, from, unsignedp);
998
999 #ifdef HAVE_truncqipqi2
1000 if (HAVE_truncqipqi2)
1001 {
1002 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
1003 return;
1004 }
1005 #endif /* HAVE_truncqipqi2 */
1006 abort ();
1007 }
1008
1009 if (from_mode == PQImode)
1010 {
1011 if (to_mode != QImode)
1012 {
1013 from = convert_to_mode (QImode, from, unsignedp);
1014 from_mode = QImode;
1015 }
1016 else
1017 {
1018 #ifdef HAVE_extendpqiqi2
1019 if (HAVE_extendpqiqi2)
1020 {
1021 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1022 return;
1023 }
1024 #endif /* HAVE_extendpqiqi2 */
1025 abort ();
1026 }
1027 }
1028
1029 if (to_mode == PSImode)
1030 {
1031 if (from_mode != SImode)
1032 from = convert_to_mode (SImode, from, unsignedp);
1033
1034 #ifdef HAVE_truncsipsi2
1035 if (HAVE_truncsipsi2)
1036 {
1037 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1038 return;
1039 }
1040 #endif /* HAVE_truncsipsi2 */
1041 abort ();
1042 }
1043
1044 if (from_mode == PSImode)
1045 {
1046 if (to_mode != SImode)
1047 {
1048 from = convert_to_mode (SImode, from, unsignedp);
1049 from_mode = SImode;
1050 }
1051 else
1052 {
1053 #ifdef HAVE_extendpsisi2
1054 if (! unsignedp && HAVE_extendpsisi2)
1055 {
1056 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1057 return;
1058 }
1059 #endif /* HAVE_extendpsisi2 */
1060 #ifdef HAVE_zero_extendpsisi2
1061 if (unsignedp && HAVE_zero_extendpsisi2)
1062 {
1063 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1064 return;
1065 }
1066 #endif /* HAVE_zero_extendpsisi2 */
1067 abort ();
1068 }
1069 }
1070
1071 if (to_mode == PDImode)
1072 {
1073 if (from_mode != DImode)
1074 from = convert_to_mode (DImode, from, unsignedp);
1075
1076 #ifdef HAVE_truncdipdi2
1077 if (HAVE_truncdipdi2)
1078 {
1079 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1080 return;
1081 }
1082 #endif /* HAVE_truncdipdi2 */
1083 abort ();
1084 }
1085
1086 if (from_mode == PDImode)
1087 {
1088 if (to_mode != DImode)
1089 {
1090 from = convert_to_mode (DImode, from, unsignedp);
1091 from_mode = DImode;
1092 }
1093 else
1094 {
1095 #ifdef HAVE_extendpdidi2
1096 if (HAVE_extendpdidi2)
1097 {
1098 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1099 return;
1100 }
1101 #endif /* HAVE_extendpdidi2 */
1102 abort ();
1103 }
1104 }
1105
1106 /* Now follow all the conversions between integers
1107 no more than a word long. */
1108
1109 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1110 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1111 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1112 GET_MODE_BITSIZE (from_mode)))
1113 {
1114 if (!((GET_CODE (from) == MEM
1115 && ! MEM_VOLATILE_P (from)
1116 && direct_load[(int) to_mode]
1117 && ! mode_dependent_address_p (XEXP (from, 0)))
1118 || GET_CODE (from) == REG
1119 || GET_CODE (from) == SUBREG))
1120 from = force_reg (from_mode, from);
1121 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1122 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1123 from = copy_to_reg (from);
1124 emit_move_insn (to, gen_lowpart (to_mode, from));
1125 return;
1126 }
1127
1128 /* Handle extension. */
1129 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1130 {
1131 /* Convert directly if that works. */
1132 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1133 != CODE_FOR_nothing)
1134 {
1135 if (flag_force_mem)
1136 from = force_not_mem (from);
1137
1138 emit_unop_insn (code, to, from, equiv_code);
1139 return;
1140 }
1141 else
1142 {
1143 enum machine_mode intermediate;
1144 rtx tmp;
1145 tree shift_amount;
1146
1147 /* Search for a mode to convert via. */
1148 for (intermediate = from_mode; intermediate != VOIDmode;
1149 intermediate = GET_MODE_WIDER_MODE (intermediate))
1150 if (((can_extend_p (to_mode, intermediate, unsignedp)
1151 != CODE_FOR_nothing)
1152 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1153 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1154 GET_MODE_BITSIZE (intermediate))))
1155 && (can_extend_p (intermediate, from_mode, unsignedp)
1156 != CODE_FOR_nothing))
1157 {
1158 convert_move (to, convert_to_mode (intermediate, from,
1159 unsignedp), unsignedp);
1160 return;
1161 }
1162
1163 /* No suitable intermediate mode.
1164 Generate what we need with shifts. */
1165 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1166 - GET_MODE_BITSIZE (from_mode), 0);
1167 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1168 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1169 to, unsignedp);
1170 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1171 to, unsignedp);
1172 if (tmp != to)
1173 emit_move_insn (to, tmp);
1174 return;
1175 }
1176 }
1177
1178 /* Support special truncate insns for certain modes. */
1179
1180 if (from_mode == DImode && to_mode == SImode)
1181 {
1182 #ifdef HAVE_truncdisi2
1183 if (HAVE_truncdisi2)
1184 {
1185 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1186 return;
1187 }
1188 #endif
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1190 return;
1191 }
1192
1193 if (from_mode == DImode && to_mode == HImode)
1194 {
1195 #ifdef HAVE_truncdihi2
1196 if (HAVE_truncdihi2)
1197 {
1198 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1199 return;
1200 }
1201 #endif
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1203 return;
1204 }
1205
1206 if (from_mode == DImode && to_mode == QImode)
1207 {
1208 #ifdef HAVE_truncdiqi2
1209 if (HAVE_truncdiqi2)
1210 {
1211 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1212 return;
1213 }
1214 #endif
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1216 return;
1217 }
1218
1219 if (from_mode == SImode && to_mode == HImode)
1220 {
1221 #ifdef HAVE_truncsihi2
1222 if (HAVE_truncsihi2)
1223 {
1224 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1225 return;
1226 }
1227 #endif
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1229 return;
1230 }
1231
1232 if (from_mode == SImode && to_mode == QImode)
1233 {
1234 #ifdef HAVE_truncsiqi2
1235 if (HAVE_truncsiqi2)
1236 {
1237 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1238 return;
1239 }
1240 #endif
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1242 return;
1243 }
1244
1245 if (from_mode == HImode && to_mode == QImode)
1246 {
1247 #ifdef HAVE_trunchiqi2
1248 if (HAVE_trunchiqi2)
1249 {
1250 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1251 return;
1252 }
1253 #endif
1254 convert_move (to, force_reg (from_mode, from), unsignedp);
1255 return;
1256 }
1257
1258 if (from_mode == TImode && to_mode == DImode)
1259 {
1260 #ifdef HAVE_trunctidi2
1261 if (HAVE_trunctidi2)
1262 {
1263 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1264 return;
1265 }
1266 #endif
1267 convert_move (to, force_reg (from_mode, from), unsignedp);
1268 return;
1269 }
1270
1271 if (from_mode == TImode && to_mode == SImode)
1272 {
1273 #ifdef HAVE_trunctisi2
1274 if (HAVE_trunctisi2)
1275 {
1276 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1277 return;
1278 }
1279 #endif
1280 convert_move (to, force_reg (from_mode, from), unsignedp);
1281 return;
1282 }
1283
1284 if (from_mode == TImode && to_mode == HImode)
1285 {
1286 #ifdef HAVE_trunctihi2
1287 if (HAVE_trunctihi2)
1288 {
1289 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1290 return;
1291 }
1292 #endif
1293 convert_move (to, force_reg (from_mode, from), unsignedp);
1294 return;
1295 }
1296
1297 if (from_mode == TImode && to_mode == QImode)
1298 {
1299 #ifdef HAVE_trunctiqi2
1300 if (HAVE_trunctiqi2)
1301 {
1302 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1303 return;
1304 }
1305 #endif
1306 convert_move (to, force_reg (from_mode, from), unsignedp);
1307 return;
1308 }
1309
1310 /* Handle truncation of volatile memrefs, and so on;
1311 the things that couldn't be truncated directly,
1312 and for which there was no special instruction. */
1313 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1314 {
1315 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1316 emit_move_insn (to, temp);
1317 return;
1318 }
1319
1320 /* Mode combination is not recognized. */
1321 abort ();
1322 }
1323
1324 /* Return an rtx for a value that would result
1325 from converting X to mode MODE.
1326 Both X and MODE may be floating, or both integer.
1327 UNSIGNEDP is nonzero if X is an unsigned value.
1328 This can be done by referring to a part of X in place
1329 or by copying to a new temporary with conversion.
1330
1331 This function *must not* call protect_from_queue
1332 except when putting X into an insn (in which case convert_move does it). */
1333
1334 rtx
1335 convert_to_mode (mode, x, unsignedp)
1336 enum machine_mode mode;
1337 rtx x;
1338 int unsignedp;
1339 {
1340 return convert_modes (mode, VOIDmode, x, unsignedp);
1341 }
1342
1343 /* Return an rtx for a value that would result
1344 from converting X from mode OLDMODE to mode MODE.
1345 Both modes may be floating, or both integer.
1346 UNSIGNEDP is nonzero if X is an unsigned value.
1347
1348 This can be done by referring to a part of X in place
1349 or by copying to a new temporary with conversion.
1350
1351 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1352
1353 This function *must not* call protect_from_queue
1354 except when putting X into an insn (in which case convert_move does it). */
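/* Illustrative example (not part of the original comments):
   convert_modes (SImode, QImode, GEN_INT (-1), 1) folds the
   conversion at compile time and returns (const_int 255), because the
   QImode value is zero-extended rather than sign-extended when
   UNSIGNEDP is nonzero.  */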
1355
1356 rtx
1357 convert_modes (mode, oldmode, x, unsignedp)
1358 enum machine_mode mode, oldmode;
1359 rtx x;
1360 int unsignedp;
1361 {
1362 rtx temp;
1363
1364 /* If FROM is a SUBREG that indicates that we have already done at least
1365 the required extension, strip it. */
1366
1367 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1368 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1369 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1370 x = gen_lowpart (mode, x);
1371
1372 if (GET_MODE (x) != VOIDmode)
1373 oldmode = GET_MODE (x);
1374
1375 if (mode == oldmode)
1376 return x;
1377
1378 /* There is one case that we must handle specially: If we are converting
1379 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1380 we are to interpret the constant as unsigned, gen_lowpart will do
1381 the wrong if the constant appears negative. What we want to do is
1382 make the high-order word of the constant zero, not all ones. */
1383
1384 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1385 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1386 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1387 {
1388 HOST_WIDE_INT val = INTVAL (x);
1389
1390 if (oldmode != VOIDmode
1391 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1392 {
1393 int width = GET_MODE_BITSIZE (oldmode);
1394
1395 /* We need to zero extend VAL. */
1396 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1397 }
1398
1399 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1400 }
1401
1402 /* We can do this with a gen_lowpart if both desired and current modes
1403 are integer, and this is either a constant integer, a register, or a
1404 non-volatile MEM. Except for the constant case where MODE is no
1405 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1406
1407 if ((GET_CODE (x) == CONST_INT
1408 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1409 || (GET_MODE_CLASS (mode) == MODE_INT
1410 && GET_MODE_CLASS (oldmode) == MODE_INT
1411 && (GET_CODE (x) == CONST_DOUBLE
1412 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1413 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1414 && direct_load[(int) mode])
1415 || (GET_CODE (x) == REG
1416 && (! HARD_REGISTER_P (x)
1417 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1418 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1419 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1420 {
1421 /* ??? If we don't know OLDMODE, we have to assume here that
1422 X does not need sign- or zero-extension. This may not be
1423 the case, but it's the best we can do. */
1424 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1425 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1426 {
1427 HOST_WIDE_INT val = INTVAL (x);
1428 int width = GET_MODE_BITSIZE (oldmode);
1429
1430 /* We must sign or zero-extend in this case. Start by
1431 zero-extending, then sign extend if we need to. */
1432 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1433 if (! unsignedp
1434 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1435 val |= (HOST_WIDE_INT) (-1) << width;
1436
1437 return gen_int_mode (val, mode);
1438 }
1439
1440 return gen_lowpart (mode, x);
1441 }
1442
1443 temp = gen_reg_rtx (mode);
1444 convert_move (temp, x, unsignedp);
1445 return temp;
1446 }
1447 \f
1448 /* This macro is used to determine the largest unit size that
1449 move_by_pieces can use. */
1450
1451 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1452 move efficiently, as opposed to MOVE_MAX which is the maximum
1453 number of bytes we can move with a single instruction. */
1454
1455 #ifndef MOVE_MAX_PIECES
1456 #define MOVE_MAX_PIECES MOVE_MAX
1457 #endif
1458
1459 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1460 store efficiently. Due to internal GCC limitations, this is
1461 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1462 for an immediate constant. */
1463
1464 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
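/* For example, on a host with 64-bit HOST_WIDE_INT and a target whose
   MOVE_MAX_PIECES is 16, STORE_MAX_PIECES evaluates to 16; with a
   32-bit HOST_WIDE_INT it is limited to 8 bytes.  */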
1465
1466 /* Determine whether the LEN bytes can be moved by using several move
1467 instructions. Return nonzero if a call to move_by_pieces should
1468 succeed. */
1469
1470 int
1471 can_move_by_pieces (len, align)
1472 unsigned HOST_WIDE_INT len;
1473 unsigned int align ATTRIBUTE_UNUSED;
1474 {
1475 return MOVE_BY_PIECES_P (len, align);
1476 }
1477
1478 /* Generate several move instructions to copy LEN bytes from block FROM to
1479 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1480 and TO through protect_from_queue before calling.
1481
1482 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1483 used to push FROM to the stack.
1484
1485 ALIGN is maximum stack alignment we can assume.
1486
1487 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1488 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
1489 stpcpy. */
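/* Usage note (illustrative): emit_block_move below calls
   move_by_pieces (x, y, INTVAL (size), align, 0) for small constant
   sizes, while callers that want a mempcpy- or stpcpy-style result
   pass ENDP of 1 or 2.  */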
1490
1491 rtx
1492 move_by_pieces (to, from, len, align, endp)
1493 rtx to, from;
1494 unsigned HOST_WIDE_INT len;
1495 unsigned int align;
1496 int endp;
1497 {
1498 struct move_by_pieces data;
1499 rtx to_addr, from_addr = XEXP (from, 0);
1500 unsigned int max_size = MOVE_MAX_PIECES + 1;
1501 enum machine_mode mode = VOIDmode, tmode;
1502 enum insn_code icode;
1503
1504 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
1505
1506 data.offset = 0;
1507 data.from_addr = from_addr;
1508 if (to)
1509 {
1510 to_addr = XEXP (to, 0);
1511 data.to = to;
1512 data.autinc_to
1513 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1514 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1515 data.reverse
1516 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1517 }
1518 else
1519 {
1520 to_addr = NULL_RTX;
1521 data.to = NULL_RTX;
1522 data.autinc_to = 1;
1523 #ifdef STACK_GROWS_DOWNWARD
1524 data.reverse = 1;
1525 #else
1526 data.reverse = 0;
1527 #endif
1528 }
1529 data.to_addr = to_addr;
1530 data.from = from;
1531 data.autinc_from
1532 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1533 || GET_CODE (from_addr) == POST_INC
1534 || GET_CODE (from_addr) == POST_DEC);
1535
1536 data.explicit_inc_from = 0;
1537 data.explicit_inc_to = 0;
1538 if (data.reverse) data.offset = len;
1539 data.len = len;
1540
1541 /* If copying requires more than two move insns,
1542 copy addresses to registers (to make displacements shorter)
1543 and use post-increment if available. */
1544 if (!(data.autinc_from && data.autinc_to)
1545 && move_by_pieces_ninsns (len, align) > 2)
1546 {
1547 /* Find the mode of the largest move... */
1548 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1549 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1550 if (GET_MODE_SIZE (tmode) < max_size)
1551 mode = tmode;
1552
1553 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1554 {
1555 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1556 data.autinc_from = 1;
1557 data.explicit_inc_from = -1;
1558 }
1559 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1560 {
1561 data.from_addr = copy_addr_to_reg (from_addr);
1562 data.autinc_from = 1;
1563 data.explicit_inc_from = 1;
1564 }
1565 if (!data.autinc_from && CONSTANT_P (from_addr))
1566 data.from_addr = copy_addr_to_reg (from_addr);
1567 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1568 {
1569 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1570 data.autinc_to = 1;
1571 data.explicit_inc_to = -1;
1572 }
1573 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1574 {
1575 data.to_addr = copy_addr_to_reg (to_addr);
1576 data.autinc_to = 1;
1577 data.explicit_inc_to = 1;
1578 }
1579 if (!data.autinc_to && CONSTANT_P (to_addr))
1580 data.to_addr = copy_addr_to_reg (to_addr);
1581 }
1582
1583 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1584 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1585 align = MOVE_MAX * BITS_PER_UNIT;
1586
1587 /* First move what we can in the largest integer mode, then go to
1588 successively smaller modes. */
1589
1590 while (max_size > 1)
1591 {
1592 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1593 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1594 if (GET_MODE_SIZE (tmode) < max_size)
1595 mode = tmode;
1596
1597 if (mode == VOIDmode)
1598 break;
1599
1600 icode = mov_optab->handlers[(int) mode].insn_code;
1601 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1602 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1603
1604 max_size = GET_MODE_SIZE (mode);
1605 }
1606
1607 /* The code above should have handled everything. */
1608 if (data.len > 0)
1609 abort ();
1610
1611 if (endp)
1612 {
1613 rtx to1;
1614
1615 if (data.reverse)
1616 abort ();
1617 if (data.autinc_to)
1618 {
1619 if (endp == 2)
1620 {
1621 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1622 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1623 else
1624 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1625 -1));
1626 }
1627 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1628 data.offset);
1629 }
1630 else
1631 {
1632 if (endp == 2)
1633 --data.offset;
1634 to1 = adjust_address (data.to, QImode, data.offset);
1635 }
1636 return to1;
1637 }
1638 else
1639 return data.to;
1640 }
1641
1642 /* Return number of insns required to move L bytes by pieces.
1643 ALIGN (in bits) is maximum alignment we can assume. */
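/* Illustrative example: with MOVE_MAX == 4 and word-aligned operands,
   moving 10 bytes costs 2 SImode moves plus 1 HImode move, so this
   function returns 3.  */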
1644
1645 static unsigned HOST_WIDE_INT
1646 move_by_pieces_ninsns (l, align)
1647 unsigned HOST_WIDE_INT l;
1648 unsigned int align;
1649 {
1650 unsigned HOST_WIDE_INT n_insns = 0;
1651 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1652
1653 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1654 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1655 align = MOVE_MAX * BITS_PER_UNIT;
1656
1657 while (max_size > 1)
1658 {
1659 enum machine_mode mode = VOIDmode, tmode;
1660 enum insn_code icode;
1661
1662 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1663 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1664 if (GET_MODE_SIZE (tmode) < max_size)
1665 mode = tmode;
1666
1667 if (mode == VOIDmode)
1668 break;
1669
1670 icode = mov_optab->handlers[(int) mode].insn_code;
1671 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1672 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1673
1674 max_size = GET_MODE_SIZE (mode);
1675 }
1676
1677 if (l)
1678 abort ();
1679 return n_insns;
1680 }
1681
1682 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1683 with move instructions for mode MODE. GENFUN is the gen_... function
1684 to make a move insn for that mode. DATA has all the other info. */
1685
1686 static void
1687 move_by_pieces_1 (genfun, mode, data)
1688 rtx (*genfun) PARAMS ((rtx, ...));
1689 enum machine_mode mode;
1690 struct move_by_pieces *data;
1691 {
1692 unsigned int size = GET_MODE_SIZE (mode);
1693 rtx to1 = NULL_RTX, from1;
1694
1695 while (data->len >= size)
1696 {
1697 if (data->reverse)
1698 data->offset -= size;
1699
1700 if (data->to)
1701 {
1702 if (data->autinc_to)
1703 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1704 data->offset);
1705 else
1706 to1 = adjust_address (data->to, mode, data->offset);
1707 }
1708
1709 if (data->autinc_from)
1710 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1711 data->offset);
1712 else
1713 from1 = adjust_address (data->from, mode, data->offset);
1714
1715 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1716 emit_insn (gen_add2_insn (data->to_addr,
1717 GEN_INT (-(HOST_WIDE_INT)size)));
1718 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1719 emit_insn (gen_add2_insn (data->from_addr,
1720 GEN_INT (-(HOST_WIDE_INT)size)));
1721
1722 if (data->to)
1723 emit_insn ((*genfun) (to1, from1));
1724 else
1725 {
1726 #ifdef PUSH_ROUNDING
1727 emit_single_push_insn (mode, from1, NULL);
1728 #else
1729 abort ();
1730 #endif
1731 }
1732
1733 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1734 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1735 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1736 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1737
1738 if (! data->reverse)
1739 data->offset += size;
1740
1741 data->len -= size;
1742 }
1743 }
1744 \f
1745 /* Emit code to move a block Y to a block X. This may be done with
1746 string-move instructions, with multiple scalar move instructions,
1747 or with a library call.
1748
1749 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1750 SIZE is an rtx that says how long they are.
1751 ALIGN is the maximum alignment we can assume they have.
1752 METHOD describes what kind of copy this is, and what mechanisms may be used.
1753
1754 Return the address of the new block, if memcpy is called and returns it,
1755 0 otherwise. */
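/* Illustrative call (hypothetical): copying a BLKmode temporary TEMP
   into TARGET with a known size would look like

     emit_block_move (target, temp, GEN_INT (size), BLOCK_OP_NORMAL);

   where both TARGET and TEMP are BLKmode MEMs.  */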
1756
1757 rtx
1758 emit_block_move (x, y, size, method)
1759 rtx x, y, size;
1760 enum block_op_methods method;
1761 {
1762 bool may_use_call;
1763 rtx retval = 0;
1764 unsigned int align;
1765
1766 switch (method)
1767 {
1768 case BLOCK_OP_NORMAL:
1769 may_use_call = true;
1770 break;
1771
1772 case BLOCK_OP_CALL_PARM:
1773 may_use_call = block_move_libcall_safe_for_call_parm ();
1774
1775 /* Make inhibit_defer_pop nonzero around the library call
1776 to force it to pop the arguments right away. */
1777 NO_DEFER_POP;
1778 break;
1779
1780 case BLOCK_OP_NO_LIBCALL:
1781 may_use_call = false;
1782 break;
1783
1784 default:
1785 abort ();
1786 }
1787
1788 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1789
1790 if (GET_MODE (x) != BLKmode)
1791 abort ();
1792 if (GET_MODE (y) != BLKmode)
1793 abort ();
1794
1795 x = protect_from_queue (x, 1);
1796 y = protect_from_queue (y, 0);
1797 size = protect_from_queue (size, 0);
1798
1799 if (GET_CODE (x) != MEM)
1800 abort ();
1801 if (GET_CODE (y) != MEM)
1802 abort ();
1803 if (size == 0)
1804 abort ();
1805
1806 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1807 can be incorrect is coming from __builtin_memcpy. */
1808 if (GET_CODE (size) == CONST_INT)
1809 {
1810 x = shallow_copy_rtx (x);
1811 y = shallow_copy_rtx (y);
1812 set_mem_size (x, size);
1813 set_mem_size (y, size);
1814 }
1815
1816 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1817 move_by_pieces (x, y, INTVAL (size), align, 0);
1818 else if (emit_block_move_via_movstr (x, y, size, align))
1819 ;
1820 else if (may_use_call)
1821 retval = emit_block_move_via_libcall (x, y, size);
1822 else
1823 emit_block_move_via_loop (x, y, size, align);
1824
1825 if (method == BLOCK_OP_CALL_PARM)
1826 OK_DEFER_POP;
1827
1828 return retval;
1829 }
1830
1831 /* A subroutine of emit_block_move. Returns true if calling the
1832 block move libcall will not clobber any parameters which may have
1833 already been placed on the stack. */
1834
1835 static bool
1836 block_move_libcall_safe_for_call_parm ()
1837 {
1838 if (PUSH_ARGS)
1839 return true;
1840 else
1841 {
1842 /* Check to see whether memcpy takes all register arguments. */
1843 static enum {
1844 takes_regs_uninit, takes_regs_no, takes_regs_yes
1845 } takes_regs = takes_regs_uninit;
1846
1847 switch (takes_regs)
1848 {
1849 case takes_regs_uninit:
1850 {
1851 CUMULATIVE_ARGS args_so_far;
1852 tree fn, arg;
1853
1854 fn = emit_block_move_libcall_fn (false);
1855 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1856
1857 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1858 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1859 {
1860 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1861 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1862 if (!tmp || !REG_P (tmp))
1863 goto fail_takes_regs;
1864 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1865 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1866 NULL_TREE, 1))
1867 goto fail_takes_regs;
1868 #endif
1869 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1870 }
1871 }
1872 takes_regs = takes_regs_yes;
1873 /* FALLTHRU */
1874
1875 case takes_regs_yes:
1876 return true;
1877
1878 fail_takes_regs:
1879 takes_regs = takes_regs_no;
1880 /* FALLTHRU */
1881 case takes_regs_no:
1882 return false;
1883
1884 default:
1885 abort ();
1886 }
1887 }
1888 }
1889
1890 /* A subroutine of emit_block_move. Expand a movstr pattern;
1891 return true if successful. */
1892
1893 static bool
1894 emit_block_move_via_movstr (x, y, size, align)
1895 rtx x, y, size;
1896 unsigned int align;
1897 {
1898 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1899 enum machine_mode mode;
1900
1901 /* Since this is a move insn, we don't care about volatility. */
1902 volatile_ok = 1;
1903
1904 /* Try the most limited insn first, because there's no point
1905 including more than one in the machine description unless
1906 the more limited one has some advantage. */
1907
1908 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1909 mode = GET_MODE_WIDER_MODE (mode))
1910 {
1911 enum insn_code code = movstr_optab[(int) mode];
1912 insn_operand_predicate_fn pred;
1913
1914 if (code != CODE_FOR_nothing
1915 /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
1916 here because if SIZE is less than the mode mask, as it is
1917 returned by the macro, it will definitely be less than the
1918 actual mode mask. */
1919 && ((GET_CODE (size) == CONST_INT
1920 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1921 <= (GET_MODE_MASK (mode) >> 1)))
1922 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1923 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1924 || (*pred) (x, BLKmode))
1925 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1926 || (*pred) (y, BLKmode))
1927 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1928 || (*pred) (opalign, VOIDmode)))
1929 {
1930 rtx op2;
1931 rtx last = get_last_insn ();
1932 rtx pat;
1933
1934 op2 = convert_to_mode (mode, size, 1);
1935 pred = insn_data[(int) code].operand[2].predicate;
1936 if (pred != 0 && ! (*pred) (op2, mode))
1937 op2 = copy_to_mode_reg (mode, op2);
1938
1939 /* ??? When called via emit_block_move_for_call, it'd be
1940 nice if there were some way to inform the backend, so
1941 that it doesn't fail the expansion because it thinks
1942 emitting the libcall would be more efficient. */
1943
1944 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1945 if (pat)
1946 {
1947 emit_insn (pat);
1948 volatile_ok = 0;
1949 return true;
1950 }
1951 else
1952 delete_insns_since (last);
1953 }
1954 }
1955
1956 volatile_ok = 0;
1957 return false;
1958 }
1959
1960 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1961 Return the return value from memcpy, 0 otherwise. */
1962
1963 static rtx
1964 emit_block_move_via_libcall (dst, src, size)
1965 rtx dst, src, size;
1966 {
1967 rtx dst_addr, src_addr;
1968 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1969 enum machine_mode size_mode;
1970 rtx retval;
1971
1972 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1973
1974 It is unsafe to save the value generated by protect_from_queue and reuse
1975 it later. Consider what happens if emit_queue is called before the
1976 return value from protect_from_queue is used.
1977
1978 Expansion of the CALL_EXPR below will call emit_queue before we are
1979 finished emitting RTL for argument setup. So if we are not careful we
1980 could get the wrong value for an argument.
1981
1982 To avoid this problem we go ahead and emit code to copy the addresses of
1983 DST and SRC and SIZE into new pseudos. We can then place those new
1984 pseudos into an RTL_EXPR and use them later, even after a call to
1985 emit_queue.
1986
1987 Note this is not strictly needed for library calls since they do not call
1988 emit_queue before loading their arguments. However, we may need to have
1989 library calls call emit_queue in the future since failing to do so could
1990 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1991 arguments in registers. */
1992
1993 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1994 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1995
1996 #ifdef POINTERS_EXTEND_UNSIGNED
1997 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1998 src_addr = convert_memory_address (ptr_mode, src_addr);
1999 #endif
2000
2001 dst_tree = make_tree (ptr_type_node, dst_addr);
2002 src_tree = make_tree (ptr_type_node, src_addr);
2003
2004 if (TARGET_MEM_FUNCTIONS)
2005 size_mode = TYPE_MODE (sizetype);
2006 else
2007 size_mode = TYPE_MODE (unsigned_type_node);
2008
2009 size = convert_to_mode (size_mode, size, 1);
2010 size = copy_to_mode_reg (size_mode, size);
2011
2012 /* It is incorrect to use the libcall calling conventions to call
2013 memcpy in this context. This could be a user call to memcpy and
2014 the user may wish to examine the return value from memcpy. For
2015 targets where libcalls and normal calls have different conventions
2016 for returning pointers, we could end up generating incorrect code.
2017
2018 For convenience, we generate the call to bcopy this way as well. */
2019
2020 if (TARGET_MEM_FUNCTIONS)
2021 size_tree = make_tree (sizetype, size);
2022 else
2023 size_tree = make_tree (unsigned_type_node, size);
2024
2025 fn = emit_block_move_libcall_fn (true);
2026 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2027 if (TARGET_MEM_FUNCTIONS)
2028 {
2029 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2030 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2031 }
2032 else
2033 {
2034 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2035 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2036 }
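  /* Because tree_cons prepends, the lists built above correspond to
     memcpy (dst, src, size) when TARGET_MEM_FUNCTIONS and to
     bcopy (src, dst, size) otherwise; note the swapped pointer order.  */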
2037
2038 /* Now we have to build up the CALL_EXPR itself. */
2039 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2040 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2041 call_expr, arg_list, NULL_TREE);
2042 TREE_SIDE_EFFECTS (call_expr) = 1;
2043
2044 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2045
2046 /* If we are initializing a readonly value, show the above call clobbered
2047 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2048 the delay slot scheduler might overlook conflicts and take nasty
2049 decisions. */
2050 if (RTX_UNCHANGING_P (dst))
2051 add_function_usage_to
2052 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2053 gen_rtx_CLOBBER (VOIDmode, dst),
2054 NULL_RTX));
2055
2056 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
2057 }
2058
2059 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2060 for the function we use for block copies. The first time FOR_CALL
2061 is true, we call assemble_external. */
2062
2063 static GTY(()) tree block_move_fn;
2064
2065 void
2066 init_block_move_fn (asmspec)
2067 const char *asmspec;
2068 {
2069 if (!block_move_fn)
2070 {
2071 tree args, fn;
2072
2073 if (TARGET_MEM_FUNCTIONS)
2074 {
2075 fn = get_identifier ("memcpy");
2076 args = build_function_type_list (ptr_type_node, ptr_type_node,
2077 const_ptr_type_node, sizetype,
2078 NULL_TREE);
2079 }
2080 else
2081 {
2082 fn = get_identifier ("bcopy");
2083 args = build_function_type_list (void_type_node, const_ptr_type_node,
2084 ptr_type_node, unsigned_type_node,
2085 NULL_TREE);
2086 }
2087
2088 fn = build_decl (FUNCTION_DECL, fn, args);
2089 DECL_EXTERNAL (fn) = 1;
2090 TREE_PUBLIC (fn) = 1;
2091 DECL_ARTIFICIAL (fn) = 1;
2092 TREE_NOTHROW (fn) = 1;
2093
2094 block_move_fn = fn;
2095 }
2096
2097 if (asmspec)
2098 {
2099 SET_DECL_RTL (block_move_fn, NULL_RTX);
2100 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2101 }
2102 }
2103
2104 static tree
2105 emit_block_move_libcall_fn (for_call)
2106 int for_call;
2107 {
2108 static bool emitted_extern;
2109
2110 if (!block_move_fn)
2111 init_block_move_fn (NULL);
2112
2113 if (for_call && !emitted_extern)
2114 {
2115 emitted_extern = true;
2116 make_decl_rtl (block_move_fn, NULL);
2117 assemble_external (block_move_fn);
2118 }
2119
2120 return block_move_fn;
2121 }
2122
2123 /* A subroutine of emit_block_move. Copy the data via an explicit
2124 loop. This is used only when libcalls are forbidden. */
2125 /* ??? It'd be nice to copy in hunks larger than QImode. */
2126
2127 static void
2128 emit_block_move_via_loop (x, y, size, align)
2129 rtx x, y, size;
2130 unsigned int align ATTRIBUTE_UNUSED;
2131 {
2132 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2133 enum machine_mode iter_mode;
2134
2135 iter_mode = GET_MODE (size);
2136 if (iter_mode == VOIDmode)
2137 iter_mode = word_mode;
2138
2139 top_label = gen_label_rtx ();
2140 cmp_label = gen_label_rtx ();
2141 iter = gen_reg_rtx (iter_mode);
2142
2143 emit_move_insn (iter, const0_rtx);
2144
2145 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2146 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2147 do_pending_stack_adjust ();
2148
2149 emit_note (NOTE_INSN_LOOP_BEG);
2150
2151 emit_jump (cmp_label);
2152 emit_label (top_label);
2153
2154 tmp = convert_modes (Pmode, iter_mode, iter, true);
2155 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2156 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2157 x = change_address (x, QImode, x_addr);
2158 y = change_address (y, QImode, y_addr);
2159
2160 emit_move_insn (x, y);
2161
2162 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2163 true, OPTAB_LIB_WIDEN);
2164 if (tmp != iter)
2165 emit_move_insn (iter, tmp);
2166
2167 emit_note (NOTE_INSN_LOOP_CONT);
2168 emit_label (cmp_label);
2169
2170 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2171 true, top_label);
2172
2173 emit_note (NOTE_INSN_LOOP_END);
2174 }
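
/* Illustrative sketch, not part of GCC: the RTL emitted by
   emit_block_move_via_loop above corresponds to the byte-copy loop
   below.  The unsigned char and unsigned long types are assumptions
   standing in for QImode and the iteration mode.  */
#if 0
static void
block_move_loop_sketch (x, y, size)
     unsigned char *x;
     const unsigned char *y;
     unsigned long size;
{
  unsigned long iter = 0;

  goto cmp;
 top:
  x[iter] = y[iter];
  iter += 1;
 cmp:
  if (iter < size)
    goto top;
}
#endif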
2175 \f
2176 /* Copy all or part of a value X into registers starting at REGNO.
2177 The number of registers to be filled is NREGS. */
2178
2179 void
2180 move_block_to_reg (regno, x, nregs, mode)
2181 int regno;
2182 rtx x;
2183 int nregs;
2184 enum machine_mode mode;
2185 {
2186 int i;
2187 #ifdef HAVE_load_multiple
2188 rtx pat;
2189 rtx last;
2190 #endif
2191
2192 if (nregs == 0)
2193 return;
2194
2195 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2196 x = validize_mem (force_const_mem (mode, x));
2197
2198 /* See if the machine can do this with a load multiple insn. */
2199 #ifdef HAVE_load_multiple
2200 if (HAVE_load_multiple)
2201 {
2202 last = get_last_insn ();
2203 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2204 GEN_INT (nregs));
2205 if (pat)
2206 {
2207 emit_insn (pat);
2208 return;
2209 }
2210 else
2211 delete_insns_since (last);
2212 }
2213 #endif
2214
2215 for (i = 0; i < nregs; i++)
2216 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2217 operand_subword_force (x, i, mode));
2218 }
2219
2220 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2221 The number of registers to be filled is NREGS. */
2222
2223 void
2224 move_block_from_reg (regno, x, nregs)
2225 int regno;
2226 rtx x;
2227 int nregs;
2228 {
2229 int i;
2230
2231 if (nregs == 0)
2232 return;
2233
2234 /* See if the machine can do this with a store multiple insn. */
2235 #ifdef HAVE_store_multiple
2236 if (HAVE_store_multiple)
2237 {
2238 rtx last = get_last_insn ();
2239 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2240 GEN_INT (nregs));
2241 if (pat)
2242 {
2243 emit_insn (pat);
2244 return;
2245 }
2246 else
2247 delete_insns_since (last);
2248 }
2249 #endif
2250
2251 for (i = 0; i < nregs; i++)
2252 {
2253 rtx tem = operand_subword (x, i, 1, BLKmode);
2254
2255 if (tem == 0)
2256 abort ();
2257
2258 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2259 }
2260 }
2261
2262 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2263 ORIG, where ORIG is a non-consecutive group of registers represented by
2264 a PARALLEL. The clone is identical to the original except in that the
2265 original set of registers is replaced by a new set of pseudo registers.
2266 The new set has the same modes as the original set. */
2267
2268 rtx
2269 gen_group_rtx (orig)
2270 rtx orig;
2271 {
2272 int i, length;
2273 rtx *tmps;
2274
2275 if (GET_CODE (orig) != PARALLEL)
2276 abort ();
2277
2278 length = XVECLEN (orig, 0);
2279 tmps = (rtx *) alloca (sizeof (rtx) * length);
2280
2281 /* Skip a NULL entry in the first slot. */

2282 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2283
2284 if (i)
2285 tmps[0] = 0;
2286
2287 for (; i < length; i++)
2288 {
2289 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2290 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2291
2292 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2293 }
2294
2295 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2296 }
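
/* Illustrative only: a register group of the kind handled here and in
   the routines below looks, schematically, like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   i.e. a 16-byte value whose first eight bytes live in one DImode
   register and whose next eight bytes live in another; each offset is
   the byte position of that piece within the whole value.  The modes
   and register numbers are made up for the example.  */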
2297
2298 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2299 registers represented by a PARALLEL. SSIZE represents the total size of
2300 block SRC in bytes, or -1 if not known. */
2301 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2302 the balance will be in what would be the low-order memory addresses, i.e.
2303 left justified for big endian, right justified for little endian. This
2304 happens to be true for the targets currently using this support. If this
2305 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2306 would be needed. */
2307
2308 void
2309 emit_group_load (dst, orig_src, ssize)
2310 rtx dst, orig_src;
2311 int ssize;
2312 {
2313 rtx *tmps, src;
2314 int start, i;
2315
2316 if (GET_CODE (dst) != PARALLEL)
2317 abort ();
2318
2319 /* Check for a NULL entry, used to indicate that the parameter goes
2320 both on the stack and in registers. */
2321 if (XEXP (XVECEXP (dst, 0, 0), 0))
2322 start = 0;
2323 else
2324 start = 1;
2325
2326 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2327
2328 /* Process the pieces. */
2329 for (i = start; i < XVECLEN (dst, 0); i++)
2330 {
2331 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2332 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2333 unsigned int bytelen = GET_MODE_SIZE (mode);
2334 int shift = 0;
2335
2336 /* Handle trailing fragments that run over the size of the struct. */
2337 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2338 {
2339 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2340 bytelen = ssize - bytepos;
2341 if (bytelen <= 0)
2342 abort ();
2343 }
2344
2345 /* If we won't be loading directly from memory, protect the real source
2346 from strange tricks we might play; but make sure that the source can
2347 be loaded directly into the destination. */
2348 src = orig_src;
2349 if (GET_CODE (orig_src) != MEM
2350 && (!CONSTANT_P (orig_src)
2351 || (GET_MODE (orig_src) != mode
2352 && GET_MODE (orig_src) != VOIDmode)))
2353 {
2354 if (GET_MODE (orig_src) == VOIDmode)
2355 src = gen_reg_rtx (mode);
2356 else
2357 src = gen_reg_rtx (GET_MODE (orig_src));
2358
2359 emit_move_insn (src, orig_src);
2360 }
2361
2362 /* Optimize the access just a bit. */
2363 if (GET_CODE (src) == MEM
2364 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2365 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2366 && bytelen == GET_MODE_SIZE (mode))
2367 {
2368 tmps[i] = gen_reg_rtx (mode);
2369 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2370 }
2371 else if (GET_CODE (src) == CONCAT)
2372 {
2373 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2374 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2375
2376 if ((bytepos == 0 && bytelen == slen0)
2377 || (bytepos != 0 && bytepos + bytelen <= slen))
2378 {
2379 /* The following assumes that the concatenated objects all
2380 have the same size. In this case, a simple calculation
2381 can be used to determine the object and the bit field
2382 to be extracted. */
2383 tmps[i] = XEXP (src, bytepos / slen0);
2384 if (! CONSTANT_P (tmps[i])
2385 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2386 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2387 (bytepos % slen0) * BITS_PER_UNIT,
2388 1, NULL_RTX, mode, mode, ssize);
2389 }
2390 else if (bytepos == 0)
2391 {
2392 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2393 emit_move_insn (mem, src);
2394 tmps[i] = adjust_address (mem, mode, 0);
2395 }
2396 else
2397 abort ();
2398 }
2399 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2400 SIMD register, which is currently broken. While we get GCC
2401 to emit proper RTL for these cases, let's dump to memory. */
2402 else if (VECTOR_MODE_P (GET_MODE (dst))
2403 && GET_CODE (src) == REG)
2404 {
2405 int slen = GET_MODE_SIZE (GET_MODE (src));
2406 rtx mem;
2407
2408 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2409 emit_move_insn (mem, src);
2410 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2411 }
2412 else if (CONSTANT_P (src)
2413 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2414 tmps[i] = src;
2415 else
2416 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2417 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2418 mode, mode, ssize);
2419
2420 if (BYTES_BIG_ENDIAN && shift)
2421 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2422 tmps[i], 0, OPTAB_WIDEN);
2423 }
2424
2425 emit_queue ();
2426
2427 /* Copy the extracted pieces into the proper (probable) hard regs. */
2428 for (i = start; i < XVECLEN (dst, 0); i++)
2429 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2430 }
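
/* Illustrative sketch, not part of GCC: for a trailing fragment in
   emit_group_load above, this is the number of bits the loaded piece is
   shifted left on a big-endian target so that its valid bytes stay left
   justified.  The 8-bit byte is an assumption; for an 8-byte piece at
   byte position 0 of a 6-byte block the result is 16.  */
#if 0
static int
trailing_fragment_shift (piece_bytes, ssize, bytepos)
     int piece_bytes, ssize, bytepos;
{
  /* Bytes of the piece that run past the end of the block.  */
  int excess = piece_bytes - (ssize - bytepos);
  return excess * 8;
}
#endif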
2431
2432 /* Emit code to move a block SRC to block DST, where SRC and DST are
2433 non-consecutive groups of registers, each represented by a PARALLEL. */
2434
2435 void
2436 emit_group_move (dst, src)
2437 rtx dst, src;
2438 {
2439 int i;
2440
2441 if (GET_CODE (src) != PARALLEL
2442 || GET_CODE (dst) != PARALLEL
2443 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2444 abort ();
2445
2446 /* Skip first entry if NULL. */
2447 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2448 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2449 XEXP (XVECEXP (src, 0, i), 0));
2450 }
2451
2452 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2453 registers represented by a PARALLEL. SSIZE represents the total size of
2454 block DST, or -1 if not known. */
2455
2456 void
2457 emit_group_store (orig_dst, src, ssize)
2458 rtx orig_dst, src;
2459 int ssize;
2460 {
2461 rtx *tmps, dst;
2462 int start, i;
2463
2464 if (GET_CODE (src) != PARALLEL)
2465 abort ();
2466
2467 /* Check for a NULL entry, used to indicate that the parameter goes
2468 both on the stack and in registers. */
2469 if (XEXP (XVECEXP (src, 0, 0), 0))
2470 start = 0;
2471 else
2472 start = 1;
2473
2474 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2475
2476 /* Copy the (probable) hard regs into pseudos. */
2477 for (i = start; i < XVECLEN (src, 0); i++)
2478 {
2479 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2480 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2481 emit_move_insn (tmps[i], reg);
2482 }
2483 emit_queue ();
2484
2485 /* If we won't be storing directly into memory, protect the real destination
2486 from strange tricks we might play. */
2487 dst = orig_dst;
2488 if (GET_CODE (dst) == PARALLEL)
2489 {
2490 rtx temp;
2491
2492 /* We can get a PARALLEL dst if there is a conditional expression in
2493 a return statement. In that case, the dst and src are the same,
2494 so no action is necessary. */
2495 if (rtx_equal_p (dst, src))
2496 return;
2497
2498 /* It is unclear if we can ever reach here, but we may as well handle
2499 it. Allocate a temporary, and split this into a store/load to/from
2500 the temporary. */
2501
2502 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2503 emit_group_store (temp, src, ssize);
2504 emit_group_load (dst, temp, ssize);
2505 return;
2506 }
2507 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2508 {
2509 dst = gen_reg_rtx (GET_MODE (orig_dst));
2510 /* Make life a bit easier for combine. */
2511 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2512 }
2513
2514 /* Process the pieces. */
2515 for (i = start; i < XVECLEN (src, 0); i++)
2516 {
2517 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2518 enum machine_mode mode = GET_MODE (tmps[i]);
2519 unsigned int bytelen = GET_MODE_SIZE (mode);
2520 rtx dest = dst;
2521
2522 /* Handle trailing fragments that run over the size of the struct. */
2523 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2524 {
2525 if (BYTES_BIG_ENDIAN)
2526 {
2527 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2528 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2529 tmps[i], 0, OPTAB_WIDEN);
2530 }
2531 bytelen = ssize - bytepos;
2532 }
2533
2534 if (GET_CODE (dst) == CONCAT)
2535 {
2536 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2537 dest = XEXP (dst, 0);
2538 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2539 {
2540 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2541 dest = XEXP (dst, 1);
2542 }
2543 else if (bytepos == 0 && XVECLEN (src, 0))
2544 {
2545 dest = assign_stack_temp (GET_MODE (dest),
2546 GET_MODE_SIZE (GET_MODE (dest)), 0);
2547 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2548 tmps[i]);
2549 dst = dest;
2550 break;
2551 }
2552 else
2553 abort ();
2554 }
2555
2556 /* Optimize the access just a bit. */
2557 if (GET_CODE (dest) == MEM
2558 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2559 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2560 && bytelen == GET_MODE_SIZE (mode))
2561 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2562 else
2563 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2564 mode, tmps[i], ssize);
2565 }
2566
2567 emit_queue ();
2568
2569 /* Copy from the pseudo into the (probable) hard reg. */
2570 if (orig_dst != dst)
2571 emit_move_insn (orig_dst, dst);
2572 }
2573
2574 /* Generate code to copy a BLKmode object of TYPE out of a
2575 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2576 is null, a stack temporary is created. TGTBLK is returned.
2577
2578 The primary purpose of this routine is to handle functions
2579 that return BLKmode structures in registers. Some machines
2580 (the PA for example) want to return all small structures
2581 in registers regardless of the structure's alignment. */
2582
2583 rtx
2584 copy_blkmode_from_reg (tgtblk, srcreg, type)
2585 rtx tgtblk;
2586 rtx srcreg;
2587 tree type;
2588 {
2589 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2590 rtx src = NULL, dst = NULL;
2591 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2592 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2593
2594 if (tgtblk == 0)
2595 {
2596 tgtblk = assign_temp (build_qualified_type (type,
2597 (TYPE_QUALS (type)
2598 | TYPE_QUAL_CONST)),
2599 0, 1, 1);
2600 preserve_temp_slots (tgtblk);
2601 }
2602
2603 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2604 into a new pseudo which is a full word. */
2605
2606 if (GET_MODE (srcreg) != BLKmode
2607 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2608 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2609
2610 /* Structures whose size is not a multiple of a word are aligned
2611 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2612 machine, this means we must skip the empty high order bytes when
2613 calculating the bit offset. */
2614 if (BYTES_BIG_ENDIAN
2615 && bytes % UNITS_PER_WORD)
2616 big_endian_correction
2617 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
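  /* For example (illustrative figures): with 32-bit words and a 2-byte
     structure, bytes % UNITS_PER_WORD is 2, so the correction is
     32 - 2 * 8 = 16 bits of empty high-order space to skip.  */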
2618
2619 /* Copy the structure BITSIZE bits at a time.
2620
2621 We could probably emit more efficient code for machines which do not use
2622 strict alignment, but it doesn't seem worth the effort at the current
2623 time. */
2624 for (bitpos = 0, xbitpos = big_endian_correction;
2625 bitpos < bytes * BITS_PER_UNIT;
2626 bitpos += bitsize, xbitpos += bitsize)
2627 {
2628 /* We need a new source operand each time xbitpos is on a
2629 word boundary and when xbitpos == big_endian_correction
2630 (the first time through). */
2631 if (xbitpos % BITS_PER_WORD == 0
2632 || xbitpos == big_endian_correction)
2633 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2634 GET_MODE (srcreg));
2635
2636 /* We need a new destination operand each time bitpos is on
2637 a word boundary. */
2638 if (bitpos % BITS_PER_WORD == 0)
2639 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2640
2641 /* Use xbitpos for the source extraction (right justified) and
2642 bitpos for the destination store (left justified). */
2643 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2644 extract_bit_field (src, bitsize,
2645 xbitpos % BITS_PER_WORD, 1,
2646 NULL_RTX, word_mode, word_mode,
2647 BITS_PER_WORD),
2648 BITS_PER_WORD);
2649 }
2650
2651 return tgtblk;
2652 }
2653
2654 /* Add a USE expression for REG to the (possibly empty) list pointed
2655 to by CALL_FUSAGE. REG must denote a hard register. */
2656
2657 void
2658 use_reg (call_fusage, reg)
2659 rtx *call_fusage, reg;
2660 {
2661 if (GET_CODE (reg) != REG
2662 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2663 abort ();
2664
2665 *call_fusage
2666 = gen_rtx_EXPR_LIST (VOIDmode,
2667 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2668 }
2669
2670 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2671 starting at REGNO. All of these registers must be hard registers. */
2672
2673 void
2674 use_regs (call_fusage, regno, nregs)
2675 rtx *call_fusage;
2676 int regno;
2677 int nregs;
2678 {
2679 int i;
2680
2681 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2682 abort ();
2683
2684 for (i = 0; i < nregs; i++)
2685 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2686 }
2687
2688 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2689 PARALLEL REGS. This is for calls that pass values in multiple
2690 non-contiguous locations. The Irix 6 ABI has examples of this. */
2691
2692 void
2693 use_group_regs (call_fusage, regs)
2694 rtx *call_fusage;
2695 rtx regs;
2696 {
2697 int i;
2698
2699 for (i = 0; i < XVECLEN (regs, 0); i++)
2700 {
2701 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2702
2703 /* A NULL entry means the parameter goes both on the stack and in
2704 registers. This can also be a MEM for targets that pass values
2705 partially on the stack and partially in registers. */
2706 if (reg != 0 && GET_CODE (reg) == REG)
2707 use_reg (call_fusage, reg);
2708 }
2709 }
2710 \f
2711
2712 /* Determine whether the LEN bytes generated by CONSTFUN can be
2713 stored to memory using several move instructions. CONSTFUNDATA is
2714 a pointer which will be passed as an argument in every CONSTFUN call.
2715 ALIGN is maximum alignment we can assume. Return nonzero if a
2716 call to store_by_pieces should succeed. */
2717
2718 int
2719 can_store_by_pieces (len, constfun, constfundata, align)
2720 unsigned HOST_WIDE_INT len;
2721 rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
2722 void *constfundata;
2723 unsigned int align;
2724 {
2725 unsigned HOST_WIDE_INT max_size, l;
2726 HOST_WIDE_INT offset = 0;
2727 enum machine_mode mode, tmode;
2728 enum insn_code icode;
2729 int reverse;
2730 rtx cst;
2731
2732 if (! STORE_BY_PIECES_P (len, align))
2733 return 0;
2734
2735 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2736 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2737 align = MOVE_MAX * BITS_PER_UNIT;
2738
2739 /* We would first store what we can in the largest integer mode, then go to
2740 successively smaller modes. */
2741
2742 for (reverse = 0;
2743 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2744 reverse++)
2745 {
2746 l = len;
2747 mode = VOIDmode;
2748 max_size = STORE_MAX_PIECES + 1;
2749 while (max_size > 1)
2750 {
2751 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2752 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2753 if (GET_MODE_SIZE (tmode) < max_size)
2754 mode = tmode;
2755
2756 if (mode == VOIDmode)
2757 break;
2758
2759 icode = mov_optab->handlers[(int) mode].insn_code;
2760 if (icode != CODE_FOR_nothing
2761 && align >= GET_MODE_ALIGNMENT (mode))
2762 {
2763 unsigned int size = GET_MODE_SIZE (mode);
2764
2765 while (l >= size)
2766 {
2767 if (reverse)
2768 offset -= size;
2769
2770 cst = (*constfun) (constfundata, offset, mode);
2771 if (!LEGITIMATE_CONSTANT_P (cst))
2772 return 0;
2773
2774 if (!reverse)
2775 offset += size;
2776
2777 l -= size;
2778 }
2779 }
2780
2781 max_size = GET_MODE_SIZE (mode);
2782 }
2783
2784 /* The code above should have handled everything. */
2785 if (l != 0)
2786 abort ();
2787 }
2788
2789 return 1;
2790 }
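
/* Standalone sketch, not part of GCC, of the "largest mode first, then
   successively smaller" strategy used above.  The piece sizes 8/4/2/1
   and the example length are assumptions; the real code also honors
   alignment and the target's move patterns.  For len = 11 this prints
   stores of 8, 2 and 1 bytes.  */
#if 0
#include <stdio.h>

int
main ()
{
  unsigned long len = 11;
  unsigned long size;

  for (size = 8; size >= 1 && len > 0; size /= 2)
    while (len >= size)
      {
	printf ("store %lu bytes\n", size);
	len -= size;
      }
  return 0;
}
#endif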
2791
2792 /* Generate several move instructions to store LEN bytes generated by
2793 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2794 pointer which will be passed as an argument in every CONSTFUN call.
2795 ALIGN is maximum alignment we can assume.
2796 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2797 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2798 stpcpy. */
2799
2800 rtx
2801 store_by_pieces (to, len, constfun, constfundata, align, endp)
2802 rtx to;
2803 unsigned HOST_WIDE_INT len;
2804 rtx (*constfun) PARAMS ((void *, HOST_WIDE_INT, enum machine_mode));
2805 void *constfundata;
2806 unsigned int align;
2807 int endp;
2808 {
2809 struct store_by_pieces data;
2810
2811 if (! STORE_BY_PIECES_P (len, align))
2812 abort ();
2813 to = protect_from_queue (to, 1);
2814 data.constfun = constfun;
2815 data.constfundata = constfundata;
2816 data.len = len;
2817 data.to = to;
2818 store_by_pieces_1 (&data, align);
2819 if (endp)
2820 {
2821 rtx to1;
2822
2823 if (data.reverse)
2824 abort ();
2825 if (data.autinc_to)
2826 {
2827 if (endp == 2)
2828 {
2829 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2830 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2831 else
2832 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2833 -1));
2834 }
2835 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2836 data.offset);
2837 }
2838 else
2839 {
2840 if (endp == 2)
2841 --data.offset;
2842 to1 = adjust_address (data.to, QImode, data.offset);
2843 }
2844 return to1;
2845 }
2846 else
2847 return data.to;
2848 }
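
/* Hypothetical usage, not part of GCC: fill a 16-byte BLKmode MEM with
   all-one bytes through the CONSTFUN interface above.  piece_ones and
   fill_with_ones are made-up names for the example.  */
#if 0
static rtx
piece_ones (data, offset, mode)
     void *data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* constm1_rtx is the all-one-bits constant in any integer mode.  */
  return constm1_rtx;
}

static void
fill_with_ones (to, align)
     rtx to;
     unsigned int align;
{
  if (can_store_by_pieces (16, piece_ones, NULL, align))
    store_by_pieces (to, 16, piece_ones, NULL, align, 0);
}
#endif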
2849
2850 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2851 rtx with BLKmode). The caller must pass TO through protect_from_queue
2852 before calling. ALIGN is maximum alignment we can assume. */
2853
2854 static void
2855 clear_by_pieces (to, len, align)
2856 rtx to;
2857 unsigned HOST_WIDE_INT len;
2858 unsigned int align;
2859 {
2860 struct store_by_pieces data;
2861
2862 data.constfun = clear_by_pieces_1;
2863 data.constfundata = NULL;
2864 data.len = len;
2865 data.to = to;
2866 store_by_pieces_1 (&data, align);
2867 }
2868
2869 /* Callback routine for clear_by_pieces.
2870 Return const0_rtx unconditionally. */
2871
2872 static rtx
2873 clear_by_pieces_1 (data, offset, mode)
2874 void *data ATTRIBUTE_UNUSED;
2875 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2876 enum machine_mode mode ATTRIBUTE_UNUSED;
2877 {
2878 return const0_rtx;
2879 }
2880
2881 /* Subroutine of clear_by_pieces and store_by_pieces.
2882 Generate several move instructions to store LEN bytes of block TO. (A MEM
2883 rtx with BLKmode). The caller must pass TO through protect_from_queue
2884 before calling. ALIGN is maximum alignment we can assume. */
2885
2886 static void
2887 store_by_pieces_1 (data, align)
2888 struct store_by_pieces *data;
2889 unsigned int align;
2890 {
2891 rtx to_addr = XEXP (data->to, 0);
2892 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2893 enum machine_mode mode = VOIDmode, tmode;
2894 enum insn_code icode;
2895
2896 data->offset = 0;
2897 data->to_addr = to_addr;
2898 data->autinc_to
2899 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2900 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2901
2902 data->explicit_inc_to = 0;
2903 data->reverse
2904 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2905 if (data->reverse)
2906 data->offset = data->len;
2907
2908 /* If storing requires more than two move insns,
2909 copy addresses to registers (to make displacements shorter)
2910 and use post-increment if available. */
2911 if (!data->autinc_to
2912 && move_by_pieces_ninsns (data->len, align) > 2)
2913 {
2914 /* Determine the main mode we'll be using. */
2915 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2916 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2917 if (GET_MODE_SIZE (tmode) < max_size)
2918 mode = tmode;
2919
2920 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2921 {
2922 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2923 data->autinc_to = 1;
2924 data->explicit_inc_to = -1;
2925 }
2926
2927 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2928 && ! data->autinc_to)
2929 {
2930 data->to_addr = copy_addr_to_reg (to_addr);
2931 data->autinc_to = 1;
2932 data->explicit_inc_to = 1;
2933 }
2934
2935 if ( !data->autinc_to && CONSTANT_P (to_addr))
2936 data->to_addr = copy_addr_to_reg (to_addr);
2937 }
2938
2939 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2940 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2941 align = MOVE_MAX * BITS_PER_UNIT;
2942
2943 /* First store what we can in the largest integer mode, then go to
2944 successively smaller modes. */
2945
2946 while (max_size > 1)
2947 {
2948 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2949 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2950 if (GET_MODE_SIZE (tmode) < max_size)
2951 mode = tmode;
2952
2953 if (mode == VOIDmode)
2954 break;
2955
2956 icode = mov_optab->handlers[(int) mode].insn_code;
2957 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2958 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2959
2960 max_size = GET_MODE_SIZE (mode);
2961 }
2962
2963 /* The code above should have handled everything. */
2964 if (data->len != 0)
2965 abort ();
2966 }
2967
2968 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2969 with move instructions for mode MODE. GENFUN is the gen_... function
2970 to make a move insn for that mode. DATA has all the other info. */
2971
2972 static void
2973 store_by_pieces_2 (genfun, mode, data)
2974 rtx (*genfun) PARAMS ((rtx, ...));
2975 enum machine_mode mode;
2976 struct store_by_pieces *data;
2977 {
2978 unsigned int size = GET_MODE_SIZE (mode);
2979 rtx to1, cst;
2980
2981 while (data->len >= size)
2982 {
2983 if (data->reverse)
2984 data->offset -= size;
2985
2986 if (data->autinc_to)
2987 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2988 data->offset);
2989 else
2990 to1 = adjust_address (data->to, mode, data->offset);
2991
2992 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2993 emit_insn (gen_add2_insn (data->to_addr,
2994 GEN_INT (-(HOST_WIDE_INT) size)));
2995
2996 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2997 emit_insn ((*genfun) (to1, cst));
2998
2999 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
3000 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
3001
3002 if (! data->reverse)
3003 data->offset += size;
3004
3005 data->len -= size;
3006 }
3007 }
3008 \f
3009 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
3010 its length in bytes. */
3011
3012 rtx
3013 clear_storage (object, size)
3014 rtx object;
3015 rtx size;
3016 {
3017 rtx retval = 0;
3018 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
3019 : GET_MODE_ALIGNMENT (GET_MODE (object)));
3020
3021 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3022 just move a zero. Otherwise, do this a piece at a time. */
3023 if (GET_MODE (object) != BLKmode
3024 && GET_CODE (size) == CONST_INT
3025 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
3026 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
3027 else
3028 {
3029 object = protect_from_queue (object, 1);
3030 size = protect_from_queue (size, 0);
3031
3032 if (GET_CODE (size) == CONST_INT
3033 && CLEAR_BY_PIECES_P (INTVAL (size), align))
3034 clear_by_pieces (object, INTVAL (size), align);
3035 else if (clear_storage_via_clrstr (object, size, align))
3036 ;
3037 else
3038 retval = clear_storage_via_libcall (object, size);
3039 }
3040
3041 return retval;
3042 }
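
/* Hypothetical usage, not part of GCC: zero a fresh 32-byte BLKmode
   stack temporary with clear_storage.  zero_temp_sketch is a made-up
   name for the example.  */
#if 0
static void
zero_temp_sketch ()
{
  rtx mem = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (mem, GEN_INT (32));
}
#endif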
3043
3044 /* A subroutine of clear_storage. Expand a clrstr pattern;
3045 return true if successful. */
3046
3047 static bool
3048 clear_storage_via_clrstr (object, size, align)
3049 rtx object, size;
3050 unsigned int align;
3051 {
3052 /* Try the most limited insn first, because there's no point
3053 including more than one in the machine description unless
3054 the more limited one has some advantage. */
3055
3056 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3057 enum machine_mode mode;
3058
3059 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3060 mode = GET_MODE_WIDER_MODE (mode))
3061 {
3062 enum insn_code code = clrstr_optab[(int) mode];
3063 insn_operand_predicate_fn pred;
3064
3065 if (code != CODE_FOR_nothing
3066 /* We don't need MODE to be narrower than
3067 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3068 the mode mask, as it is returned by the macro, it will
3069 definitely be less than the actual mode mask. */
3070 && ((GET_CODE (size) == CONST_INT
3071 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3072 <= (GET_MODE_MASK (mode) >> 1)))
3073 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3074 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3075 || (*pred) (object, BLKmode))
3076 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3077 || (*pred) (opalign, VOIDmode)))
3078 {
3079 rtx op1;
3080 rtx last = get_last_insn ();
3081 rtx pat;
3082
3083 op1 = convert_to_mode (mode, size, 1);
3084 pred = insn_data[(int) code].operand[1].predicate;
3085 if (pred != 0 && ! (*pred) (op1, mode))
3086 op1 = copy_to_mode_reg (mode, op1);
3087
3088 pat = GEN_FCN ((int) code) (object, op1, opalign);
3089 if (pat)
3090 {
3091 emit_insn (pat);
3092 return true;
3093 }
3094 else
3095 delete_insns_since (last);
3096 }
3097 }
3098
3099 return false;
3100 }
3101
3102 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3103 Return the return value of memset, 0 otherwise. */
3104
3105 static rtx
3106 clear_storage_via_libcall (object, size)
3107 rtx object, size;
3108 {
3109 tree call_expr, arg_list, fn, object_tree, size_tree;
3110 enum machine_mode size_mode;
3111 rtx retval;
3112
3113 /* OBJECT or SIZE may have been passed through protect_from_queue.
3114
3115 It is unsafe to save the value generated by protect_from_queue
3116 and reuse it later. Consider what happens if emit_queue is
3117 called before the return value from protect_from_queue is used.
3118
3119 Expansion of the CALL_EXPR below will call emit_queue before
3120 we are finished emitting RTL for argument setup. So if we are
3121 not careful we could get the wrong value for an argument.
3122
3123 To avoid this problem we go ahead and emit code to copy OBJECT
3124 and SIZE into new pseudos. We can then place those new pseudos
3125 into an RTL_EXPR and use them later, even after a call to
3126 emit_queue.
3127
3128 Note this is not strictly needed for library calls since they
3129 do not call emit_queue before loading their arguments. However,
3130 we may need to have library calls call emit_queue in the future
3131 since failing to do so could cause problems for targets which
3132 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3133
3134 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3135
3136 if (TARGET_MEM_FUNCTIONS)
3137 size_mode = TYPE_MODE (sizetype);
3138 else
3139 size_mode = TYPE_MODE (unsigned_type_node);
3140 size = convert_to_mode (size_mode, size, 1);
3141 size = copy_to_mode_reg (size_mode, size);
3142
3143 /* It is incorrect to use the libcall calling conventions to call
3144 memset in this context. This could be a user call to memset and
3145 the user may wish to examine the return value from memset. For
3146 targets where libcalls and normal calls have different conventions
3147 for returning pointers, we could end up generating incorrect code.
3148
3149 For convenience, we generate the call to bzero this way as well. */
3150
3151 object_tree = make_tree (ptr_type_node, object);
3152 if (TARGET_MEM_FUNCTIONS)
3153 size_tree = make_tree (sizetype, size);
3154 else
3155 size_tree = make_tree (unsigned_type_node, size);
3156
3157 fn = clear_storage_libcall_fn (true);
3158 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3159 if (TARGET_MEM_FUNCTIONS)
3160 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3161 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3162
3163 /* Now we have to build up the CALL_EXPR itself. */
3164 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3165 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3166 call_expr, arg_list, NULL_TREE);
3167 TREE_SIDE_EFFECTS (call_expr) = 1;
3168
3169 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3170
3171 /* If we are initializing a readonly value, show the above call
3172 clobbered it. Otherwise, a load from it may erroneously be
3173 hoisted from a loop. */
3174 if (RTX_UNCHANGING_P (object))
3175 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3176
3177 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3178 }
3179
3180 /* A subroutine of clear_storage_via_libcall. Create the tree node
3181 for the function we use for block clears. The first time FOR_CALL
3182 is true, we call assemble_external. */
3183
3184 static GTY(()) tree block_clear_fn;
3185
3186 void
3187 init_block_clear_fn (asmspec)
3188 const char *asmspec;
3189 {
3190 if (!block_clear_fn)
3191 {
3192 tree fn, args;
3193
3194 if (TARGET_MEM_FUNCTIONS)
3195 {
3196 fn = get_identifier ("memset");
3197 args = build_function_type_list (ptr_type_node, ptr_type_node,
3198 integer_type_node, sizetype,
3199 NULL_TREE);
3200 }
3201 else
3202 {
3203 fn = get_identifier ("bzero");
3204 args = build_function_type_list (void_type_node, ptr_type_node,
3205 unsigned_type_node, NULL_TREE);
3206 }
3207
3208 fn = build_decl (FUNCTION_DECL, fn, args);
3209 DECL_EXTERNAL (fn) = 1;
3210 TREE_PUBLIC (fn) = 1;
3211 DECL_ARTIFICIAL (fn) = 1;
3212 TREE_NOTHROW (fn) = 1;
3213
3214 block_clear_fn = fn;
3215 }
3216
3217 if (asmspec)
3218 {
3219 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3220 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3221 }
3222 }
3223
3224 static tree
3225 clear_storage_libcall_fn (for_call)
3226 int for_call;
3227 {
3228 static bool emitted_extern;
3229
3230 if (!block_clear_fn)
3231 init_block_clear_fn (NULL);
3232
3233 if (for_call && !emitted_extern)
3234 {
3235 emitted_extern = true;
3236 make_decl_rtl (block_clear_fn, NULL);
3237 assemble_external (block_clear_fn);
3238 }
3239
3240 return block_clear_fn;
3241 }
3242 \f
3243 /* Generate code to copy Y into X.
3244 Both Y and X must have the same mode, except that
3245 Y can be a constant with VOIDmode.
3246 This mode cannot be BLKmode; use emit_block_move for that.
3247
3248 Return the last instruction emitted. */
3249
3250 rtx
3251 emit_move_insn (x, y)
3252 rtx x, y;
3253 {
3254 enum machine_mode mode = GET_MODE (x);
3255 rtx y_cst = NULL_RTX;
3256 rtx last_insn, set;
3257
3258 x = protect_from_queue (x, 1);
3259 y = protect_from_queue (y, 0);
3260
3261 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3262 abort ();
3263
3264 /* Never force constant_p_rtx to memory. */
3265 if (GET_CODE (y) == CONSTANT_P_RTX)
3266 ;
3267 else if (CONSTANT_P (y))
3268 {
3269 if (optimize
3270 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3271 && (last_insn = compress_float_constant (x, y)))
3272 return last_insn;
3273
3274 y_cst = y;
3275
3276 if (!LEGITIMATE_CONSTANT_P (y))
3277 {
3278 y = force_const_mem (mode, y);
3279
3280 /* If the target's cannot_force_const_mem prevented the spill,
3281 assume that the target's move expanders will also take care
3282 of the non-legitimate constant. */
3283 if (!y)
3284 y = y_cst;
3285 }
3286 }
3287
3288 /* If X or Y are memory references, verify that their addresses are valid
3289 for the machine. */
3290 if (GET_CODE (x) == MEM
3291 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3292 && ! push_operand (x, GET_MODE (x)))
3293 || (flag_force_addr
3294 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3295 x = validize_mem (x);
3296
3297 if (GET_CODE (y) == MEM
3298 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3299 || (flag_force_addr
3300 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3301 y = validize_mem (y);
3302
3303 if (mode == BLKmode)
3304 abort ();
3305
3306 last_insn = emit_move_insn_1 (x, y);
3307
3308 if (y_cst && GET_CODE (x) == REG
3309 && (set = single_set (last_insn)) != NULL_RTX
3310 && SET_DEST (set) == x
3311 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3312 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3313
3314 return last_insn;
3315 }
3316
3317 /* Low level part of emit_move_insn.
3318 Called just like emit_move_insn, but assumes X and Y
3319 are basically valid. */
3320
3321 rtx
3322 emit_move_insn_1 (x, y)
3323 rtx x, y;
3324 {
3325 enum machine_mode mode = GET_MODE (x);
3326 enum machine_mode submode;
3327 enum mode_class class = GET_MODE_CLASS (mode);
3328
3329 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3330 abort ();
3331
3332 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3333 return
3334 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3335
3336 /* Expand complex moves by moving real part and imag part, if possible. */
3337 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3338 && BLKmode != (submode = GET_MODE_INNER (mode))
3339 && (mov_optab->handlers[(int) submode].insn_code
3340 != CODE_FOR_nothing))
3341 {
3342 /* Don't split destination if it is a stack push. */
3343 int stack = push_operand (x, GET_MODE (x));
3344
3345 #ifdef PUSH_ROUNDING
3346 /* In case we output to the stack, but the size is smaller than the
3347 machine can push exactly, we need to use move instructions. */
3348 if (stack
3349 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3350 != GET_MODE_SIZE (submode)))
3351 {
3352 rtx temp;
3353 HOST_WIDE_INT offset1, offset2;
3354
3355 /* Do not use anti_adjust_stack, since we don't want to update
3356 stack_pointer_delta. */
3357 temp = expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3359 sub_optab,
3360 #else
3361 add_optab,
3362 #endif
3363 stack_pointer_rtx,
3364 GEN_INT
3365 (PUSH_ROUNDING
3366 (GET_MODE_SIZE (GET_MODE (x)))),
3367 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3368
3369 if (temp != stack_pointer_rtx)
3370 emit_move_insn (stack_pointer_rtx, temp);
3371
3372 #ifdef STACK_GROWS_DOWNWARD
3373 offset1 = 0;
3374 offset2 = GET_MODE_SIZE (submode);
3375 #else
3376 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3377 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3378 + GET_MODE_SIZE (submode));
3379 #endif
3380
3381 emit_move_insn (change_address (x, submode,
3382 gen_rtx_PLUS (Pmode,
3383 stack_pointer_rtx,
3384 GEN_INT (offset1))),
3385 gen_realpart (submode, y));
3386 emit_move_insn (change_address (x, submode,
3387 gen_rtx_PLUS (Pmode,
3388 stack_pointer_rtx,
3389 GEN_INT (offset2))),
3390 gen_imagpart (submode, y));
3391 }
3392 else
3393 #endif
3394 /* If this is a stack, push the highpart first, so it
3395 will be in the argument order.
3396
3397 In that case, change_address is used only to convert
3398 the mode, not to change the address. */
3399 if (stack)
3400 {
3401 /* Note that the real part always precedes the imag part in memory
3402 regardless of machine's endianness. */
3403 #ifdef STACK_GROWS_DOWNWARD
3404 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3405 gen_imagpart (submode, y));
3406 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3407 gen_realpart (submode, y));
3408 #else
3409 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3410 gen_realpart (submode, y));
3411 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3412 gen_imagpart (submode, y));
3413 #endif
3414 }
3415 else
3416 {
3417 rtx realpart_x, realpart_y;
3418 rtx imagpart_x, imagpart_y;
3419
3420 /* If this is a complex value with each part being smaller than a
3421 word, the usual calling sequence will likely pack the pieces into
3422 a single register. Unfortunately, SUBREG of hard registers only
3423 deals in terms of words, so we have a problem converting input
3424 arguments to the CONCAT of two registers that is used elsewhere
3425 for complex values. If this is before reload, we can copy it into
3426 memory and reload. FIXME, we should see about using extract and
3427 insert on integer registers, but complex short and complex char
3428 variables should be rarely used. */
3429 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3430 && (reload_in_progress | reload_completed) == 0)
3431 {
3432 int packed_dest_p
3433 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3434 int packed_src_p
3435 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3436
3437 if (packed_dest_p || packed_src_p)
3438 {
3439 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3440 ? MODE_FLOAT : MODE_INT);
3441
3442 enum machine_mode reg_mode
3443 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3444
3445 if (reg_mode != BLKmode)
3446 {
3447 rtx mem = assign_stack_temp (reg_mode,
3448 GET_MODE_SIZE (mode), 0);
3449 rtx cmem = adjust_address (mem, mode, 0);
3450
3451 cfun->cannot_inline
3452 = N_("function using short complex types cannot be inline");
3453
3454 if (packed_dest_p)
3455 {
3456 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3457
3458 emit_move_insn_1 (cmem, y);
3459 return emit_move_insn_1 (sreg, mem);
3460 }
3461 else
3462 {
3463 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3464
3465 emit_move_insn_1 (mem, sreg);
3466 return emit_move_insn_1 (x, cmem);
3467 }
3468 }
3469 }
3470 }
3471
3472 realpart_x = gen_realpart (submode, x);
3473 realpart_y = gen_realpart (submode, y);
3474 imagpart_x = gen_imagpart (submode, x);
3475 imagpart_y = gen_imagpart (submode, y);
3476
3477 /* Show the output dies here. This is necessary for SUBREGs
3478 of pseudos since we cannot track their lifetimes correctly;
3479 hard regs shouldn't appear here except as return values.
3480 We never want to emit such a clobber after reload. */
3481 if (x != y
3482 && ! (reload_in_progress || reload_completed)
3483 && (GET_CODE (realpart_x) == SUBREG
3484 || GET_CODE (imagpart_x) == SUBREG))
3485 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3486
3487 emit_move_insn (realpart_x, realpart_y);
3488 emit_move_insn (imagpart_x, imagpart_y);
3489 }
3490
3491 return get_last_insn ();
3492 }
3493
3494 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3495 find a mode to do it in. If we have a movcc, use it. Otherwise,
3496 find the MODE_INT mode of the same width. */
3497 else if (GET_MODE_CLASS (mode) == MODE_CC
3498 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3499 {
3500 enum insn_code insn_code;
3501 enum machine_mode tmode = VOIDmode;
3502 rtx x1 = x, y1 = y;
3503
3504 if (mode != CCmode
3505 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3506 tmode = CCmode;
3507 else
3508 for (tmode = QImode; tmode != VOIDmode;
3509 tmode = GET_MODE_WIDER_MODE (tmode))
3510 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3511 break;
3512
3513 if (tmode == VOIDmode)
3514 abort ();
3515
3516 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3517 may call change_address which is not appropriate if we were
3518 called when a reload was in progress. We don't have to worry
3519 about changing the address since the size in bytes is supposed to
3520 be the same. Copy the MEM to change the mode and move any
3521 substitutions from the old MEM to the new one. */
3522
3523 if (reload_in_progress)
3524 {
3525 x = gen_lowpart_common (tmode, x1);
3526 if (x == 0 && GET_CODE (x1) == MEM)
3527 {
3528 x = adjust_address_nv (x1, tmode, 0);
3529 copy_replacements (x1, x);
3530 }
3531
3532 y = gen_lowpart_common (tmode, y1);
3533 if (y == 0 && GET_CODE (y1) == MEM)
3534 {
3535 y = adjust_address_nv (y1, tmode, 0);
3536 copy_replacements (y1, y);
3537 }
3538 }
3539 else
3540 {
3541 x = gen_lowpart (tmode, x);
3542 y = gen_lowpart (tmode, y);
3543 }
3544
3545 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3546 return emit_insn (GEN_FCN (insn_code) (x, y));
3547 }
3548
3549 /* This will handle any multi-word or full-word mode that lacks a move_insn
3550 pattern. However, you will get better code if you define such patterns,
3551 even if they must turn into multiple assembler instructions. */
3552 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3553 {
3554 rtx last_insn = 0;
3555 rtx seq, inner;
3556 int need_clobber;
3557 int i;
3558
3559 #ifdef PUSH_ROUNDING
3560
3561 /* If X is a push on the stack, do the push now and replace
3562 X with a reference to the stack pointer. */
3563 if (push_operand (x, GET_MODE (x)))
3564 {
3565 rtx temp;
3566 enum rtx_code code;
3567
3568 /* Do not use anti_adjust_stack, since we don't want to update
3569 stack_pointer_delta. */
3570 temp = expand_binop (Pmode,
3571 #ifdef STACK_GROWS_DOWNWARD
3572 sub_optab,
3573 #else
3574 add_optab,
3575 #endif
3576 stack_pointer_rtx,
3577 GEN_INT
3578 (PUSH_ROUNDING
3579 (GET_MODE_SIZE (GET_MODE (x)))),
3580 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3581
3582 if (temp != stack_pointer_rtx)
3583 emit_move_insn (stack_pointer_rtx, temp);
3584
3585 code = GET_CODE (XEXP (x, 0));
3586
3587 /* Just hope that small offsets off SP are OK. */
3588 if (code == POST_INC)
3589 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3590 GEN_INT (-((HOST_WIDE_INT)
3591 GET_MODE_SIZE (GET_MODE (x)))));
3592 else if (code == POST_DEC)
3593 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3594 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3595 else
3596 temp = stack_pointer_rtx;
3597
3598 x = change_address (x, VOIDmode, temp);
3599 }
3600 #endif
3601
3602 /* If we are in reload, see if either operand is a MEM whose address
3603 is scheduled for replacement. */
3604 if (reload_in_progress && GET_CODE (x) == MEM
3605 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3606 x = replace_equiv_address_nv (x, inner);
3607 if (reload_in_progress && GET_CODE (y) == MEM
3608 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3609 y = replace_equiv_address_nv (y, inner);
3610
3611 start_sequence ();
3612
3613 need_clobber = 0;
3614 for (i = 0;
3615 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3616 i++)
3617 {
3618 rtx xpart = operand_subword (x, i, 1, mode);
3619 rtx ypart = operand_subword (y, i, 1, mode);
3620
3621 /* If we can't get a part of Y, put Y into memory if it is a
3622 constant. Otherwise, force it into a register. If we still
3623 can't get a part of Y, abort. */
3624 if (ypart == 0 && CONSTANT_P (y))
3625 {
3626 y = force_const_mem (mode, y);
3627 ypart = operand_subword (y, i, 1, mode);
3628 }
3629 else if (ypart == 0)
3630 ypart = operand_subword_force (y, i, mode);
3631
3632 if (xpart == 0 || ypart == 0)
3633 abort ();
3634
3635 need_clobber |= (GET_CODE (xpart) == SUBREG);
3636
3637 last_insn = emit_move_insn (xpart, ypart);
3638 }
3639
3640 seq = get_insns ();
3641 end_sequence ();
3642
3643 /* Show the output dies here. This is necessary for SUBREGs
3644 of pseudos since we cannot track their lifetimes correctly;
3645 hard regs shouldn't appear here except as return values.
3646 We never want to emit such a clobber after reload. */
3647 if (x != y
3648 && ! (reload_in_progress || reload_completed)
3649 && need_clobber != 0)
3650 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3651
3652 emit_insn (seq);
3653
3654 return last_insn;
3655 }
3656 else
3657 abort ();
3658 }
3659
3660 /* If Y is representable exactly in a narrower mode, and the target can
3661 perform the extension directly from constant or memory, then emit the
3662 move as an extension. */
3663
3664 static rtx
3665 compress_float_constant (x, y)
3666 rtx x, y;
3667 {
3668 enum machine_mode dstmode = GET_MODE (x);
3669 enum machine_mode orig_srcmode = GET_MODE (y);
3670 enum machine_mode srcmode;
3671 REAL_VALUE_TYPE r;
3672
3673 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3674
3675 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3676 srcmode != orig_srcmode;
3677 srcmode = GET_MODE_WIDER_MODE (srcmode))
3678 {
3679 enum insn_code ic;
3680 rtx trunc_y, last_insn;
3681
3682 /* Skip if the target can't extend this way. */
3683 ic = can_extend_p (dstmode, srcmode, 0);
3684 if (ic == CODE_FOR_nothing)
3685 continue;
3686
3687 /* Skip if the narrowed value isn't exact. */
3688 if (! exact_real_truncate (srcmode, &r))
3689 continue;
3690
3691 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3692
3693 if (LEGITIMATE_CONSTANT_P (trunc_y))
3694 {
3695 /* Skip if the target needs extra instructions to perform
3696 the extension. */
3697 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3698 continue;
3699 }
3700 else if (float_extend_from_mem[dstmode][srcmode])
3701 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3702 else
3703 continue;
3704
3705 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3706 last_insn = get_last_insn ();
3707
3708 if (GET_CODE (x) == REG)
3709 set_unique_reg_note (last_insn, REG_EQUAL, y);
3710
3711 return last_insn;
3712 }
3713
3714 return NULL_RTX;
3715 }
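
/* Standalone illustration, not part of GCC, of the exactness test that
   compress_float_constant relies on: 1.5 survives truncation to the
   narrower format, so a double load of 1.5 may be emitted as a
   float-to-double extension, while 0.1 does not survive it.  Host float
   and double stand in for SFmode and DFmode here.  */
#if 0
#include <stdio.h>

int
main ()
{
  double exact = 1.5, inexact = 0.1;

  printf ("%d %d\n",
	  (double) (float) exact == exact,	/* prints 1 */
	  (double) (float) inexact == inexact);	/* prints 0 */
  return 0;
}
#endif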
3716 \f
3717 /* Pushing data onto the stack. */
3718
3719 /* Push a block of length SIZE (perhaps variable)
3720 and return an rtx to address the beginning of the block.
3721 Note that it is not possible for the value returned to be a QUEUED.
3722 The value may be virtual_outgoing_args_rtx.
3723
3724 EXTRA is the number of bytes of padding to push in addition to SIZE.
3725 BELOW nonzero means this padding comes at low addresses;
3726 otherwise, the padding comes at high addresses. */
3727
3728 rtx
3729 push_block (size, extra, below)
3730 rtx size;
3731 int extra, below;
3732 {
3733 rtx temp;
3734
3735 size = convert_modes (Pmode, ptr_mode, size, 1);
3736 if (CONSTANT_P (size))
3737 anti_adjust_stack (plus_constant (size, extra));
3738 else if (GET_CODE (size) == REG && extra == 0)
3739 anti_adjust_stack (size);
3740 else
3741 {
3742 temp = copy_to_mode_reg (Pmode, size);
3743 if (extra != 0)
3744 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3745 temp, 0, OPTAB_LIB_WIDEN);
3746 anti_adjust_stack (temp);
3747 }
3748
3749 #ifndef STACK_GROWS_DOWNWARD
3750 if (0)
3751 #else
3752 if (1)
3753 #endif
3754 {
3755 temp = virtual_outgoing_args_rtx;
3756 if (extra != 0 && below)
3757 temp = plus_constant (temp, extra);
3758 }
3759 else
3760 {
3761 if (GET_CODE (size) == CONST_INT)
3762 temp = plus_constant (virtual_outgoing_args_rtx,
3763 -INTVAL (size) - (below ? 0 : extra));
3764 else if (extra != 0 && !below)
3765 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3766 negate_rtx (Pmode, plus_constant (size, extra)));
3767 else
3768 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3769 negate_rtx (Pmode, size));
3770 }
3771
3772 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3773 }
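/* Illustrative example (assuming a downward-growing stack, sizes chosen only
   for illustration): push_block (GEN_INT (16), 4, 1) adjusts the stack by
   20 bytes via anti_adjust_stack and returns an address equivalent to
   virtual_outgoing_args_rtx + 4, so the 16-byte block sits above the 4 bytes
   of padding at the low addresses.  With BELOW == 0 the same adjustment is
   made, but virtual_outgoing_args_rtx itself is returned and the padding
   ends up above the block.  */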
3774
3775 #ifdef PUSH_ROUNDING
3776
3777 /* Emit single push insn. */
3778
3779 static void
3780 emit_single_push_insn (mode, x, type)
3781 rtx x;
3782 enum machine_mode mode;
3783 tree type;
3784 {
3785 rtx dest_addr;
3786 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3787 rtx dest;
3788 enum insn_code icode;
3789 insn_operand_predicate_fn pred;
3790
3791 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3792 /* If there is a push pattern, use it. Otherwise fall back to the old way of
3793 handing a MEM that represents the push operation to the move expander. */
3794 icode = push_optab->handlers[(int) mode].insn_code;
3795 if (icode != CODE_FOR_nothing)
3796 {
3797 if (((pred = insn_data[(int) icode].operand[0].predicate)
3798 && !((*pred) (x, mode))))
3799 x = force_reg (mode, x);
3800 emit_insn (GEN_FCN (icode) (x));
3801 return;
3802 }
3803 if (GET_MODE_SIZE (mode) == rounded_size)
3804 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3805 /* If we are to pad downward, adjust the stack pointer first and
3806 then store X into the stack location using an offset. This is
3807 because emit_move_insn does not know how to pad; it does not have
3808 access to type. */
3809 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3810 {
3811 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3812 HOST_WIDE_INT offset;
3813
3814 emit_move_insn (stack_pointer_rtx,
3815 expand_binop (Pmode,
3816 #ifdef STACK_GROWS_DOWNWARD
3817 sub_optab,
3818 #else
3819 add_optab,
3820 #endif
3821 stack_pointer_rtx,
3822 GEN_INT (rounded_size),
3823 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3824
3825 offset = (HOST_WIDE_INT) padding_size;
3826 #ifdef STACK_GROWS_DOWNWARD
3827 if (STACK_PUSH_CODE == POST_DEC)
3828 /* We have already decremented the stack pointer, so get the
3829 previous value. */
3830 offset += (HOST_WIDE_INT) rounded_size;
3831 #else
3832 if (STACK_PUSH_CODE == POST_INC)
3833 /* We have already incremented the stack pointer, so get the
3834 previous value. */
3835 offset -= (HOST_WIDE_INT) rounded_size;
3836 #endif
3837 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3838 }
3839 else
3840 {
3841 #ifdef STACK_GROWS_DOWNWARD
3842 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3843 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3844 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3845 #else
3846 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3847 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3848 GEN_INT (rounded_size));
3849 #endif
3850 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3851 }
3852
3853 dest = gen_rtx_MEM (mode, dest_addr);
3854
3855 if (type != 0)
3856 {
3857 set_mem_attributes (dest, type, 1);
3858
3859 if (flag_optimize_sibling_calls)
3860 /* Function incoming arguments may overlap with sibling call
3861 outgoing arguments and we cannot allow reordering of reads
3862 from function arguments with stores to outgoing arguments
3863 of sibling calls. */
3864 set_mem_alias_set (dest, 0);
3865 }
3866 emit_move_insn (dest, x);
3867 }
3868 #endif
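/* Illustrative example (hypothetical target parameters): pushing a QImode
   value on a downward-growing stack where PUSH_ROUNDING rounds 1 byte up to
   a 4-byte slot and FUNCTION_ARG_PADDING says downward.  rounded_size is 4,
   so the stack pointer is first lowered by 4 explicitly; padding_size is 3,
   and with a PRE_DEC push convention no further correction applies, so the
   byte is stored at stack_pointer_rtx + 3, i.e. at the high end of the slot
   with the 3 padding bytes below it.  */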
3869
3870 /* Generate code to push X onto the stack, assuming it has mode MODE and
3871 type TYPE.
3872 MODE is redundant except when X is a CONST_INT (since they don't
3873 carry mode info).
3874 SIZE is an rtx for the size of data to be copied (in bytes),
3875 needed only if X is BLKmode.
3876
3877 ALIGN (in bits) is maximum alignment we can assume.
3878
3879 If PARTIAL and REG are both nonzero, then copy that many of the first
3880 words of X into registers starting with REG, and push the rest of X.
3881 The amount of space pushed is decreased by PARTIAL words,
3882 rounded *down* to a multiple of PARM_BOUNDARY.
3883 REG must be a hard register in this case.
3884 If REG is zero but PARTIAL is not, take all other actions for an
3885 argument partially in registers, but do not actually load any
3886 registers.
3887
3888 EXTRA is the amount in bytes of extra space to leave next to this arg.
3889 This is ignored if an argument block has already been allocated.
3890
3891 On a machine that lacks real push insns, ARGS_ADDR is the address of
3892 the bottom of the argument block for this call. We use indexing off there
3893 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3894 argument block has not been preallocated.
3895
3896 ARGS_SO_FAR is the size of args previously pushed for this call.
3897
3898 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3899 for arguments passed in registers. If nonzero, it will be the number
3900 of bytes required. */
3901
3902 void
3903 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3904 args_addr, args_so_far, reg_parm_stack_space,
3905 alignment_pad)
3906 rtx x;
3907 enum machine_mode mode;
3908 tree type;
3909 rtx size;
3910 unsigned int align;
3911 int partial;
3912 rtx reg;
3913 int extra;
3914 rtx args_addr;
3915 rtx args_so_far;
3916 int reg_parm_stack_space;
3917 rtx alignment_pad;
3918 {
3919 rtx xinner;
3920 enum direction stack_direction
3921 #ifdef STACK_GROWS_DOWNWARD
3922 = downward;
3923 #else
3924 = upward;
3925 #endif
3926
3927 /* Decide where to pad the argument: `downward' for below,
3928 `upward' for above, or `none' for don't pad it.
3929 Default is below for small data on big-endian machines; else above. */
3930 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3931
3932 /* Invert direction if stack is post-decrement.
3933 FIXME: why? */
3934 if (STACK_PUSH_CODE == POST_DEC)
3935 if (where_pad != none)
3936 where_pad = (where_pad == downward ? upward : downward);
3937
3938 xinner = x = protect_from_queue (x, 0);
3939
3940 if (mode == BLKmode)
3941 {
3942 /* Copy a block into the stack, entirely or partially. */
3943
3944 rtx temp;
3945 int used = partial * UNITS_PER_WORD;
3946 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3947 int skip;
3948
3949 if (size == 0)
3950 abort ();
3951
3952 used -= offset;
3953
3954 /* USED is now the # of bytes we need not copy to the stack
3955 because registers will take care of them. */
3956
3957 if (partial != 0)
3958 xinner = adjust_address (xinner, BLKmode, used);
3959
3960 /* If the partial register-part of the arg counts in its stack size,
3961 skip the part of stack space corresponding to the registers.
3962 Otherwise, start copying to the beginning of the stack space,
3963 by setting SKIP to 0. */
3964 skip = (reg_parm_stack_space == 0) ? 0 : used;
3965
3966 #ifdef PUSH_ROUNDING
3967 /* Do it with several push insns if that doesn't take lots of insns
3968 and if there is no difficulty with push insns that skip bytes
3969 on the stack for alignment purposes. */
3970 if (args_addr == 0
3971 && PUSH_ARGS
3972 && GET_CODE (size) == CONST_INT
3973 && skip == 0
3974 && MEM_ALIGN (xinner) >= align
3975 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3976 /* Here we avoid the case of a structure whose weak alignment
3977 forces many pushes of a small amount of data,
3978 and such small pushes do rounding that causes trouble. */
3979 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3980 || align >= BIGGEST_ALIGNMENT
3981 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3982 == (align / BITS_PER_UNIT)))
3983 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3984 {
3985 /* Push padding now if padding above and stack grows down,
3986 or if padding below and stack grows up.
3987 But if space already allocated, this has already been done. */
3988 if (extra && args_addr == 0
3989 && where_pad != none && where_pad != stack_direction)
3990 anti_adjust_stack (GEN_INT (extra));
3991
3992 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3993 }
3994 else
3995 #endif /* PUSH_ROUNDING */
3996 {
3997 rtx target;
3998
3999 /* Otherwise make space on the stack and copy the data
4000 to the address of that space. */
4001
4002 /* Deduct words put into registers from the size we must copy. */
4003 if (partial != 0)
4004 {
4005 if (GET_CODE (size) == CONST_INT)
4006 size = GEN_INT (INTVAL (size) - used);
4007 else
4008 size = expand_binop (GET_MODE (size), sub_optab, size,
4009 GEN_INT (used), NULL_RTX, 0,
4010 OPTAB_LIB_WIDEN);
4011 }
4012
4013 /* Get the address of the stack space.
4014 In this case, we do not deal with EXTRA separately.
4015 A single stack adjust will do. */
4016 if (! args_addr)
4017 {
4018 temp = push_block (size, extra, where_pad == downward);
4019 extra = 0;
4020 }
4021 else if (GET_CODE (args_so_far) == CONST_INT)
4022 temp = memory_address (BLKmode,
4023 plus_constant (args_addr,
4024 skip + INTVAL (args_so_far)));
4025 else
4026 temp = memory_address (BLKmode,
4027 plus_constant (gen_rtx_PLUS (Pmode,
4028 args_addr,
4029 args_so_far),
4030 skip));
4031
4032 if (!ACCUMULATE_OUTGOING_ARGS)
4033 {
4034 /* If the source is referenced relative to the stack pointer,
4035 copy it to another register to stabilize it. We do not need
4036 to do this if we know that we won't be changing sp. */
4037
4038 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4039 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4040 temp = copy_to_reg (temp);
4041 }
4042
4043 target = gen_rtx_MEM (BLKmode, temp);
4044
4045 if (type != 0)
4046 {
4047 set_mem_attributes (target, type, 1);
4048 /* Function incoming arguments may overlap with sibling call
4049 outgoing arguments and we cannot allow reordering of reads
4050 from function arguments with stores to outgoing arguments
4051 of sibling calls. */
4052 set_mem_alias_set (target, 0);
4053 }
4054
4055 /* ALIGN may well be better aligned than TYPE, e.g. due to
4056 PARM_BOUNDARY. Assume the caller isn't lying. */
4057 set_mem_align (target, align);
4058
4059 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4060 }
4061 }
4062 else if (partial > 0)
4063 {
4064 /* Scalar partly in registers. */
4065
4066 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4067 int i;
4068 int not_stack;
4069 /* # words of start of argument
4070 that we must make space for but need not store. */
4071 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4072 int args_offset = INTVAL (args_so_far);
4073 int skip;
4074
4075 /* Push padding now if padding above and stack grows down,
4076 or if padding below and stack grows up.
4077 But if space already allocated, this has already been done. */
4078 if (extra && args_addr == 0
4079 && where_pad != none && where_pad != stack_direction)
4080 anti_adjust_stack (GEN_INT (extra));
4081
4082 /* If we make space by pushing it, we might as well push
4083 the real data. Otherwise, we can leave OFFSET nonzero
4084 and leave the space uninitialized. */
4085 if (args_addr == 0)
4086 offset = 0;
4087
4088 /* Now NOT_STACK gets the number of words that we don't need to
4089 allocate on the stack. */
4090 not_stack = partial - offset;
4091
4092 /* If the partial register-part of the arg counts in its stack size,
4093 skip the part of stack space corresponding to the registers.
4094 Otherwise, start copying to the beginning of the stack space,
4095 by setting SKIP to 0. */
4096 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4097
4098 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4099 x = validize_mem (force_const_mem (mode, x));
4100
4101 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4102 SUBREGs of such registers are not allowed. */
4103 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4104 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4105 x = copy_to_reg (x);
4106
4107 /* Loop over all the words allocated on the stack for this arg. */
4108 /* We can do it by words, because any scalar bigger than a word
4109 has a size a multiple of a word. */
4110 #ifndef PUSH_ARGS_REVERSED
4111 for (i = not_stack; i < size; i++)
4112 #else
4113 for (i = size - 1; i >= not_stack; i--)
4114 #endif
4115 if (i >= not_stack + offset)
4116 emit_push_insn (operand_subword_force (x, i, mode),
4117 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4118 0, args_addr,
4119 GEN_INT (args_offset + ((i - not_stack + skip)
4120 * UNITS_PER_WORD)),
4121 reg_parm_stack_space, alignment_pad);
4122 }
4123 else
4124 {
4125 rtx addr;
4126 rtx dest;
4127
4128 /* Push padding now if padding above and stack grows down,
4129 or if padding below and stack grows up.
4130 But if space already allocated, this has already been done. */
4131 if (extra && args_addr == 0
4132 && where_pad != none && where_pad != stack_direction)
4133 anti_adjust_stack (GEN_INT (extra));
4134
4135 #ifdef PUSH_ROUNDING
4136 if (args_addr == 0 && PUSH_ARGS)
4137 emit_single_push_insn (mode, x, type);
4138 else
4139 #endif
4140 {
4141 if (GET_CODE (args_so_far) == CONST_INT)
4142 addr
4143 = memory_address (mode,
4144 plus_constant (args_addr,
4145 INTVAL (args_so_far)));
4146 else
4147 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4148 args_so_far));
4149 dest = gen_rtx_MEM (mode, addr);
4150 if (type != 0)
4151 {
4152 set_mem_attributes (dest, type, 1);
4153 /* Function incoming arguments may overlap with sibling call
4154 outgoing arguments and we cannot allow reordering of reads
4155 from function arguments with stores to outgoing arguments
4156 of sibling calls. */
4157 set_mem_alias_set (dest, 0);
4158 }
4159
4160 emit_move_insn (dest, x);
4161 }
4162 }
4163
4164 /* If part should go in registers, copy that part
4165 into the appropriate registers. Do this now, at the end,
4166 since mem-to-mem copies above may do function calls. */
4167 if (partial > 0 && reg != 0)
4168 {
4169 /* Handle calls that pass values in multiple non-contiguous locations.
4170 The Irix 6 ABI has examples of this. */
4171 if (GET_CODE (reg) == PARALLEL)
4172 emit_group_load (reg, x, -1); /* ??? size? */
4173 else
4174 move_block_to_reg (REGNO (reg), x, partial, mode);
4175 }
4176
4177 if (extra && args_addr == 0 && where_pad == stack_direction)
4178 anti_adjust_stack (GEN_INT (extra));
4179
4180 if (alignment_pad && args_addr == 0)
4181 anti_adjust_stack (alignment_pad);
4182 }
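/* Illustrative example (hypothetical values): pushing a 12-byte BLKmode
   argument on a 32-bit target with PARTIAL == 2, REG set and
   REG_PARM_STACK_SPACE == 0.  USED becomes 8, so only the final 4 bytes of
   the value are copied to the stack; SKIP stays 0 because no stack space is
   reserved for the register part.  After the stack copy (which may involve
   a library call), move_block_to_reg loads the first two words into REG and
   the following hard register.  */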
4183 \f
4184 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4185 operations. */
4186
4187 static rtx
4188 get_subtarget (x)
4189 rtx x;
4190 {
4191 return ((x == 0
4192 /* Only registers can be subtargets. */
4193 || GET_CODE (x) != REG
4194 /* If the register is readonly, it can't be set more than once. */
4195 || RTX_UNCHANGING_P (x)
4196 /* Don't use hard regs to avoid extending their life. */
4197 || REGNO (x) < FIRST_PSEUDO_REGISTER
4198 /* Avoid subtargets inside loops,
4199 since they hide some invariant expressions. */
4200 || preserve_subexpressions_p ())
4201 ? 0 : x);
4202 }
4203
4204 /* Expand an assignment that stores the value of FROM into TO.
4205 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4206 (This may contain a QUEUED rtx;
4207 if the value is constant, this rtx is a constant.)
4208 Otherwise, the returned value is NULL_RTX.
4209
4210 SUGGEST_REG is no longer actually used.
4211 It used to mean, copy the value through a register
4212 and return that register, if that is possible.
4213 We now use WANT_VALUE to decide whether to do this. */
4214
4215 rtx
4216 expand_assignment (to, from, want_value, suggest_reg)
4217 tree to, from;
4218 int want_value;
4219 int suggest_reg ATTRIBUTE_UNUSED;
4220 {
4221 rtx to_rtx = 0;
4222 rtx result;
4223
4224 /* Don't crash if the lhs of the assignment was erroneous. */
4225
4226 if (TREE_CODE (to) == ERROR_MARK)
4227 {
4228 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4229 return want_value ? result : NULL_RTX;
4230 }
4231
4232 /* Assignment of a structure component needs special treatment
4233 if the structure component's rtx is not simply a MEM.
4234 Assignment of an array element at a constant index, and assignment of
4235 an array element in an unaligned packed structure field, has the same
4236 problem. */
4237
4238 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4239 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4240 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4241 {
4242 enum machine_mode mode1;
4243 HOST_WIDE_INT bitsize, bitpos;
4244 rtx orig_to_rtx;
4245 tree offset;
4246 int unsignedp;
4247 int volatilep = 0;
4248 tree tem;
4249
4250 push_temp_slots ();
4251 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4252 &unsignedp, &volatilep);
4253
4254 /* If we are going to use store_bit_field and extract_bit_field,
4255 make sure to_rtx will be safe for multiple use. */
4256
4257 if (mode1 == VOIDmode && want_value)
4258 tem = stabilize_reference (tem);
4259
4260 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4261
4262 if (offset != 0)
4263 {
4264 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4265
4266 if (GET_CODE (to_rtx) != MEM)
4267 abort ();
4268
4269 #ifdef POINTERS_EXTEND_UNSIGNED
4270 if (GET_MODE (offset_rtx) != Pmode)
4271 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4272 #else
4273 if (GET_MODE (offset_rtx) != ptr_mode)
4274 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4275 #endif
4276
4277 /* A constant address in TO_RTX can have VOIDmode, we must not try
4278 to call force_reg for that case. Avoid that case. */
4279 if (GET_CODE (to_rtx) == MEM
4280 && GET_MODE (to_rtx) == BLKmode
4281 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4282 && bitsize > 0
4283 && (bitpos % bitsize) == 0
4284 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4285 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4286 {
4287 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4288 bitpos = 0;
4289 }
4290
4291 to_rtx = offset_address (to_rtx, offset_rtx,
4292 highest_pow2_factor_for_type (TREE_TYPE (to),
4293 offset));
4294 }
4295
4296 if (GET_CODE (to_rtx) == MEM)
4297 {
4298 /* If the field is at offset zero, we could have been given the
4299 DECL_RTX of the parent struct. Don't munge it. */
4300 to_rtx = shallow_copy_rtx (to_rtx);
4301
4302 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4303 }
4304
4305 /* Deal with volatile and readonly fields. The former is only done
4306 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4307 if (volatilep && GET_CODE (to_rtx) == MEM)
4308 {
4309 if (to_rtx == orig_to_rtx)
4310 to_rtx = copy_rtx (to_rtx);
4311 MEM_VOLATILE_P (to_rtx) = 1;
4312 }
4313
4314 if (TREE_CODE (to) == COMPONENT_REF
4315 && TREE_READONLY (TREE_OPERAND (to, 1)))
4316 {
4317 if (to_rtx == orig_to_rtx)
4318 to_rtx = copy_rtx (to_rtx);
4319 RTX_UNCHANGING_P (to_rtx) = 1;
4320 }
4321
4322 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4323 {
4324 if (to_rtx == orig_to_rtx)
4325 to_rtx = copy_rtx (to_rtx);
4326 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4327 }
4328
4329 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4330 (want_value
4331 /* Spurious cast for HPUX compiler. */
4332 ? ((enum machine_mode)
4333 TYPE_MODE (TREE_TYPE (to)))
4334 : VOIDmode),
4335 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4336
4337 preserve_temp_slots (result);
4338 free_temp_slots ();
4339 pop_temp_slots ();
4340
4341 /* If the value is meaningful, convert RESULT to the proper mode.
4342 Otherwise, return nothing. */
4343 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4344 TYPE_MODE (TREE_TYPE (from)),
4345 result,
4346 TREE_UNSIGNED (TREE_TYPE (to)))
4347 : NULL_RTX);
4348 }
4349
4350 /* If the rhs is a function call and its value is not an aggregate,
4351 call the function before we start to compute the lhs.
4352 This is needed for correct code for cases such as
4353 val = setjmp (buf) on machines where reference to val
4354 requires loading up part of an address in a separate insn.
4355
4356 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4357 since it might be a promoted variable where the zero- or sign- extension
4358 needs to be done. Handling this in the normal way is safe because no
4359 computation is done before the call. */
4360 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4361 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4362 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4363 && GET_CODE (DECL_RTL (to)) == REG))
4364 {
4365 rtx value;
4366
4367 push_temp_slots ();
4368 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4369 if (to_rtx == 0)
4370 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4371
4372 /* Handle calls that return values in multiple non-contiguous locations.
4373 The Irix 6 ABI has examples of this. */
4374 if (GET_CODE (to_rtx) == PARALLEL)
4375 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4376 else if (GET_MODE (to_rtx) == BLKmode)
4377 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4378 else
4379 {
4380 #ifdef POINTERS_EXTEND_UNSIGNED
4381 if (POINTER_TYPE_P (TREE_TYPE (to))
4382 && GET_MODE (to_rtx) != GET_MODE (value))
4383 value = convert_memory_address (GET_MODE (to_rtx), value);
4384 #endif
4385 emit_move_insn (to_rtx, value);
4386 }
4387 preserve_temp_slots (to_rtx);
4388 free_temp_slots ();
4389 pop_temp_slots ();
4390 return want_value ? to_rtx : NULL_RTX;
4391 }
4392
4393 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4394 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4395
4396 if (to_rtx == 0)
4397 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4398
4399 /* Don't move directly into a return register. */
4400 if (TREE_CODE (to) == RESULT_DECL
4401 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4402 {
4403 rtx temp;
4404
4405 push_temp_slots ();
4406 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4407
4408 if (GET_CODE (to_rtx) == PARALLEL)
4409 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4410 else
4411 emit_move_insn (to_rtx, temp);
4412
4413 preserve_temp_slots (to_rtx);
4414 free_temp_slots ();
4415 pop_temp_slots ();
4416 return want_value ? to_rtx : NULL_RTX;
4417 }
4418
4419 /* In case we are returning the contents of an object which overlaps
4420 the place the value is being stored, use a safe function when copying
4421 a value through a pointer into a structure value return block. */
4422 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4423 && current_function_returns_struct
4424 && !current_function_returns_pcc_struct)
4425 {
4426 rtx from_rtx, size;
4427
4428 push_temp_slots ();
4429 size = expr_size (from);
4430 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4431
4432 if (TARGET_MEM_FUNCTIONS)
4433 emit_library_call (memmove_libfunc, LCT_NORMAL,
4434 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4435 XEXP (from_rtx, 0), Pmode,
4436 convert_to_mode (TYPE_MODE (sizetype),
4437 size, TREE_UNSIGNED (sizetype)),
4438 TYPE_MODE (sizetype));
4439 else
4440 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4441 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4442 XEXP (to_rtx, 0), Pmode,
4443 convert_to_mode (TYPE_MODE (integer_type_node),
4444 size,
4445 TREE_UNSIGNED (integer_type_node)),
4446 TYPE_MODE (integer_type_node));
4447
4448 preserve_temp_slots (to_rtx);
4449 free_temp_slots ();
4450 pop_temp_slots ();
4451 return want_value ? to_rtx : NULL_RTX;
4452 }
4453
4454 /* Compute FROM and store the value in the rtx we got. */
4455
4456 push_temp_slots ();
4457 result = store_expr (from, to_rtx, want_value);
4458 preserve_temp_slots (result);
4459 free_temp_slots ();
4460 pop_temp_slots ();
4461 return want_value ? result : NULL_RTX;
4462 }
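/* Illustrative example: for an assignment such as s.f = x where F is a
   bit-field, get_inner_reference returns the containing object S with, say,
   BITSIZE == 3, BITPOS == 5 and MODE1 == VOIDmode; the store is then done by
   store_field on S's MEM using the alias set of s.f, and the result, if
   WANT_VALUE, is converted to the mode of the field's type.  The particular
   field layout here is only for illustration.  */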
4463
4464 /* Generate code for computing expression EXP,
4465 and storing the value into TARGET.
4466 TARGET may contain a QUEUED rtx.
4467
4468 If WANT_VALUE & 1 is nonzero, return a copy of the value
4469 not in TARGET, so that we can be sure to use the proper
4470 value in a containing expression even if TARGET has something
4471 else stored in it. If possible, we copy the value through a pseudo
4472 and return that pseudo. Or, if the value is constant, we try to
4473 return the constant. In some cases, we return a pseudo
4474 copied *from* TARGET.
4475
4476 If the mode is BLKmode then we may return TARGET itself.
4477 It turns out that in BLKmode it doesn't cause a problem,
4478 because C has no operators that could combine two different
4479 assignments into the same BLKmode object with different values
4480 with no sequence point. Will other languages need this to
4481 be more thorough?
4482
4483 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4484 to catch quickly any cases where the caller uses the value
4485 and fails to set WANT_VALUE.
4486
4487 If WANT_VALUE & 2 is set, this is a store into a call param on the
4488 stack, and block moves may need to be treated specially. */
4489
4490 rtx
4491 store_expr (exp, target, want_value)
4492 tree exp;
4493 rtx target;
4494 int want_value;
4495 {
4496 rtx temp;
4497 int dont_return_target = 0;
4498 int dont_store_target = 0;
4499
4500 if (VOID_TYPE_P (TREE_TYPE (exp)))
4501 {
4502 /* C++ can generate ?: expressions with a throw expression in one
4503 branch and an rvalue in the other. Here, we resolve attempts to
4504 store the throw expression's nonexistent result. */
4505 if (want_value)
4506 abort ();
4507 expand_expr (exp, const0_rtx, VOIDmode, 0);
4508 return NULL_RTX;
4509 }
4510 if (TREE_CODE (exp) == COMPOUND_EXPR)
4511 {
4512 /* Perform first part of compound expression, then assign from second
4513 part. */
4514 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4515 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4516 emit_queue ();
4517 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4518 }
4519 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4520 {
4521 /* For conditional expression, get safe form of the target. Then
4522 test the condition, doing the appropriate assignment on either
4523 side. This avoids the creation of unnecessary temporaries.
4524 For non-BLKmode, it is more efficient not to do this. */
4525
4526 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4527
4528 emit_queue ();
4529 target = protect_from_queue (target, 1);
4530
4531 do_pending_stack_adjust ();
4532 NO_DEFER_POP;
4533 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4534 start_cleanup_deferral ();
4535 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4536 end_cleanup_deferral ();
4537 emit_queue ();
4538 emit_jump_insn (gen_jump (lab2));
4539 emit_barrier ();
4540 emit_label (lab1);
4541 start_cleanup_deferral ();
4542 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4543 end_cleanup_deferral ();
4544 emit_queue ();
4545 emit_label (lab2);
4546 OK_DEFER_POP;
4547
4548 return want_value & 1 ? target : NULL_RTX;
4549 }
4550 else if (queued_subexp_p (target))
4551 /* If target contains a postincrement, let's not risk
4552 using it as the place to generate the rhs. */
4553 {
4554 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4555 {
4556 /* Expand EXP into a new pseudo. */
4557 temp = gen_reg_rtx (GET_MODE (target));
4558 temp = expand_expr (exp, temp, GET_MODE (target),
4559 (want_value & 2
4560 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4561 }
4562 else
4563 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4564 (want_value & 2
4565 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4566
4567 /* If target is volatile, ANSI requires accessing the value
4568 *from* the target, if it is accessed. So make that happen.
4569 In no case return the target itself. */
4570 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4571 dont_return_target = 1;
4572 }
4573 else if ((want_value & 1) != 0
4574 && GET_CODE (target) == MEM
4575 && ! MEM_VOLATILE_P (target)
4576 && GET_MODE (target) != BLKmode)
4577 /* If target is in memory and caller wants value in a register instead,
4578 arrange that. Pass TARGET as target for expand_expr so that,
4579 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4580 We know expand_expr will not use the target in that case.
4581 Don't do this if TARGET is volatile because we are supposed
4582 to write it and then read it. */
4583 {
4584 temp = expand_expr (exp, target, GET_MODE (target),
4585 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4586 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4587 {
4588 /* If TEMP is already in the desired TARGET, only copy it from
4589 memory and don't store it there again. */
4590 if (temp == target
4591 || (rtx_equal_p (temp, target)
4592 && ! side_effects_p (temp) && ! side_effects_p (target)))
4593 dont_store_target = 1;
4594 temp = copy_to_reg (temp);
4595 }
4596 dont_return_target = 1;
4597 }
4598 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4599 /* If this is a scalar in a register that is stored in a wider mode
4600 than the declared mode, compute the result into its declared mode
4601 and then convert to the wider mode. Our value is the computed
4602 expression. */
4603 {
4604 rtx inner_target = 0;
4605
4606 /* If we don't want a value, we can do the conversion inside EXP,
4607 which will often result in some optimizations. Do the conversion
4608 in two steps: first change the signedness, if needed, then
4609 the extend. But don't do this if the type of EXP is a subtype
4610 of something else since then the conversion might involve
4611 more than just converting modes. */
4612 if ((want_value & 1) == 0
4613 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4614 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4615 {
4616 if (TREE_UNSIGNED (TREE_TYPE (exp))
4617 != SUBREG_PROMOTED_UNSIGNED_P (target))
4618 exp = convert
4619 ((*lang_hooks.types.signed_or_unsigned_type)
4620 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4621
4622 exp = convert ((*lang_hooks.types.type_for_mode)
4623 (GET_MODE (SUBREG_REG (target)),
4624 SUBREG_PROMOTED_UNSIGNED_P (target)),
4625 exp);
4626
4627 inner_target = SUBREG_REG (target);
4628 }
4629
4630 temp = expand_expr (exp, inner_target, VOIDmode,
4631 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4632
4633 /* If TEMP is a MEM and we want a result value, make the access
4634 now so it gets done only once. Strictly speaking, this is
4635 only necessary if the MEM is volatile, or if the address
4636 overlaps TARGET. But not performing the load twice also
4637 reduces the amount of rtl we generate and then have to CSE. */
4638 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4639 temp = copy_to_reg (temp);
4640
4641 /* If TEMP is a VOIDmode constant, use convert_modes to make
4642 sure that we properly convert it. */
4643 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4644 {
4645 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4646 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4647 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4648 GET_MODE (target), temp,
4649 SUBREG_PROMOTED_UNSIGNED_P (target));
4650 }
4651
4652 convert_move (SUBREG_REG (target), temp,
4653 SUBREG_PROMOTED_UNSIGNED_P (target));
4654
4655 /* If we promoted a constant, change the mode back down to match
4656 target. Otherwise, the caller might get confused by a result whose
4657 mode is larger than expected. */
4658
4659 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4660 {
4661 if (GET_MODE (temp) != VOIDmode)
4662 {
4663 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4664 SUBREG_PROMOTED_VAR_P (temp) = 1;
4665 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4666 SUBREG_PROMOTED_UNSIGNED_P (target));
4667 }
4668 else
4669 temp = convert_modes (GET_MODE (target),
4670 GET_MODE (SUBREG_REG (target)),
4671 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4672 }
4673
4674 return want_value & 1 ? temp : NULL_RTX;
4675 }
4676 else
4677 {
4678 temp = expand_expr (exp, target, GET_MODE (target),
4679 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4680 /* Return TARGET if it's a specified hardware register.
4681 If TARGET is a volatile mem ref, either return TARGET
4682 or return a reg copied *from* TARGET; ANSI requires this.
4683
4684 Otherwise, if TEMP is not TARGET, return TEMP
4685 if it is constant (for efficiency),
4686 or if we really want the correct value. */
4687 if (!(target && GET_CODE (target) == REG
4688 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4689 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4690 && ! rtx_equal_p (temp, target)
4691 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4692 dont_return_target = 1;
4693 }
4694
4695 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4696 the same as that of TARGET, adjust the constant. This is needed, for
4697 example, in case it is a CONST_DOUBLE and we want only a word-sized
4698 value. */
4699 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4700 && TREE_CODE (exp) != ERROR_MARK
4701 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4702 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4703 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4704
4705 /* If value was not generated in the target, store it there.
4706 Convert the value to TARGET's type first if necessary.
4707 If TEMP and TARGET compare equal according to rtx_equal_p, but
4708 one or both of them are volatile memory refs, we have to distinguish
4709 two cases:
4710 - expand_expr has used TARGET. In this case, we must not generate
4711 another copy. This can be detected by TARGET being equal according
4712 to == .
4713 - expand_expr has not used TARGET - that means that the source just
4714 happens to have the same RTX form. Since temp will have been created
4715 by expand_expr, it will compare unequal according to == .
4716 We must generate a copy in this case, to reach the correct number
4717 of volatile memory references. */
4718
4719 if ((! rtx_equal_p (temp, target)
4720 || (temp != target && (side_effects_p (temp)
4721 || side_effects_p (target))))
4722 && TREE_CODE (exp) != ERROR_MARK
4723 && ! dont_store_target
4724 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4725 but TARGET is not a valid memory reference, TEMP will differ
4726 from TARGET although it is really the same location. */
4727 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4728 || target != DECL_RTL_IF_SET (exp))
4729 /* If there's nothing to copy, don't bother. Don't call expr_size
4730 unless necessary, because some front ends' expr_size hook (e.g. C++'s)
4731 aborts on objects that are not supposed to be bit-copied or
4732 bit-initialized. */
4733 && expr_size (exp) != const0_rtx)
4734 {
4735 target = protect_from_queue (target, 1);
4736 if (GET_MODE (temp) != GET_MODE (target)
4737 && GET_MODE (temp) != VOIDmode)
4738 {
4739 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4740 if (dont_return_target)
4741 {
4742 /* In this case, we will return TEMP,
4743 so make sure it has the proper mode.
4744 But don't forget to store the value into TARGET. */
4745 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4746 emit_move_insn (target, temp);
4747 }
4748 else
4749 convert_move (target, temp, unsignedp);
4750 }
4751
4752 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4753 {
4754 /* Handle copying a string constant into an array. The string
4755 constant may be shorter than the array. So copy just the string's
4756 actual length, and clear the rest. First get the size of the data
4757 type of the string, which is actually the size of the target. */
4758 rtx size = expr_size (exp);
4759
4760 if (GET_CODE (size) == CONST_INT
4761 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4762 emit_block_move (target, temp, size,
4763 (want_value & 2
4764 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4765 else
4766 {
4767 /* Compute the size of the data to copy from the string. */
4768 tree copy_size
4769 = size_binop (MIN_EXPR,
4770 make_tree (sizetype, size),
4771 size_int (TREE_STRING_LENGTH (exp)));
4772 rtx copy_size_rtx
4773 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4774 (want_value & 2
4775 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4776 rtx label = 0;
4777
4778 /* Copy that much. */
4779 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4780 TREE_UNSIGNED (sizetype));
4781 emit_block_move (target, temp, copy_size_rtx,
4782 (want_value & 2
4783 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4784
4785 /* Figure out how much is left in TARGET that we have to clear.
4786 Do all calculations in ptr_mode. */
4787 if (GET_CODE (copy_size_rtx) == CONST_INT)
4788 {
4789 size = plus_constant (size, -INTVAL (copy_size_rtx));
4790 target = adjust_address (target, BLKmode,
4791 INTVAL (copy_size_rtx));
4792 }
4793 else
4794 {
4795 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4796 copy_size_rtx, NULL_RTX, 0,
4797 OPTAB_LIB_WIDEN);
4798
4799 #ifdef POINTERS_EXTEND_UNSIGNED
4800 if (GET_MODE (copy_size_rtx) != Pmode)
4801 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4802 TREE_UNSIGNED (sizetype));
4803 #endif
4804
4805 target = offset_address (target, copy_size_rtx,
4806 highest_pow2_factor (copy_size));
4807 label = gen_label_rtx ();
4808 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4809 GET_MODE (size), 0, label);
4810 }
4811
4812 if (size != const0_rtx)
4813 clear_storage (target, size);
4814
4815 if (label)
4816 emit_label (label);
4817 }
4818 }
4819 /* Handle calls that return values in multiple non-contiguous locations.
4820 The Irix 6 ABI has examples of this. */
4821 else if (GET_CODE (target) == PARALLEL)
4822 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4823 else if (GET_MODE (temp) == BLKmode)
4824 emit_block_move (target, temp, expr_size (exp),
4825 (want_value & 2
4826 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4827 else
4828 emit_move_insn (target, temp);
4829 }
4830
4831 /* If we don't want a value, return NULL_RTX. */
4832 if ((want_value & 1) == 0)
4833 return NULL_RTX;
4834
4835 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4836 ??? The latter test doesn't seem to make sense. */
4837 else if (dont_return_target && GET_CODE (temp) != MEM)
4838 return temp;
4839
4840 /* If a value is wanted and TARGET is neither BLKmode nor a hard register, return a pseudo copy of it. */
4841 else if ((want_value & 1) != 0
4842 && GET_MODE (target) != BLKmode
4843 && ! (GET_CODE (target) == REG
4844 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4845 return copy_to_reg (target);
4846
4847 else
4848 return target;
4849 }
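/* Note on WANT_VALUE above: it is a bit mask.  Bit 0 asks for the value of
   the expression to be returned (otherwise NULL_RTX is returned); bit 1
   marks a store into a call parameter on the stack, which selects
   EXPAND_STACK_PARM for the recursive expansions and BLOCK_OP_CALL_PARM for
   any block moves.  For example, WANT_VALUE == 2 stores a call argument
   without requesting a result.  */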
4850 \f
4851 /* Return 1 if EXP just contains zeros. */
4852
4853 static int
4854 is_zeros_p (exp)
4855 tree exp;
4856 {
4857 tree elt;
4858
4859 switch (TREE_CODE (exp))
4860 {
4861 case CONVERT_EXPR:
4862 case NOP_EXPR:
4863 case NON_LVALUE_EXPR:
4864 case VIEW_CONVERT_EXPR:
4865 return is_zeros_p (TREE_OPERAND (exp, 0));
4866
4867 case INTEGER_CST:
4868 return integer_zerop (exp);
4869
4870 case COMPLEX_CST:
4871 return
4872 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4873
4874 case REAL_CST:
4875 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4876
4877 case VECTOR_CST:
4878 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4879 elt = TREE_CHAIN (elt))
4880 if (!is_zeros_p (TREE_VALUE (elt)))
4881 return 0;
4882
4883 return 1;
4884
4885 case CONSTRUCTOR:
4886 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4887 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4888 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4889 if (! is_zeros_p (TREE_VALUE (elt)))
4890 return 0;
4891
4892 return 1;
4893
4894 default:
4895 return 0;
4896 }
4897 }
4898
4899 /* Return 1 if EXP contains mostly (3/4) zeros. */
4900
4901 static int
4902 mostly_zeros_p (exp)
4903 tree exp;
4904 {
4905 if (TREE_CODE (exp) == CONSTRUCTOR)
4906 {
4907 int elts = 0, zeros = 0;
4908 tree elt = CONSTRUCTOR_ELTS (exp);
4909 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4910 {
4911 /* If there are no ranges of true bits, it is all zero. */
4912 return elt == NULL_TREE;
4913 }
4914 for (; elt; elt = TREE_CHAIN (elt))
4915 {
4916 /* We do not handle the case where the index is a RANGE_EXPR,
4917 so the statistic will be somewhat inaccurate.
4918 We do make a more accurate count in store_constructor itself,
4919 and since this function is only used for nested array elements,
4920 this should be close enough. */
4921 if (mostly_zeros_p (TREE_VALUE (elt)))
4922 zeros++;
4923 elts++;
4924 }
4925
4926 return 4 * zeros >= 3 * elts;
4927 }
4928
4929 return is_zeros_p (exp);
4930 }
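/* Illustrative example: a CONSTRUCTOR with 8 elements of which 6 are zero
   satisfies 4 * 6 >= 3 * 8, so mostly_zeros_p returns 1 and callers will
   prefer to clear the whole object first and store only the nonzero
   elements; with only 5 zero elements (20 < 24) it returns 0.  */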
4931 \f
4932 /* Helper function for store_constructor.
4933 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4934 TYPE is the type of the CONSTRUCTOR, not the element type.
4935 CLEARED is as for store_constructor.
4936 ALIAS_SET is the alias set to use for any stores.
4937
4938 This provides a recursive shortcut back to store_constructor when it isn't
4939 necessary to go through store_field. This is so that we can pass through
4940 the cleared field to let store_constructor know that we may not have to
4941 clear a substructure if the outer structure has already been cleared. */
4942
4943 static void
4944 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4945 alias_set)
4946 rtx target;
4947 unsigned HOST_WIDE_INT bitsize;
4948 HOST_WIDE_INT bitpos;
4949 enum machine_mode mode;
4950 tree exp, type;
4951 int cleared;
4952 int alias_set;
4953 {
4954 if (TREE_CODE (exp) == CONSTRUCTOR
4955 && bitpos % BITS_PER_UNIT == 0
4956 /* If we have a nonzero bitpos for a register target, then we just
4957 let store_field do the bitfield handling. This is unlikely to
4958 generate unnecessary clear instructions anyways. */
4959 && (bitpos == 0 || GET_CODE (target) == MEM))
4960 {
4961 if (GET_CODE (target) == MEM)
4962 target
4963 = adjust_address (target,
4964 GET_MODE (target) == BLKmode
4965 || 0 != (bitpos
4966 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4967 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4968
4969
4970 /* Update the alias set, if required. */
4971 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4972 && MEM_ALIAS_SET (target) != 0)
4973 {
4974 target = copy_rtx (target);
4975 set_mem_alias_set (target, alias_set);
4976 }
4977
4978 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4979 }
4980 else
4981 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4982 alias_set);
4983 }
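/* Illustrative example: storing a nested CONSTRUCTOR for a struct field that
   starts at bit 64 of a MEM target.  BITPOS is a multiple of BITS_PER_UNIT,
   so the target is simply re-addressed 8 bytes further on and
   store_constructor is entered recursively with the same CLEARED flag; if
   the outer object was already cleared and the inner constructor is all
   zeros, nothing further is emitted for it.  */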
4984
4985 /* Store the value of constructor EXP into the rtx TARGET.
4986 TARGET is either a REG or a MEM; we know it cannot conflict, since
4987 safe_from_p has been called.
4988 CLEARED is true if TARGET is known to have been zero'd.
4989 SIZE is the number of bytes of TARGET we are allowed to modify: this
4990 may not be the same as the size of EXP if we are assigning to a field
4991 which has been packed to exclude padding bits. */
4992
4993 static void
4994 store_constructor (exp, target, cleared, size)
4995 tree exp;
4996 rtx target;
4997 int cleared;
4998 HOST_WIDE_INT size;
4999 {
5000 tree type = TREE_TYPE (exp);
5001 #ifdef WORD_REGISTER_OPERATIONS
5002 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5003 #endif
5004
5005 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
5006 || TREE_CODE (type) == QUAL_UNION_TYPE)
5007 {
5008 tree elt;
5009
5010 /* We either clear the aggregate or indicate the value is dead. */
5011 if ((TREE_CODE (type) == UNION_TYPE
5012 || TREE_CODE (type) == QUAL_UNION_TYPE)
5013 && ! cleared
5014 && ! CONSTRUCTOR_ELTS (exp))
5015 /* If the constructor is empty, clear the union. */
5016 {
5017 clear_storage (target, expr_size (exp));
5018 cleared = 1;
5019 }
5020
5021 /* If we are building a static constructor into a register,
5022 set the initial value as zero so we can fold the value into
5023 a constant. But if more than one register is involved,
5024 this probably loses. */
5025 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
5026 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5027 {
5028 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5029 cleared = 1;
5030 }
5031
5032 /* If the constructor has fewer fields than the structure
5033 or if we are initializing the structure to mostly zeros,
5034 clear the whole structure first. Don't do this if TARGET is a
5035 register whose mode size isn't equal to SIZE since clear_storage
5036 can't handle this case. */
5037 else if (! cleared && size > 0
5038 && ((list_length (CONSTRUCTOR_ELTS (exp))
5039 != fields_length (type))
5040 || mostly_zeros_p (exp))
5041 && (GET_CODE (target) != REG
5042 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5043 == size)))
5044 {
5045 rtx xtarget = target;
5046
5047 if (readonly_fields_p (type))
5048 {
5049 xtarget = copy_rtx (xtarget);
5050 RTX_UNCHANGING_P (xtarget) = 1;
5051 }
5052
5053 clear_storage (xtarget, GEN_INT (size));
5054 cleared = 1;
5055 }
5056
5057 if (! cleared)
5058 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5059
5060 /* Store each element of the constructor into
5061 the corresponding field of TARGET. */
5062
5063 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5064 {
5065 tree field = TREE_PURPOSE (elt);
5066 tree value = TREE_VALUE (elt);
5067 enum machine_mode mode;
5068 HOST_WIDE_INT bitsize;
5069 HOST_WIDE_INT bitpos = 0;
5070 tree offset;
5071 rtx to_rtx = target;
5072
5073 /* Just ignore missing fields.
5074 We cleared the whole structure, above,
5075 if any fields are missing. */
5076 if (field == 0)
5077 continue;
5078
5079 if (cleared && is_zeros_p (value))
5080 continue;
5081
5082 if (host_integerp (DECL_SIZE (field), 1))
5083 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5084 else
5085 bitsize = -1;
5086
5087 mode = DECL_MODE (field);
5088 if (DECL_BIT_FIELD (field))
5089 mode = VOIDmode;
5090
5091 offset = DECL_FIELD_OFFSET (field);
5092 if (host_integerp (offset, 0)
5093 && host_integerp (bit_position (field), 0))
5094 {
5095 bitpos = int_bit_position (field);
5096 offset = 0;
5097 }
5098 else
5099 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5100
5101 if (offset)
5102 {
5103 rtx offset_rtx;
5104
5105 if (CONTAINS_PLACEHOLDER_P (offset))
5106 offset = build (WITH_RECORD_EXPR, sizetype,
5107 offset, make_tree (TREE_TYPE (exp), target));
5108
5109 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5110 if (GET_CODE (to_rtx) != MEM)
5111 abort ();
5112
5113 #ifdef POINTERS_EXTEND_UNSIGNED
5114 if (GET_MODE (offset_rtx) != Pmode)
5115 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5116 #else
5117 if (GET_MODE (offset_rtx) != ptr_mode)
5118 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5119 #endif
5120
5121 to_rtx = offset_address (to_rtx, offset_rtx,
5122 highest_pow2_factor (offset));
5123 }
5124
5125 if (TREE_READONLY (field))
5126 {
5127 if (GET_CODE (to_rtx) == MEM)
5128 to_rtx = copy_rtx (to_rtx);
5129
5130 RTX_UNCHANGING_P (to_rtx) = 1;
5131 }
5132
5133 #ifdef WORD_REGISTER_OPERATIONS
5134 /* If this initializes a field that is smaller than a word, at the
5135 start of a word, try to widen it to a full word.
5136 This special case allows us to output C++ member function
5137 initializations in a form that the optimizers can understand. */
5138 if (GET_CODE (target) == REG
5139 && bitsize < BITS_PER_WORD
5140 && bitpos % BITS_PER_WORD == 0
5141 && GET_MODE_CLASS (mode) == MODE_INT
5142 && TREE_CODE (value) == INTEGER_CST
5143 && exp_size >= 0
5144 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5145 {
5146 tree type = TREE_TYPE (value);
5147
5148 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5149 {
5150 type = (*lang_hooks.types.type_for_size)
5151 (BITS_PER_WORD, TREE_UNSIGNED (type));
5152 value = convert (type, value);
5153 }
5154
5155 if (BYTES_BIG_ENDIAN)
5156 value
5157 = fold (build (LSHIFT_EXPR, type, value,
5158 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5159 bitsize = BITS_PER_WORD;
5160 mode = word_mode;
5161 }
5162 #endif
5163
5164 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5165 && DECL_NONADDRESSABLE_P (field))
5166 {
5167 to_rtx = copy_rtx (to_rtx);
5168 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5169 }
5170
5171 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5172 value, type, cleared,
5173 get_alias_set (TREE_TYPE (field)));
5174 }
5175 }
5176 else if (TREE_CODE (type) == ARRAY_TYPE
5177 || TREE_CODE (type) == VECTOR_TYPE)
5178 {
5179 tree elt;
5180 int i;
5181 int need_to_clear;
5182 tree domain = TYPE_DOMAIN (type);
5183 tree elttype = TREE_TYPE (type);
5184 int const_bounds_p;
5185 HOST_WIDE_INT minelt = 0;
5186 HOST_WIDE_INT maxelt = 0;
5187
5188 /* Vectors are like arrays, but the domain is stored via an array
5189 type indirectly. */
5190 if (TREE_CODE (type) == VECTOR_TYPE)
5191 {
5192 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5193 the same field as TYPE_DOMAIN, we are not guaranteed that
5194 it always will. */
5195 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5196 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5197 }
5198
5199 const_bounds_p = (TYPE_MIN_VALUE (domain)
5200 && TYPE_MAX_VALUE (domain)
5201 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5202 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5203
5204 /* If we have constant bounds for the range of the type, get them. */
5205 if (const_bounds_p)
5206 {
5207 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5208 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5209 }
5210
5211 /* If the constructor has fewer elements than the array,
5212 clear the whole array first. Similarly if this is a
5213 static constructor of a non-BLKmode object. */
5214 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5215 need_to_clear = 1;
5216 else
5217 {
5218 HOST_WIDE_INT count = 0, zero_count = 0;
5219 need_to_clear = ! const_bounds_p;
5220
5221 /* This loop is a more accurate version of the loop in
5222 mostly_zeros_p (it handles RANGE_EXPR in an index).
5223 It is also needed to check for missing elements. */
5224 for (elt = CONSTRUCTOR_ELTS (exp);
5225 elt != NULL_TREE && ! need_to_clear;
5226 elt = TREE_CHAIN (elt))
5227 {
5228 tree index = TREE_PURPOSE (elt);
5229 HOST_WIDE_INT this_node_count;
5230
5231 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5232 {
5233 tree lo_index = TREE_OPERAND (index, 0);
5234 tree hi_index = TREE_OPERAND (index, 1);
5235
5236 if (! host_integerp (lo_index, 1)
5237 || ! host_integerp (hi_index, 1))
5238 {
5239 need_to_clear = 1;
5240 break;
5241 }
5242
5243 this_node_count = (tree_low_cst (hi_index, 1)
5244 - tree_low_cst (lo_index, 1) + 1);
5245 }
5246 else
5247 this_node_count = 1;
5248
5249 count += this_node_count;
5250 if (mostly_zeros_p (TREE_VALUE (elt)))
5251 zero_count += this_node_count;
5252 }
5253
5254 /* Clear the entire array first if there are any missing elements,
5255 or if the incidence of zero elements is >= 75%. */
5256 if (! need_to_clear
5257 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5258 need_to_clear = 1;
5259 }
5260
5261 if (need_to_clear && size > 0)
5262 {
5263 if (! cleared)
5264 {
5265 if (REG_P (target))
5266 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5267 else
5268 clear_storage (target, GEN_INT (size));
5269 }
5270 cleared = 1;
5271 }
5272 else if (REG_P (target))
5273 /* Inform later passes that the old value is dead. */
5274 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5275
5276 /* Store each element of the constructor into
5277 the corresponding element of TARGET, determined
5278 by counting the elements. */
5279 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5280 elt;
5281 elt = TREE_CHAIN (elt), i++)
5282 {
5283 enum machine_mode mode;
5284 HOST_WIDE_INT bitsize;
5285 HOST_WIDE_INT bitpos;
5286 int unsignedp;
5287 tree value = TREE_VALUE (elt);
5288 tree index = TREE_PURPOSE (elt);
5289 rtx xtarget = target;
5290
5291 if (cleared && is_zeros_p (value))
5292 continue;
5293
5294 unsignedp = TREE_UNSIGNED (elttype);
5295 mode = TYPE_MODE (elttype);
5296 if (mode == BLKmode)
5297 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5298 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5299 : -1);
5300 else
5301 bitsize = GET_MODE_BITSIZE (mode);
5302
5303 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5304 {
5305 tree lo_index = TREE_OPERAND (index, 0);
5306 tree hi_index = TREE_OPERAND (index, 1);
5307 rtx index_r, pos_rtx, loop_end;
5308 struct nesting *loop;
5309 HOST_WIDE_INT lo, hi, count;
5310 tree position;
5311
5312 /* If the range is constant and "small", unroll the loop. */
5313 if (const_bounds_p
5314 && host_integerp (lo_index, 0)
5315 && host_integerp (hi_index, 0)
5316 && (lo = tree_low_cst (lo_index, 0),
5317 hi = tree_low_cst (hi_index, 0),
5318 count = hi - lo + 1,
5319 (GET_CODE (target) != MEM
5320 || count <= 2
5321 || (host_integerp (TYPE_SIZE (elttype), 1)
5322 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5323 <= 40 * 8)))))
5324 {
5325 lo -= minelt; hi -= minelt;
5326 for (; lo <= hi; lo++)
5327 {
5328 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5329
5330 if (GET_CODE (target) == MEM
5331 && !MEM_KEEP_ALIAS_SET_P (target)
5332 && TREE_CODE (type) == ARRAY_TYPE
5333 && TYPE_NONALIASED_COMPONENT (type))
5334 {
5335 target = copy_rtx (target);
5336 MEM_KEEP_ALIAS_SET_P (target) = 1;
5337 }
5338
5339 store_constructor_field
5340 (target, bitsize, bitpos, mode, value, type, cleared,
5341 get_alias_set (elttype));
5342 }
5343 }
5344 else
5345 {
5346 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5347 loop_end = gen_label_rtx ();
5348
5349 unsignedp = TREE_UNSIGNED (domain);
5350
5351 index = build_decl (VAR_DECL, NULL_TREE, domain);
5352
5353 index_r
5354 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5355 &unsignedp, 0));
5356 SET_DECL_RTL (index, index_r);
5357 if (TREE_CODE (value) == SAVE_EXPR
5358 && SAVE_EXPR_RTL (value) == 0)
5359 {
5360 /* Make sure value gets expanded once before the
5361 loop. */
5362 expand_expr (value, const0_rtx, VOIDmode, 0);
5363 emit_queue ();
5364 }
5365 store_expr (lo_index, index_r, 0);
5366 loop = expand_start_loop (0);
5367
5368 /* Assign value to element index. */
5369 position
5370 = convert (ssizetype,
5371 fold (build (MINUS_EXPR, TREE_TYPE (index),
5372 index, TYPE_MIN_VALUE (domain))));
5373 position = size_binop (MULT_EXPR, position,
5374 convert (ssizetype,
5375 TYPE_SIZE_UNIT (elttype)));
5376
5377 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5378 xtarget = offset_address (target, pos_rtx,
5379 highest_pow2_factor (position));
5380 xtarget = adjust_address (xtarget, mode, 0);
5381 if (TREE_CODE (value) == CONSTRUCTOR)
5382 store_constructor (value, xtarget, cleared,
5383 bitsize / BITS_PER_UNIT);
5384 else
5385 store_expr (value, xtarget, 0);
5386
5387 expand_exit_loop_if_false (loop,
5388 build (LT_EXPR, integer_type_node,
5389 index, hi_index));
5390
5391 expand_increment (build (PREINCREMENT_EXPR,
5392 TREE_TYPE (index),
5393 index, integer_one_node), 0, 0);
5394 expand_end_loop ();
5395 emit_label (loop_end);
5396 }
5397 }
5398 else if ((index != 0 && ! host_integerp (index, 0))
5399 || ! host_integerp (TYPE_SIZE (elttype), 1))
5400 {
5401 tree position;
5402
5403 if (index == 0)
5404 index = ssize_int (1);
5405
5406 if (minelt)
5407 index = convert (ssizetype,
5408 fold (build (MINUS_EXPR, index,
5409 TYPE_MIN_VALUE (domain))));
5410
5411 position = size_binop (MULT_EXPR, index,
5412 convert (ssizetype,
5413 TYPE_SIZE_UNIT (elttype)));
5414 xtarget = offset_address (target,
5415 expand_expr (position, 0, VOIDmode, 0),
5416 highest_pow2_factor (position));
5417 xtarget = adjust_address (xtarget, mode, 0);
5418 store_expr (value, xtarget, 0);
5419 }
5420 else
5421 {
5422 if (index != 0)
5423 bitpos = ((tree_low_cst (index, 0) - minelt)
5424 * tree_low_cst (TYPE_SIZE (elttype), 1));
5425 else
5426 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5427
5428 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5429 && TREE_CODE (type) == ARRAY_TYPE
5430 && TYPE_NONALIASED_COMPONENT (type))
5431 {
5432 target = copy_rtx (target);
5433 MEM_KEEP_ALIAS_SET_P (target) = 1;
5434 }
5435
5436 store_constructor_field (target, bitsize, bitpos, mode, value,
5437 type, cleared, get_alias_set (elttype));
5438
5439 }
5440 }
5441 }
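/* Illustrative note on the RANGE_EXPR case above (hypothetical sizes): for a
   MEM target, an initializer range [0 ... 9] of 32-bit elements totals 320
   bits, which is within the 40 * 8 bit limit, so the stores are unrolled at
   compile time; a range of 11 such elements (352 bits) instead builds the
   run-time loop with expand_start_loop / expand_exit_loop_if_false.  */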
5442
5443 /* Set constructor assignments. */
5444 else if (TREE_CODE (type) == SET_TYPE)
5445 {
5446 tree elt = CONSTRUCTOR_ELTS (exp);
5447 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5448 tree domain = TYPE_DOMAIN (type);
5449 tree domain_min, domain_max, bitlength;
5450
5451 /* The default implementation strategy is to extract the constant
5452 parts of the constructor, use that to initialize the target,
5453 and then "or" in whatever non-constant ranges we need in addition.
5454
5455 If a large set is all zero or all ones, it is
5456 probably better to set it using memset (if available) or bzero.
5457 Also, if a large set has just a single range, it may be better
5458 to first clear the whole set (using bzero/memset) and then set
5459 the bits we want. */
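/* Illustrative example (editorial, not part of the original file): for a
   set constructor whose constant part is the bits 1 and 3 and whose
   non-constant part is a range LO..HI, the loop below packs bits 1 and 3
   into host words and stores them; the LO..HI range is then OR'ed in via
   the setbits/memset library calls further down.  */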
5460
5461 /* Check for all zeros. */
5462 if (elt == NULL_TREE && size > 0)
5463 {
5464 if (!cleared)
5465 clear_storage (target, GEN_INT (size));
5466 return;
5467 }
5468
5469 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5470 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5471 bitlength = size_binop (PLUS_EXPR,
5472 size_diffop (domain_max, domain_min),
5473 ssize_int (1));
5474
5475 nbits = tree_low_cst (bitlength, 1);
5476
5477 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5478 are "complicated" (more than one range), initialize (the
5479 constant parts) by copying from a constant. */
5480 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5481 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5482 {
5483 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5484 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5485 char *bit_buffer = (char *) alloca (nbits);
5486 HOST_WIDE_INT word = 0;
5487 unsigned int bit_pos = 0;
5488 unsigned int ibit = 0;
5489 unsigned int offset = 0; /* In bytes from beginning of set. */
5490
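/* Editorial sketch (illustrative, not in the original sources): with
   nbits == 10, set_word_size == 32, and bits 1 and 3 set, the loop below
   builds word == 0xA on a little-endian target (or (1 << 30) | (1 << 28)
   when BYTES_BIG_ENDIAN) and emits one store every
   set_word_size / BITS_PER_UNIT bytes.  */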
5491 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5492 for (;;)
5493 {
5494 if (bit_buffer[ibit])
5495 {
5496 if (BYTES_BIG_ENDIAN)
5497 word |= (1 << (set_word_size - 1 - bit_pos));
5498 else
5499 word |= 1 << bit_pos;
5500 }
5501
5502 bit_pos++; ibit++;
5503 if (bit_pos >= set_word_size || ibit == nbits)
5504 {
5505 if (word != 0 || ! cleared)
5506 {
5507 rtx datum = GEN_INT (word);
5508 rtx to_rtx;
5509
5510 /* The assumption here is that it is safe to use
5511 XEXP if the set is multi-word, but not if
5512 it's single-word. */
5513 if (GET_CODE (target) == MEM)
5514 to_rtx = adjust_address (target, mode, offset);
5515 else if (offset == 0)
5516 to_rtx = target;
5517 else
5518 abort ();
5519 emit_move_insn (to_rtx, datum);
5520 }
5521
5522 if (ibit == nbits)
5523 break;
5524 word = 0;
5525 bit_pos = 0;
5526 offset += set_word_size / BITS_PER_UNIT;
5527 }
5528 }
5529 }
5530 else if (!cleared)
5531 /* Don't bother clearing storage if the set is all ones. */
5532 if (TREE_CHAIN (elt) != NULL_TREE
5533 || (TREE_PURPOSE (elt) == NULL_TREE
5534 ? nbits != 1
5535 : ( ! host_integerp (TREE_VALUE (elt), 0)
5536 || ! host_integerp (TREE_PURPOSE (elt), 0)
5537 || (tree_low_cst (TREE_VALUE (elt), 0)
5538 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5539 != (HOST_WIDE_INT) nbits))))
5540 clear_storage (target, expr_size (exp));
5541
5542 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5543 {
5544 /* Start of range of element or NULL. */
5545 tree startbit = TREE_PURPOSE (elt);
5546 /* End of range of element, or element value. */
5547 tree endbit = TREE_VALUE (elt);
5548 HOST_WIDE_INT startb, endb;
5549 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5550
5551 bitlength_rtx = expand_expr (bitlength,
5552 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5553
5554 /* Handle a non-range tuple element like [ expr ]. */
5555 if (startbit == NULL_TREE)
5556 {
5557 startbit = save_expr (endbit);
5558 endbit = startbit;
5559 }
5560
5561 startbit = convert (sizetype, startbit);
5562 endbit = convert (sizetype, endbit);
5563 if (! integer_zerop (domain_min))
5564 {
5565 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5566 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5567 }
5568 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5569 EXPAND_CONST_ADDRESS);
5570 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5571 EXPAND_CONST_ADDRESS);
5572
5573 if (REG_P (target))
5574 {
5575 targetx
5576 = assign_temp
5577 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5578 (GET_MODE (target), 0),
5579 TYPE_QUAL_CONST)),
5580 0, 1, 1);
5581 emit_move_insn (targetx, target);
5582 }
5583
5584 else if (GET_CODE (target) == MEM)
5585 targetx = target;
5586 else
5587 abort ();
5588
5589 /* Optimization: If startbit and endbit are constants divisible
5590 by BITS_PER_UNIT, call memset instead. */
5591 if (TARGET_MEM_FUNCTIONS
5592 && TREE_CODE (startbit) == INTEGER_CST
5593 && TREE_CODE (endbit) == INTEGER_CST
5594 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5595 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5596 {
5597 emit_library_call (memset_libfunc, LCT_NORMAL,
5598 VOIDmode, 3,
5599 plus_constant (XEXP (targetx, 0),
5600 startb / BITS_PER_UNIT),
5601 Pmode,
5602 constm1_rtx, TYPE_MODE (integer_type_node),
5603 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5604 TYPE_MODE (sizetype));
5605 }
5606 else
5607 emit_library_call (setbits_libfunc, LCT_NORMAL,
5608 VOIDmode, 4, XEXP (targetx, 0),
5609 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5610 startbit_rtx, TYPE_MODE (sizetype),
5611 endbit_rtx, TYPE_MODE (sizetype));
5612
5613 if (REG_P (target))
5614 emit_move_insn (target, targetx);
5615 }
5616 }
5617
5618 else
5619 abort ();
5620 }
5621
5622 /* Store the value of EXP (an expression tree)
5623 into a subfield of TARGET which has mode MODE and occupies
5624 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5625 If MODE is VOIDmode, it means that we are storing into a bit-field.
5626
5627 If VALUE_MODE is VOIDmode, return nothing in particular.
5628 UNSIGNEDP is not used in this case.
5629
5630 Otherwise, return an rtx for the value stored. This rtx
5631 has mode VALUE_MODE if that is convenient to do.
5632 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5633
5634 TYPE is the type of the underlying object,
5635
5636 ALIAS_SET is the alias set for the destination. This value will
5637 (in general) be different from that for TARGET, since TARGET is a
5638 reference to the containing structure. */
5639
5640 static rtx
5641 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5642 alias_set)
5643 rtx target;
5644 HOST_WIDE_INT bitsize;
5645 HOST_WIDE_INT bitpos;
5646 enum machine_mode mode;
5647 tree exp;
5648 enum machine_mode value_mode;
5649 int unsignedp;
5650 tree type;
5651 int alias_set;
5652 {
5653 HOST_WIDE_INT width_mask = 0;
5654
5655 if (TREE_CODE (exp) == ERROR_MARK)
5656 return const0_rtx;
5657
5658 /* If we have nothing to store, do nothing unless the expression has
5659 side-effects. */
5660 if (bitsize == 0)
5661 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5662 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5663 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
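/* For example, bitsize == 5 gives width_mask == 0x1f; the mask is used
   further down to recover the stored value without refetching the
   bit-field.  */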
5664
5665 /* If we are storing into an unaligned field of an aligned union that is
5666 in a register, we may have the mode of TARGET being an integer mode but
5667 MODE == BLKmode. In that case, get an aligned object whose size and
5668 alignment are the same as TARGET and store TARGET into it (we can avoid
5669 the store if the field being stored is the entire width of TARGET). Then
5670 call ourselves recursively to store the field into a BLKmode version of
5671 that object. Finally, load from the object into TARGET. This is not
5672 very efficient in general, but should only be slightly more expensive
5673 than the otherwise-required unaligned accesses. Perhaps this can be
5674 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5675 twice, once with emit_move_insn and once via store_field. */
5676
5677 if (mode == BLKmode
5678 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5679 {
5680 rtx object = assign_temp (type, 0, 1, 1);
5681 rtx blk_object = adjust_address (object, BLKmode, 0);
5682
5683 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5684 emit_move_insn (object, target);
5685
5686 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5687 alias_set);
5688
5689 emit_move_insn (target, object);
5690
5691 /* We want to return the BLKmode version of the data. */
5692 return blk_object;
5693 }
5694
5695 if (GET_CODE (target) == CONCAT)
5696 {
5697 /* We're storing into a struct containing a single __complex. */
5698
5699 if (bitpos != 0)
5700 abort ();
5701 return store_expr (exp, target, 0);
5702 }
5703
5704 /* If the structure is in a register or if the component
5705 is a bit field, we cannot use addressing to access it.
5706 Use bit-field techniques or SUBREG to store in it. */
5707
5708 if (mode == VOIDmode
5709 || (mode != BLKmode && ! direct_store[(int) mode]
5710 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5711 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5712 || GET_CODE (target) == REG
5713 || GET_CODE (target) == SUBREG
5714 /* If the field isn't aligned enough to store as an ordinary memref,
5715 store it as a bit field. */
5716 || (mode != BLKmode
5717 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5718 || bitpos % GET_MODE_ALIGNMENT (mode))
5719 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5720 || (bitpos % BITS_PER_UNIT != 0)))
5721 /* If the RHS and field are a constant size and the size of the
5722 RHS isn't the same size as the bitfield, we must use bitfield
5723 operations. */
5724 || (bitsize >= 0
5725 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5726 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5727 {
5728 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5729
5730 /* If BITSIZE is narrower than the size of the type of EXP
5731 we will be narrowing TEMP. Normally, what's wanted are the
5732 low-order bits. However, if EXP's type is a record and this is
5733 a big-endian machine, we want the upper BITSIZE bits.
5734 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5735 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5736 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5737 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5738 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5739 - bitsize),
5740 NULL_RTX, 1);
5741
5742 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5743 MODE. */
5744 if (mode != VOIDmode && mode != BLKmode
5745 && mode != TYPE_MODE (TREE_TYPE (exp)))
5746 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5747
5748 /* If the modes of TARGET and TEMP are both BLKmode, both
5749 must be in memory and BITPOS must be aligned on a byte
5750 boundary. If so, we simply do a block copy. */
5751 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5752 {
5753 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5754 || bitpos % BITS_PER_UNIT != 0)
5755 abort ();
5756
5757 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5758 emit_block_move (target, temp,
5759 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5760 / BITS_PER_UNIT),
5761 BLOCK_OP_NORMAL);
5762
5763 return value_mode == VOIDmode ? const0_rtx : target;
5764 }
5765
5766 /* Store the value in the bitfield. */
5767 store_bit_field (target, bitsize, bitpos, mode, temp,
5768 int_size_in_bytes (type));
5769
5770 if (value_mode != VOIDmode)
5771 {
5772 /* The caller wants an rtx for the value.
5773 If possible, avoid refetching from the bitfield itself. */
5774 if (width_mask != 0
5775 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5776 {
5777 tree count;
5778 enum machine_mode tmode;
5779
5780 tmode = GET_MODE (temp);
5781 if (tmode == VOIDmode)
5782 tmode = value_mode;
5783
5784 if (unsignedp)
5785 return expand_and (tmode, temp,
5786 gen_int_mode (width_mask, tmode),
5787 NULL_RTX);
5788
5789 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5790 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5791 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5792 }
5793
5794 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5795 NULL_RTX, value_mode, VOIDmode,
5796 int_size_in_bytes (type));
5797 }
5798 return const0_rtx;
5799 }
5800 else
5801 {
5802 rtx addr = XEXP (target, 0);
5803 rtx to_rtx = target;
5804
5805 /* If a value is wanted, it must be the lhs;
5806 so make the address stable for multiple use. */
5807
5808 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5809 && ! CONSTANT_ADDRESS_P (addr)
5810 /* A frame-pointer reference is already stable. */
5811 && ! (GET_CODE (addr) == PLUS
5812 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5813 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5814 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5815 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5816
5817 /* Now build a reference to just the desired component. */
5818
5819 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5820
5821 if (to_rtx == target)
5822 to_rtx = copy_rtx (to_rtx);
5823
5824 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5825 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5826 set_mem_alias_set (to_rtx, alias_set);
5827
5828 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5829 }
5830 }
5831 \f
5832 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5833 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5834 codes and find the ultimate containing object, which we return.
5835
5836 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5837 bit position, and *PUNSIGNEDP to the signedness of the field.
5838 If the position of the field is variable, we store a tree
5839 giving the variable offset (in units) in *POFFSET.
5840 This offset is in addition to the bit position.
5841 If the position is not variable, we store 0 in *POFFSET.
5842
5843 If any of the extraction expressions is volatile,
5844 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5845
5846 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5847 is a mode that can be used to access the field. In that case, *PBITSIZE
5848 is redundant.
5849
5850 If the field describes a variable-sized object, *PMODE is set to
5851 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5852 this case, but the address of the object can be found. */
5853
5854 tree
5855 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5856 punsignedp, pvolatilep)
5857 tree exp;
5858 HOST_WIDE_INT *pbitsize;
5859 HOST_WIDE_INT *pbitpos;
5860 tree *poffset;
5861 enum machine_mode *pmode;
5862 int *punsignedp;
5863 int *pvolatilep;
5864 {
5865 tree size_tree = 0;
5866 enum machine_mode mode = VOIDmode;
5867 tree offset = size_zero_node;
5868 tree bit_offset = bitsize_zero_node;
5869 tree placeholder_ptr = 0;
5870 tree tem;
5871
5872 /* First get the mode, signedness, and size. We do this from just the
5873 outermost expression. */
5874 if (TREE_CODE (exp) == COMPONENT_REF)
5875 {
5876 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5877 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5878 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5879
5880 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5881 }
5882 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5883 {
5884 size_tree = TREE_OPERAND (exp, 1);
5885 *punsignedp = TREE_UNSIGNED (exp);
5886 }
5887 else
5888 {
5889 mode = TYPE_MODE (TREE_TYPE (exp));
5890 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5891
5892 if (mode == BLKmode)
5893 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5894 else
5895 *pbitsize = GET_MODE_BITSIZE (mode);
5896 }
5897
5898 if (size_tree != 0)
5899 {
5900 if (! host_integerp (size_tree, 1))
5901 mode = BLKmode, *pbitsize = -1;
5902 else
5903 *pbitsize = tree_low_cst (size_tree, 1);
5904 }
5905
5906 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5907 and find the ultimate containing object. */
5908 while (1)
5909 {
5910 if (TREE_CODE (exp) == BIT_FIELD_REF)
5911 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5912 else if (TREE_CODE (exp) == COMPONENT_REF)
5913 {
5914 tree field = TREE_OPERAND (exp, 1);
5915 tree this_offset = DECL_FIELD_OFFSET (field);
5916
5917 /* If this field hasn't been filled in yet, don't go
5918 past it. This should only happen when folding expressions
5919 made during type construction. */
5920 if (this_offset == 0)
5921 break;
5922 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5923 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5924
5925 offset = size_binop (PLUS_EXPR, offset, this_offset);
5926 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5927 DECL_FIELD_BIT_OFFSET (field));
5928
5929 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5930 }
5931
5932 else if (TREE_CODE (exp) == ARRAY_REF
5933 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5934 {
5935 tree index = TREE_OPERAND (exp, 1);
5936 tree array = TREE_OPERAND (exp, 0);
5937 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5938 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5939 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5940
5941 /* We assume all arrays have sizes that are a multiple of a byte.
5942 First subtract the lower bound, if any, in the type of the
5943 index, then convert to sizetype and multiply by the size of the
5944 array element. */
5945 if (low_bound != 0 && ! integer_zerop (low_bound))
5946 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5947 index, low_bound));
5948
5949 /* If the index has a self-referential type, pass it to a
5950 WITH_RECORD_EXPR; if the component size is self-referential,
5951 pass our component to one. */
5952 if (CONTAINS_PLACEHOLDER_P (index))
5953 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5954 if (CONTAINS_PLACEHOLDER_P (unit_size))
5955 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5956
5957 offset = size_binop (PLUS_EXPR, offset,
5958 size_binop (MULT_EXPR,
5959 convert (sizetype, index),
5960 unit_size));
5961 }
5962
5963 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5964 {
5965 tree new = find_placeholder (exp, &placeholder_ptr);
5966
5967 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5968 We might have been called from tree optimization where we
5969 haven't set up an object yet. */
5970 if (new == 0)
5971 break;
5972 else
5973 exp = new;
5974
5975 continue;
5976 }
5977
5978 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5979 conversions that don't change the mode, and all view conversions
5980 except those that need to "step up" the alignment. */
5981 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5982 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5983 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5984 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5985 && STRICT_ALIGNMENT
5986 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5987 < BIGGEST_ALIGNMENT)
5988 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5989 || TYPE_ALIGN_OK (TREE_TYPE
5990 (TREE_OPERAND (exp, 0))))))
5991 && ! ((TREE_CODE (exp) == NOP_EXPR
5992 || TREE_CODE (exp) == CONVERT_EXPR)
5993 && (TYPE_MODE (TREE_TYPE (exp))
5994 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5995 break;
5996
5997 /* If any reference in the chain is volatile, the effect is volatile. */
5998 if (TREE_THIS_VOLATILE (exp))
5999 *pvolatilep = 1;
6000
6001 exp = TREE_OPERAND (exp, 0);
6002 }
6003
6004 /* If OFFSET is constant, see if we can return the whole thing as a
6005 constant bit position. Otherwise, split it up. */
6006 if (host_integerp (offset, 0)
6007 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
6008 bitsize_unit_node))
6009 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
6010 && host_integerp (tem, 0))
6011 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
6012 else
6013 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
6014
6015 *pmode = mode;
6016 return exp;
6017 }
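
/* Editorial sketch (illustrative, not part of the original file): a typical
   caller unpacks a reference roughly like this, using the signature above:

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                     &mode1, &unsignedp, &volatilep);

   TEM is then the ultimate containing object, and BITSIZE, BITPOS and
   OFFSET locate the accessed field within it.  */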
6018
6019 /* Return 1 if T is an expression that get_inner_reference handles. */
6020
6021 int
6022 handled_component_p (t)
6023 tree t;
6024 {
6025 switch (TREE_CODE (t))
6026 {
6027 case BIT_FIELD_REF:
6028 case COMPONENT_REF:
6029 case ARRAY_REF:
6030 case ARRAY_RANGE_REF:
6031 case NON_LVALUE_EXPR:
6032 case VIEW_CONVERT_EXPR:
6033 return 1;
6034
6035 case NOP_EXPR:
6036 case CONVERT_EXPR:
6037 return (TYPE_MODE (TREE_TYPE (t))
6038 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
6039
6040 default:
6041 return 0;
6042 }
6043 }
6044 \f
6045 /* Given an rtx VALUE that may contain additions and multiplications, return
6046 an equivalent value that just refers to a register, memory, or constant.
6047 This is done by generating instructions to perform the arithmetic and
6048 returning a pseudo-register containing the value.
6049
6050 The returned value may be a REG, SUBREG, MEM or constant. */
6051
6052 rtx
6053 force_operand (value, target)
6054 rtx value, target;
6055 {
6056 rtx op1, op2;
6057 /* Use subtarget as the target for operand 0 of a binary operation. */
6058 rtx subtarget = get_subtarget (target);
6059 enum rtx_code code = GET_CODE (value);
6060
6061 /* Check for a PIC address load. */
6062 if ((code == PLUS || code == MINUS)
6063 && XEXP (value, 0) == pic_offset_table_rtx
6064 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6065 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6066 || GET_CODE (XEXP (value, 1)) == CONST))
6067 {
6068 if (!subtarget)
6069 subtarget = gen_reg_rtx (GET_MODE (value));
6070 emit_move_insn (subtarget, value);
6071 return subtarget;
6072 }
6073
6074 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
6075 {
6076 if (!target)
6077 target = gen_reg_rtx (GET_MODE (value));
6078 convert_move (target, force_operand (XEXP (value, 0), NULL),
6079 code == ZERO_EXTEND);
6080 return target;
6081 }
6082
6083 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
6084 {
6085 op2 = XEXP (value, 1);
6086 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
6087 subtarget = 0;
6088 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6089 {
6090 code = PLUS;
6091 op2 = negate_rtx (GET_MODE (value), op2);
6092 }
6093
6094 /* Check for an addition with OP2 a constant integer and our first
6095 operand a PLUS of a virtual register and something else. In that
6096 case, we want to emit the sum of the virtual register and the
6097 constant first and then add the other value. This allows virtual
6098 register instantiation to simply modify the constant rather than
6099 creating another one around this addition. */
6100 if (code == PLUS && GET_CODE (op2) == CONST_INT
6101 && GET_CODE (XEXP (value, 0)) == PLUS
6102 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
6103 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6104 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6105 {
6106 rtx temp = expand_simple_binop (GET_MODE (value), code,
6107 XEXP (XEXP (value, 0), 0), op2,
6108 subtarget, 0, OPTAB_LIB_WIDEN);
6109 return expand_simple_binop (GET_MODE (value), code, temp,
6110 force_operand (XEXP (XEXP (value,
6111 0), 1), 0),
6112 target, 0, OPTAB_LIB_WIDEN);
6113 }
6114
6115 op1 = force_operand (XEXP (value, 0), subtarget);
6116 op2 = force_operand (op2, NULL_RTX);
6117 switch (code)
6118 {
6119 case MULT:
6120 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6121 case DIV:
6122 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6123 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6124 target, 1, OPTAB_LIB_WIDEN);
6125 else
6126 return expand_divmod (0,
6127 FLOAT_MODE_P (GET_MODE (value))
6128 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6129 GET_MODE (value), op1, op2, target, 0);
6130 break;
6131 case MOD:
6132 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6133 target, 0);
6134 break;
6135 case UDIV:
6136 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6137 target, 1);
6138 break;
6139 case UMOD:
6140 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6141 target, 1);
6142 break;
6143 case ASHIFTRT:
6144 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6145 target, 0, OPTAB_LIB_WIDEN);
6146 break;
6147 default:
6148 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6149 target, 1, OPTAB_LIB_WIDEN);
6150 }
6151 }
6152 if (GET_RTX_CLASS (code) == '1')
6153 {
6154 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6155 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6156 }
6157
6158 #ifdef INSN_SCHEDULING
6159 /* On machines that have insn scheduling, we want all memory references to be
6160 explicit, so we need to deal with such paradoxical SUBREGs. */
6161 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6162 && (GET_MODE_SIZE (GET_MODE (value))
6163 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6164 value
6165 = simplify_gen_subreg (GET_MODE (value),
6166 force_reg (GET_MODE (SUBREG_REG (value)),
6167 force_operand (SUBREG_REG (value),
6168 NULL_RTX)),
6169 GET_MODE (SUBREG_REG (value)),
6170 SUBREG_BYTE (value));
6171 #endif
6172
6173 return value;
6174 }
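
/* Editorial sketch (illustrative, not part of the original file; BASE_REG
   is a hypothetical pseudo register): forcing an address computation such
   as

     rtx sum = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (4));
     rtx op = force_operand (sum, NULL_RTX);

   emits the addition and returns a pseudo register holding BASE_REG + 4;
   a value that is already a register, memory reference or constant is
   returned unchanged.  */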
6175 \f
6176 /* Subroutine of expand_expr: return nonzero iff there is no way that
6177 EXP can reference X, which is being modified. TOP_P is nonzero if this
6178 call is going to be used to determine whether we need a temporary
6179 for EXP, as opposed to a recursive call to this function.
6180
6181 It is always safe for this routine to return zero since it merely
6182 searches for optimization opportunities. */
6183
6184 int
6185 safe_from_p (x, exp, top_p)
6186 rtx x;
6187 tree exp;
6188 int top_p;
6189 {
6190 rtx exp_rtl = 0;
6191 int i, nops;
6192 static tree save_expr_list;
6193
6194 if (x == 0
6195 /* If EXP has varying size, we MUST use a target since we currently
6196 have no way of allocating temporaries of variable size
6197 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6198 So we assume here that something at a higher level has prevented a
6199 clash. This is somewhat bogus, but the best we can do. Only
6200 do this when X is BLKmode and when we are at the top level. */
6201 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6202 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6203 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6204 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6205 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6206 != INTEGER_CST)
6207 && GET_MODE (x) == BLKmode)
6208 /* If X is in the outgoing argument area, it is always safe. */
6209 || (GET_CODE (x) == MEM
6210 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6211 || (GET_CODE (XEXP (x, 0)) == PLUS
6212 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6213 return 1;
6214
6215 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6216 find the underlying pseudo. */
6217 if (GET_CODE (x) == SUBREG)
6218 {
6219 x = SUBREG_REG (x);
6220 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6221 return 0;
6222 }
6223
6224 /* A SAVE_EXPR might appear many times in the expression passed to the
6225 top-level safe_from_p call, and if it has a complex subexpression,
6226 examining it multiple times could result in a combinatorial explosion.
6227 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6228 with optimization took about 28 minutes to compile -- even though it was
6229 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6230 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6231 we have processed. Note that the only test of top_p was above. */
6232
6233 if (top_p)
6234 {
6235 int rtn;
6236 tree t;
6237
6238 save_expr_list = 0;
6239
6240 rtn = safe_from_p (x, exp, 0);
6241
6242 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6243 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6244
6245 return rtn;
6246 }
6247
6248 /* Now look at our tree code and possibly recurse. */
6249 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6250 {
6251 case 'd':
6252 exp_rtl = DECL_RTL_IF_SET (exp);
6253 break;
6254
6255 case 'c':
6256 return 1;
6257
6258 case 'x':
6259 if (TREE_CODE (exp) == TREE_LIST)
6260 {
6261 while (1)
6262 {
6263 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6264 return 0;
6265 exp = TREE_CHAIN (exp);
6266 if (!exp)
6267 return 1;
6268 if (TREE_CODE (exp) != TREE_LIST)
6269 return safe_from_p (x, exp, 0);
6270 }
6271 }
6272 else if (TREE_CODE (exp) == ERROR_MARK)
6273 return 1; /* An already-visited SAVE_EXPR? */
6274 else
6275 return 0;
6276
6277 case '2':
6278 case '<':
6279 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6280 return 0;
6281 /* FALLTHRU */
6282
6283 case '1':
6284 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6285
6286 case 'e':
6287 case 'r':
6288 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6289 the expression. If it is set, we conflict iff we are that rtx or
6290 both are in memory. Otherwise, we check all operands of the
6291 expression recursively. */
6292
6293 switch (TREE_CODE (exp))
6294 {
6295 case ADDR_EXPR:
6296 /* If the operand is static or we are static, we can't conflict.
6297 Likewise if we don't conflict with the operand at all. */
6298 if (staticp (TREE_OPERAND (exp, 0))
6299 || TREE_STATIC (exp)
6300 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6301 return 1;
6302
6303 /* Otherwise, the only way this can conflict is if we are taking
6304 the address of a DECL whose address is part of X, which is
6305 very rare. */
6306 exp = TREE_OPERAND (exp, 0);
6307 if (DECL_P (exp))
6308 {
6309 if (!DECL_RTL_SET_P (exp)
6310 || GET_CODE (DECL_RTL (exp)) != MEM)
6311 return 0;
6312 else
6313 exp_rtl = XEXP (DECL_RTL (exp), 0);
6314 }
6315 break;
6316
6317 case INDIRECT_REF:
6318 if (GET_CODE (x) == MEM
6319 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6320 get_alias_set (exp)))
6321 return 0;
6322 break;
6323
6324 case CALL_EXPR:
6325 /* Assume that the call will clobber all hard registers and
6326 all of memory. */
6327 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6328 || GET_CODE (x) == MEM)
6329 return 0;
6330 break;
6331
6332 case RTL_EXPR:
6333 /* If a sequence exists, we would have to scan every instruction
6334 in the sequence to see if it was safe. This is probably not
6335 worthwhile. */
6336 if (RTL_EXPR_SEQUENCE (exp))
6337 return 0;
6338
6339 exp_rtl = RTL_EXPR_RTL (exp);
6340 break;
6341
6342 case WITH_CLEANUP_EXPR:
6343 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6344 break;
6345
6346 case CLEANUP_POINT_EXPR:
6347 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6348
6349 case SAVE_EXPR:
6350 exp_rtl = SAVE_EXPR_RTL (exp);
6351 if (exp_rtl)
6352 break;
6353
6354 /* If we've already scanned this, don't do it again. Otherwise,
6355 show we've scanned it and record for clearing the flag if we're
6356 going on. */
6357 if (TREE_PRIVATE (exp))
6358 return 1;
6359
6360 TREE_PRIVATE (exp) = 1;
6361 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6362 {
6363 TREE_PRIVATE (exp) = 0;
6364 return 0;
6365 }
6366
6367 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6368 return 1;
6369
6370 case BIND_EXPR:
6371 /* The only operand we look at is operand 1. The rest aren't
6372 part of the expression. */
6373 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6374
6375 case METHOD_CALL_EXPR:
6376 /* This takes an rtx argument, but shouldn't appear here. */
6377 abort ();
6378
6379 default:
6380 break;
6381 }
6382
6383 /* If we have an rtx, we do not need to scan our operands. */
6384 if (exp_rtl)
6385 break;
6386
6387 nops = first_rtl_op (TREE_CODE (exp));
6388 for (i = 0; i < nops; i++)
6389 if (TREE_OPERAND (exp, i) != 0
6390 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6391 return 0;
6392
6393 /* If this is a language-specific tree code, it may require
6394 special handling. */
6395 if ((unsigned int) TREE_CODE (exp)
6396 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6397 && !(*lang_hooks.safe_from_p) (x, exp))
6398 return 0;
6399 }
6400
6401 /* If we have an rtl, find any enclosed object. Then see if we conflict
6402 with it. */
6403 if (exp_rtl)
6404 {
6405 if (GET_CODE (exp_rtl) == SUBREG)
6406 {
6407 exp_rtl = SUBREG_REG (exp_rtl);
6408 if (GET_CODE (exp_rtl) == REG
6409 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6410 return 0;
6411 }
6412
6413 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6414 are memory and they conflict. */
6415 return ! (rtx_equal_p (x, exp_rtl)
6416 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6417 && true_dependence (exp_rtl, VOIDmode, x,
6418 rtx_addr_varies_p)));
6419 }
6420
6421 /* If we reach here, it is safe. */
6422 return 1;
6423 }
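
/* Editorial note (illustrative, not part of the original file): a caller
   that wants to reuse a proposed target typically guards it with

     if (target != 0 && ! safe_from_p (target, exp, 1))
       target = 0;

   falling back to a fresh temporary whenever EXP might reference TARGET.  */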
6424
6425 /* Subroutine of expand_expr: return rtx if EXP is a
6426 variable or parameter; else return 0. */
6427
6428 static rtx
6429 var_rtx (exp)
6430 tree exp;
6431 {
6432 STRIP_NOPS (exp);
6433 switch (TREE_CODE (exp))
6434 {
6435 case PARM_DECL:
6436 case VAR_DECL:
6437 return DECL_RTL (exp);
6438 default:
6439 return 0;
6440 }
6441 }
6442
6443 #ifdef MAX_INTEGER_COMPUTATION_MODE
6444
6445 void
6446 check_max_integer_computation_mode (exp)
6447 tree exp;
6448 {
6449 enum tree_code code;
6450 enum machine_mode mode;
6451
6452 /* Strip any NOPs that don't change the mode. */
6453 STRIP_NOPS (exp);
6454 code = TREE_CODE (exp);
6455
6456 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6457 if (code == NOP_EXPR
6458 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6459 return;
6460
6461 /* First check the type of the overall operation. We need only look at
6462 unary, binary and relational operations. */
6463 if (TREE_CODE_CLASS (code) == '1'
6464 || TREE_CODE_CLASS (code) == '2'
6465 || TREE_CODE_CLASS (code) == '<')
6466 {
6467 mode = TYPE_MODE (TREE_TYPE (exp));
6468 if (GET_MODE_CLASS (mode) == MODE_INT
6469 && mode > MAX_INTEGER_COMPUTATION_MODE)
6470 internal_error ("unsupported wide integer operation");
6471 }
6472
6473 /* Check operand of a unary op. */
6474 if (TREE_CODE_CLASS (code) == '1')
6475 {
6476 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6477 if (GET_MODE_CLASS (mode) == MODE_INT
6478 && mode > MAX_INTEGER_COMPUTATION_MODE)
6479 internal_error ("unsupported wide integer operation");
6480 }
6481
6482 /* Check operands of a binary/comparison op. */
6483 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6484 {
6485 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6486 if (GET_MODE_CLASS (mode) == MODE_INT
6487 && mode > MAX_INTEGER_COMPUTATION_MODE)
6488 internal_error ("unsupported wide integer operation");
6489
6490 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6491 if (GET_MODE_CLASS (mode) == MODE_INT
6492 && mode > MAX_INTEGER_COMPUTATION_MODE)
6493 internal_error ("unsupported wide integer operation");
6494 }
6495 }
6496 #endif
6497 \f
6498 /* Return the highest power of two that EXP is known to be a multiple of.
6499 This is used in updating alignment of MEMs in array references. */
6500
6501 static unsigned HOST_WIDE_INT
6502 highest_pow2_factor (exp)
6503 tree exp;
6504 {
6505 unsigned HOST_WIDE_INT c0, c1;
6506
6507 switch (TREE_CODE (exp))
6508 {
6509 case INTEGER_CST:
6510 /* We can find the lowest bit that's a one. If the low
6511 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6512 We need to handle this case since we can find it in a COND_EXPR,
6513 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6514 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6515 later ICE. */
6516 if (TREE_CONSTANT_OVERFLOW (exp))
6517 return BIGGEST_ALIGNMENT;
6518 else
6519 {
6520 /* Note: tree_low_cst is intentionally not used here,
6521 we don't care about the upper bits. */
6522 c0 = TREE_INT_CST_LOW (exp);
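/* C0 &= -C0 isolates the lowest set bit; e.g. 12 (binary 1100) yields 4,
   the largest power of two dividing the constant.  */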
6523 c0 &= -c0;
6524 return c0 ? c0 : BIGGEST_ALIGNMENT;
6525 }
6526 break;
6527
6528 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6529 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6530 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6531 return MIN (c0, c1);
6532
6533 case MULT_EXPR:
6534 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6535 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6536 return c0 * c1;
6537
6538 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6539 case CEIL_DIV_EXPR:
6540 if (integer_pow2p (TREE_OPERAND (exp, 1))
6541 && host_integerp (TREE_OPERAND (exp, 1), 1))
6542 {
6543 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6544 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6545 return MAX (1, c0 / c1);
6546 }
6547 break;
6548
6549 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6550 case SAVE_EXPR: case WITH_RECORD_EXPR:
6551 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6552
6553 case COMPOUND_EXPR:
6554 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6555
6556 case COND_EXPR:
6557 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6558 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6559 return MIN (c0, c1);
6560
6561 default:
6562 break;
6563 }
6564
6565 return 1;
6566 }
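
/* Editorial example (illustrative, not part of the original file): for
   EXP == I * 12 + 8 with I not constant, the MULT_EXPR contributes
   1 * 4 == 4 (the power-of-two part of 12) and the PLUS_EXPR takes
   MIN (4, 8) == 4, so EXP is known to be a multiple of 4.  */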
6567
6568 /* Similar, except that it is known that the expression must be a multiple
6569 of the alignment of TYPE. */
6570
6571 static unsigned HOST_WIDE_INT
6572 highest_pow2_factor_for_type (type, exp)
6573 tree type;
6574 tree exp;
6575 {
6576 unsigned HOST_WIDE_INT type_align, factor;
6577
6578 factor = highest_pow2_factor (exp);
6579 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6580 return MAX (factor, type_align);
6581 }
6582 \f
6583 /* Return an object on the placeholder list that matches EXP, a
6584 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6585 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6586 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6587 is the address of a starting position in the placeholder list (zero
6588 means start at the head of the list); on success, a pointer to the
6589 placeholder list entry at which the object was found is stored there. */
6590
6591 tree
6592 find_placeholder (exp, plist)
6593 tree exp;
6594 tree *plist;
6595 {
6596 tree type = TREE_TYPE (exp);
6597 tree placeholder_expr;
6598
6599 for (placeholder_expr
6600 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6601 placeholder_expr != 0;
6602 placeholder_expr = TREE_CHAIN (placeholder_expr))
6603 {
6604 tree need_type = TYPE_MAIN_VARIANT (type);
6605 tree elt;
6606
6607 /* Find the outermost reference that is of the type we want. If none,
6608 see if any object has a type that is a pointer to the type we
6609 want. */
6610 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6611 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6612 || TREE_CODE (elt) == COND_EXPR)
6613 ? TREE_OPERAND (elt, 1)
6614 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6615 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6616 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6617 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6618 ? TREE_OPERAND (elt, 0) : 0))
6619 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6620 {
6621 if (plist)
6622 *plist = placeholder_expr;
6623 return elt;
6624 }
6625
6626 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6627 elt
6628 = ((TREE_CODE (elt) == COMPOUND_EXPR
6629 || TREE_CODE (elt) == COND_EXPR)
6630 ? TREE_OPERAND (elt, 1)
6631 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6632 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6633 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6634 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6635 ? TREE_OPERAND (elt, 0) : 0))
6636 if (POINTER_TYPE_P (TREE_TYPE (elt))
6637 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6638 == need_type))
6639 {
6640 if (plist)
6641 *plist = placeholder_expr;
6642 return build1 (INDIRECT_REF, need_type, elt);
6643 }
6644 }
6645
6646 return 0;
6647 }
6648 \f
6649 /* expand_expr: generate code for computing expression EXP.
6650 An rtx for the computed value is returned. The value is never null.
6651 In the case of a void EXP, const0_rtx is returned.
6652
6653 The value may be stored in TARGET if TARGET is nonzero.
6654 TARGET is just a suggestion; callers must assume that
6655 the rtx returned may not be the same as TARGET.
6656
6657 If TARGET is CONST0_RTX, it means that the value will be ignored.
6658
6659 If TMODE is not VOIDmode, it suggests generating the
6660 result in mode TMODE. But this is done only when convenient.
6661 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6662 TMODE is just a suggestion; callers must assume that
6663 the rtx returned may not have mode TMODE.
6664
6665 Note that TARGET may have neither TMODE nor MODE. In that case, it
6666 probably will not be used.
6667
6668 If MODIFIER is EXPAND_SUM then when EXP is an addition
6669 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6670 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6671 products as above, or REG or MEM, or constant.
6672 Ordinarily in such cases we would output mul or add instructions
6673 and then return a pseudo reg containing the sum.
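   For example, when the address of a[i] is computed under EXPAND_SUM
   (assuming 4-byte elements), the result may be something like
   (plus (symbol_ref a) (mult (reg i) (const_int 4))), leaving the final
   addition to the caller.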
6674
6675 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6676 it also marks a label as absolutely required (it can't be dead).
6677 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6678 This is used for outputting expressions used in initializers.
6679
6680 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6681 with a constant address even if that address is not normally legitimate.
6682 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6683
6684 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6685 a call parameter. Such targets require special care as we haven't yet
6686 marked TARGET so that it's safe from being trashed by libcalls. We
6687 don't want to use TARGET for anything but the final result;
6688 Intermediate values must go elsewhere. Additionally, calls to
6689 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
6690
6691 rtx
6692 expand_expr (exp, target, tmode, modifier)
6693 tree exp;
6694 rtx target;
6695 enum machine_mode tmode;
6696 enum expand_modifier modifier;
6697 {
6698 rtx op0, op1, temp;
6699 tree type = TREE_TYPE (exp);
6700 int unsignedp = TREE_UNSIGNED (type);
6701 enum machine_mode mode;
6702 enum tree_code code = TREE_CODE (exp);
6703 optab this_optab;
6704 rtx subtarget, original_target;
6705 int ignore;
6706 tree context;
6707
6708 /* Handle ERROR_MARK before anybody tries to access its type. */
6709 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6710 {
6711 op0 = CONST0_RTX (tmode);
6712 if (op0 != 0)
6713 return op0;
6714 return const0_rtx;
6715 }
6716
6717 mode = TYPE_MODE (type);
6718 /* Use subtarget as the target for operand 0 of a binary operation. */
6719 subtarget = get_subtarget (target);
6720 original_target = target;
6721 ignore = (target == const0_rtx
6722 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6723 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6724 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6725 && TREE_CODE (type) == VOID_TYPE));
6726
6727 /* If we are going to ignore this result, we need only do something
6728 if there is a side-effect somewhere in the expression. If there
6729 is, short-circuit the most common cases here. Note that we must
6730 not call expand_expr with anything but const0_rtx in case this
6731 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6732
6733 if (ignore)
6734 {
6735 if (! TREE_SIDE_EFFECTS (exp))
6736 return const0_rtx;
6737
6738 /* Ensure we reference a volatile object even if value is ignored, but
6739 don't do this if all we are doing is taking its address. */
6740 if (TREE_THIS_VOLATILE (exp)
6741 && TREE_CODE (exp) != FUNCTION_DECL
6742 && mode != VOIDmode && mode != BLKmode
6743 && modifier != EXPAND_CONST_ADDRESS)
6744 {
6745 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6746 if (GET_CODE (temp) == MEM)
6747 temp = copy_to_reg (temp);
6748 return const0_rtx;
6749 }
6750
6751 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6752 || code == INDIRECT_REF || code == BUFFER_REF)
6753 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6754 modifier);
6755
6756 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6757 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6758 {
6759 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6760 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6761 return const0_rtx;
6762 }
6763 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6764 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6765 /* If the second operand has no side effects, just evaluate
6766 the first. */
6767 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6768 modifier);
6769 else if (code == BIT_FIELD_REF)
6770 {
6771 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6772 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6773 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6774 return const0_rtx;
6775 }
6776
6777 target = 0;
6778 }
6779
6780 #ifdef MAX_INTEGER_COMPUTATION_MODE
6781 /* Only check stuff here if the mode we want is different from the mode
6782 of the expression; if it's the same, check_max_integer_computation_mode
6783 will handle it. Do we really need to check this stuff at all? */
6784
6785 if (target
6786 && GET_MODE (target) != mode
6787 && TREE_CODE (exp) != INTEGER_CST
6788 && TREE_CODE (exp) != PARM_DECL
6789 && TREE_CODE (exp) != ARRAY_REF
6790 && TREE_CODE (exp) != ARRAY_RANGE_REF
6791 && TREE_CODE (exp) != COMPONENT_REF
6792 && TREE_CODE (exp) != BIT_FIELD_REF
6793 && TREE_CODE (exp) != INDIRECT_REF
6794 && TREE_CODE (exp) != CALL_EXPR
6795 && TREE_CODE (exp) != VAR_DECL
6796 && TREE_CODE (exp) != RTL_EXPR)
6797 {
6798 enum machine_mode mode = GET_MODE (target);
6799
6800 if (GET_MODE_CLASS (mode) == MODE_INT
6801 && mode > MAX_INTEGER_COMPUTATION_MODE)
6802 internal_error ("unsupported wide integer operation");
6803 }
6804
6805 if (tmode != mode
6806 && TREE_CODE (exp) != INTEGER_CST
6807 && TREE_CODE (exp) != PARM_DECL
6808 && TREE_CODE (exp) != ARRAY_REF
6809 && TREE_CODE (exp) != ARRAY_RANGE_REF
6810 && TREE_CODE (exp) != COMPONENT_REF
6811 && TREE_CODE (exp) != BIT_FIELD_REF
6812 && TREE_CODE (exp) != INDIRECT_REF
6813 && TREE_CODE (exp) != VAR_DECL
6814 && TREE_CODE (exp) != CALL_EXPR
6815 && TREE_CODE (exp) != RTL_EXPR
6816 && GET_MODE_CLASS (tmode) == MODE_INT
6817 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6818 internal_error ("unsupported wide integer operation");
6819
6820 check_max_integer_computation_mode (exp);
6821 #endif
6822
6823 /* If will do cse, generate all results into pseudo registers
6824 since 1) that allows cse to find more things
6825 and 2) otherwise cse could produce an insn the machine
6826 cannot support. An exception is a CONSTRUCTOR into a multi-word
6827 MEM: that's much more likely to be most efficient into the MEM.
6828 Another is a CALL_EXPR which must return in memory. */
6829
6830 if (! cse_not_expected && mode != BLKmode && target
6831 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6832 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6833 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6834 target = 0;
6835
6836 switch (code)
6837 {
6838 case LABEL_DECL:
6839 {
6840 tree function = decl_function_context (exp);
6841 /* Labels in containing functions, or labels used from initializers,
6842 must be forced. */
6843 if (modifier == EXPAND_INITIALIZER
6844 || (function != current_function_decl
6845 && function != inline_function_decl
6846 && function != 0))
6847 temp = force_label_rtx (exp);
6848 else
6849 temp = label_rtx (exp);
6850
6851 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6852 if (function != current_function_decl
6853 && function != inline_function_decl && function != 0)
6854 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6855 return temp;
6856 }
6857
6858 case PARM_DECL:
6859 if (!DECL_RTL_SET_P (exp))
6860 {
6861 error_with_decl (exp, "prior parameter's size depends on `%s'");
6862 return CONST0_RTX (mode);
6863 }
6864
6865 /* ... fall through ... */
6866
6867 case VAR_DECL:
6868 /* If a static var's type was incomplete when the decl was written,
6869 but the type is complete now, lay out the decl now. */
6870 if (DECL_SIZE (exp) == 0
6871 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6872 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6873 layout_decl (exp, 0);
6874
6875 /* ... fall through ... */
6876
6877 case FUNCTION_DECL:
6878 case RESULT_DECL:
6879 if (DECL_RTL (exp) == 0)
6880 abort ();
6881
6882 /* Ensure the variable is marked as used even if it doesn't go through
6883 a parser. If it hasn't been used yet, write out an external
6884 definition. */
6885 if (! TREE_USED (exp))
6886 {
6887 assemble_external (exp);
6888 TREE_USED (exp) = 1;
6889 }
6890
6891 /* Show we haven't gotten RTL for this yet. */
6892 temp = 0;
6893
6894 /* Handle variables inherited from containing functions. */
6895 context = decl_function_context (exp);
6896
6897 /* We treat inline_function_decl as an alias for the current function
6898 because that is the inline function whose vars, types, etc.
6899 are being merged into the current function.
6900 See expand_inline_function. */
6901
6902 if (context != 0 && context != current_function_decl
6903 && context != inline_function_decl
6904 /* If var is static, we don't need a static chain to access it. */
6905 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6906 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6907 {
6908 rtx addr;
6909
6910 /* Mark as non-local and addressable. */
6911 DECL_NONLOCAL (exp) = 1;
6912 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6913 abort ();
6914 (*lang_hooks.mark_addressable) (exp);
6915 if (GET_CODE (DECL_RTL (exp)) != MEM)
6916 abort ();
6917 addr = XEXP (DECL_RTL (exp), 0);
6918 if (GET_CODE (addr) == MEM)
6919 addr
6920 = replace_equiv_address (addr,
6921 fix_lexical_addr (XEXP (addr, 0), exp));
6922 else
6923 addr = fix_lexical_addr (addr, exp);
6924
6925 temp = replace_equiv_address (DECL_RTL (exp), addr);
6926 }
6927
6928 /* This is the case of an array whose size is to be determined
6929 from its initializer, while the initializer is still being parsed.
6930 See expand_decl. */
6931
6932 else if (GET_CODE (DECL_RTL (exp)) == MEM
6933 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6934 temp = validize_mem (DECL_RTL (exp));
6935
6936 /* If DECL_RTL is memory, we are in the normal case and either
6937 the address is not valid or it is not a register and -fforce-addr
6938 is specified, get the address into a register. */
6939
6940 else if (GET_CODE (DECL_RTL (exp)) == MEM
6941 && modifier != EXPAND_CONST_ADDRESS
6942 && modifier != EXPAND_SUM
6943 && modifier != EXPAND_INITIALIZER
6944 && (! memory_address_p (DECL_MODE (exp),
6945 XEXP (DECL_RTL (exp), 0))
6946 || (flag_force_addr
6947 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6948 temp = replace_equiv_address (DECL_RTL (exp),
6949 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6950
6951 /* If we got something, return it. But first, set the alignment
6952 if the address is a register. */
6953 if (temp != 0)
6954 {
6955 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6956 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6957
6958 return temp;
6959 }
6960
6961 /* If the mode of DECL_RTL does not match that of the decl, it
6962 must be a promoted value. We return a SUBREG of the wanted mode,
6963 but mark it so that we know that it was already extended. */
6964
6965 if (GET_CODE (DECL_RTL (exp)) == REG
6966 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6967 {
6968 /* Get the signedness used for this variable. Ensure we get the
6969 same mode we got when the variable was declared. */
6970 if (GET_MODE (DECL_RTL (exp))
6971 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6972 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6973 abort ();
6974
6975 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6976 SUBREG_PROMOTED_VAR_P (temp) = 1;
6977 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6978 return temp;
6979 }
6980
6981 return DECL_RTL (exp);
6982
6983 case INTEGER_CST:
6984 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6985 TREE_INT_CST_HIGH (exp), mode);
6986
6987 /* ??? If overflow is set, fold will have done an incomplete job,
6988 which can result in (plus xx (const_int 0)), which can get
6989 simplified by validate_replace_rtx during virtual register
6990 instantiation, which can result in unrecognizable insns.
6991 Avoid this by forcing all overflows into registers. */
6992 if (TREE_CONSTANT_OVERFLOW (exp)
6993 && modifier != EXPAND_INITIALIZER)
6994 temp = force_reg (mode, temp);
6995
6996 return temp;
6997
6998 case VECTOR_CST:
6999 return const_vector_from_tree (exp);
7000
7001 case CONST_DECL:
7002 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7003
7004 case REAL_CST:
7005 /* If optimized, generate immediate CONST_DOUBLE
7006 which will be turned into memory by reload if necessary.
7007
7008 We used to force a register so that loop.c could see it. But
7009 this does not allow gen_* patterns to perform optimizations with
7010 the constants. It also produces two insns in cases like "x = 1.0;".
7011 On most machines, floating-point constants are not permitted in
7012 many insns, so we'd end up copying it to a register in any case.
7013
7014 Now, we do the copying in expand_binop, if appropriate. */
7015 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7016 TYPE_MODE (TREE_TYPE (exp)));
7017
7018 case COMPLEX_CST:
7019 /* Handle evaluating a complex constant in a CONCAT target. */
7020 if (original_target && GET_CODE (original_target) == CONCAT)
7021 {
7022 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7023 rtx rtarg, itarg;
7024
7025 rtarg = XEXP (original_target, 0);
7026 itarg = XEXP (original_target, 1);
7027
7028 /* Move the real and imaginary parts separately. */
7029 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7030 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7031
7032 if (op0 != rtarg)
7033 emit_move_insn (rtarg, op0);
7034 if (op1 != itarg)
7035 emit_move_insn (itarg, op1);
7036
7037 return original_target;
7038 }
7039
7040 /* ... fall through ... */
7041
7042 case STRING_CST:
7043 temp = output_constant_def (exp, 1);
7044
7045 /* temp contains a constant address.
7046 On RISC machines where a constant address isn't valid,
7047 make some insns to get that address into a register. */
7048 if (modifier != EXPAND_CONST_ADDRESS
7049 && modifier != EXPAND_INITIALIZER
7050 && modifier != EXPAND_SUM
7051 && (! memory_address_p (mode, XEXP (temp, 0))
7052 || flag_force_addr))
7053 return replace_equiv_address (temp,
7054 copy_rtx (XEXP (temp, 0)));
7055 return temp;
7056
7057 case EXPR_WITH_FILE_LOCATION:
7058 {
7059 rtx to_return;
7060 location_t saved_loc = input_location;
7061 input_filename = EXPR_WFL_FILENAME (exp);
7062 input_line = EXPR_WFL_LINENO (exp);
7063 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
7064 emit_line_note (input_filename, input_line);
7065 /* Possibly avoid switching back and forth here. */
7066 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
7067 input_location = saved_loc;
7068 return to_return;
7069 }
7070
7071 case SAVE_EXPR:
7072 context = decl_function_context (exp);
7073
7074 /* If this SAVE_EXPR was at global context, assume we are an
7075 initialization function and move it into our context. */
7076 if (context == 0)
7077 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
7078
7079 /* We treat inline_function_decl as an alias for the current function
7080 because that is the inline function whose vars, types, etc.
7081 are being merged into the current function.
7082 See expand_inline_function. */
7083 if (context == current_function_decl || context == inline_function_decl)
7084 context = 0;
7085
7086 /* If this is non-local, handle it. */
7087 if (context)
7088 {
7089 /* The following call just exists to abort if the context is
7090 not of a containing function. */
7091 find_function_data (context);
7092
7093 temp = SAVE_EXPR_RTL (exp);
7094 if (temp && GET_CODE (temp) == REG)
7095 {
7096 put_var_into_stack (exp, /*rescan=*/true);
7097 temp = SAVE_EXPR_RTL (exp);
7098 }
7099 if (temp == 0 || GET_CODE (temp) != MEM)
7100 abort ();
7101 return
7102 replace_equiv_address (temp,
7103 fix_lexical_addr (XEXP (temp, 0), exp));
7104 }
7105 if (SAVE_EXPR_RTL (exp) == 0)
7106 {
7107 if (mode == VOIDmode)
7108 temp = const0_rtx;
7109 else
7110 temp = assign_temp (build_qualified_type (type,
7111 (TYPE_QUALS (type)
7112 | TYPE_QUAL_CONST)),
7113 3, 0, 0);
7114
7115 SAVE_EXPR_RTL (exp) = temp;
7116 if (!optimize && GET_CODE (temp) == REG)
7117 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7118 save_expr_regs);
7119
7120 /* If the mode of TEMP does not match that of the expression, it
7121 must be a promoted value. We pass store_expr a SUBREG of the
7122 wanted mode but mark it so that we know that it was already
7123 extended. */
7124
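/* Illustrative note: on a target that defines PROMOTE_MODE so that
   narrow values are kept in wider pseudos, TEMP here can be, say, an
   SImode REG even though the expression's mode is QImode; the SUBREG
   created below records that the value was already extended.  */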
7125 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7126 {
7127 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7128 promote_mode (type, mode, &unsignedp, 0);
7129 SUBREG_PROMOTED_VAR_P (temp) = 1;
7130 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7131 }
7132
7133 if (temp == const0_rtx)
7134 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7135 else
7136 store_expr (TREE_OPERAND (exp, 0), temp,
7137 modifier == EXPAND_STACK_PARM ? 2 : 0);
7138
7139 TREE_USED (exp) = 1;
7140 }
7141
7142 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7143 must be a promoted value. We return a SUBREG of the wanted mode,
7144 but mark it so that we know that it was already extended. */
7145
7146 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7147 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7148 {
7149 /* Compute the signedness and make the proper SUBREG. */
7150 promote_mode (type, mode, &unsignedp, 0);
7151 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7152 SUBREG_PROMOTED_VAR_P (temp) = 1;
7153 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7154 return temp;
7155 }
7156
7157 return SAVE_EXPR_RTL (exp);
7158
7159 case UNSAVE_EXPR:
7160 {
7161 rtx temp;
7162 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7163 TREE_OPERAND (exp, 0)
7164 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7165 return temp;
7166 }
7167
7168 case PLACEHOLDER_EXPR:
7169 {
7170 tree old_list = placeholder_list;
7171 tree placeholder_expr = 0;
7172
7173 exp = find_placeholder (exp, &placeholder_expr);
7174 if (exp == 0)
7175 abort ();
7176
7177 placeholder_list = TREE_CHAIN (placeholder_expr);
7178 temp = expand_expr (exp, original_target, tmode, modifier);
7179 placeholder_list = old_list;
7180 return temp;
7181 }
7182
7183 case WITH_RECORD_EXPR:
7184 /* Put the object on the placeholder list, expand our first operand,
7185 and pop the list. */
7186 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7187 placeholder_list);
7188 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7189 modifier);
7190 placeholder_list = TREE_CHAIN (placeholder_list);
7191 return target;
7192
7193 case GOTO_EXPR:
7194 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7195 expand_goto (TREE_OPERAND (exp, 0));
7196 else
7197 expand_computed_goto (TREE_OPERAND (exp, 0));
7198 return const0_rtx;
7199
7200 case EXIT_EXPR:
7201 expand_exit_loop_if_false (NULL,
7202 invert_truthvalue (TREE_OPERAND (exp, 0)));
7203 return const0_rtx;
7204
7205 case LABELED_BLOCK_EXPR:
7206 if (LABELED_BLOCK_BODY (exp))
7207 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7208 /* Should perhaps use expand_label, but this is simpler and safer. */
7209 do_pending_stack_adjust ();
7210 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7211 return const0_rtx;
7212
7213 case EXIT_BLOCK_EXPR:
7214 if (EXIT_BLOCK_RETURN (exp))
7215 sorry ("returned value in block_exit_expr");
7216 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7217 return const0_rtx;
7218
7219 case LOOP_EXPR:
7220 push_temp_slots ();
7221 expand_start_loop (1);
7222 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7223 expand_end_loop ();
7224 pop_temp_slots ();
7225
7226 return const0_rtx;
7227
7228 case BIND_EXPR:
7229 {
7230 tree vars = TREE_OPERAND (exp, 0);
7231
7232 /* Need to open a binding contour here because
7233 if there are any cleanups they must be contained here. */
7234 expand_start_bindings (2);
7235
7236 /* Mark the corresponding BLOCK for output in its proper place. */
7237 if (TREE_OPERAND (exp, 2) != 0
7238 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7239 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7240
7241 /* If VARS have not yet been expanded, expand them now. */
7242 while (vars)
7243 {
7244 if (!DECL_RTL_SET_P (vars))
7245 expand_decl (vars);
7246 expand_decl_init (vars);
7247 vars = TREE_CHAIN (vars);
7248 }
7249
7250 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7251
7252 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7253
7254 return temp;
7255 }
7256
7257 case RTL_EXPR:
7258 if (RTL_EXPR_SEQUENCE (exp))
7259 {
7260 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7261 abort ();
7262 emit_insn (RTL_EXPR_SEQUENCE (exp));
7263 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7264 }
7265 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7266 free_temps_for_rtl_expr (exp);
7267 return RTL_EXPR_RTL (exp);
7268
7269 case CONSTRUCTOR:
7270 /* If we don't need the result, just ensure we evaluate any
7271 subexpressions. */
7272 if (ignore)
7273 {
7274 tree elt;
7275
7276 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7277 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7278
7279 return const0_rtx;
7280 }
7281
7282 /* All elts simple constants => refer to a constant in memory. But
7283 if this is a non-BLKmode mode, let it store a field at a time
7284 since that should make a CONST_INT or CONST_DOUBLE when we
7285 fold. Likewise, if we have a target we can use, it is best to
7286 store directly into the target unless the type is large enough
7287 that memcpy will be used. If we are making an initializer and
7288 all operands are constant, put it in memory as well.
7289
7290 FIXME: Avoid trying to fill vector constructors piece-meal.
7291 Output them with output_constant_def below unless we're sure
7292 they're zeros. This should go away when vector initializers
7293 are treated like VECTOR_CST instead of arrays.
7294 */
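/* For instance, a TREE_STATIC constructor that is too large to copy
   by pieces and is not mostly zeros is emitted as initialized data by
   output_constant_def, while a small or mostly-zero one is built up
   in TARGET by store_constructor below.  */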
7295 else if ((TREE_STATIC (exp)
7296 && ((mode == BLKmode
7297 && ! (target != 0 && safe_from_p (target, exp, 1)))
7298 || TREE_ADDRESSABLE (exp)
7299 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7300 && (! MOVE_BY_PIECES_P
7301 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7302 TYPE_ALIGN (type)))
7303 && ((TREE_CODE (type) == VECTOR_TYPE
7304 && !is_zeros_p (exp))
7305 || ! mostly_zeros_p (exp)))))
7306 || ((modifier == EXPAND_INITIALIZER
7307 || modifier == EXPAND_CONST_ADDRESS)
7308 && TREE_CONSTANT (exp)))
7309 {
7310 rtx constructor = output_constant_def (exp, 1);
7311
7312 if (modifier != EXPAND_CONST_ADDRESS
7313 && modifier != EXPAND_INITIALIZER
7314 && modifier != EXPAND_SUM)
7315 constructor = validize_mem (constructor);
7316
7317 return constructor;
7318 }
7319 else
7320 {
7321 /* Handle calls that pass values in multiple non-contiguous
7322 locations. The Irix 6 ABI has examples of this. */
7323 if (target == 0 || ! safe_from_p (target, exp, 1)
7324 || GET_CODE (target) == PARALLEL
7325 || modifier == EXPAND_STACK_PARM)
7326 target
7327 = assign_temp (build_qualified_type (type,
7328 (TYPE_QUALS (type)
7329 | (TREE_READONLY (exp)
7330 * TYPE_QUAL_CONST))),
7331 0, TREE_ADDRESSABLE (exp), 1);
7332
7333 store_constructor (exp, target, 0, int_expr_size (exp));
7334 return target;
7335 }
7336
7337 case INDIRECT_REF:
7338 {
7339 tree exp1 = TREE_OPERAND (exp, 0);
7340 tree index;
7341 tree string = string_constant (exp1, &index);
7342
7343 /* Try to optimize reads from const strings. */
7344 if (string
7345 && TREE_CODE (string) == STRING_CST
7346 && TREE_CODE (index) == INTEGER_CST
7347 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7348 && GET_MODE_CLASS (mode) == MODE_INT
7349 && GET_MODE_SIZE (mode) == 1
7350 && modifier != EXPAND_WRITE)
7351 return gen_int_mode (TREE_STRING_POINTER (string)
7352 [TREE_INT_CST_LOW (index)], mode);
7353
7354 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7355 op0 = memory_address (mode, op0);
7356 temp = gen_rtx_MEM (mode, op0);
7357 set_mem_attributes (temp, exp, 0);
7358
7359 /* If we are writing to this object and its type is a record with
7360 readonly fields, we must mark it as readonly so it will
7361 conflict with readonly references to those fields. */
7362 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7363 RTX_UNCHANGING_P (temp) = 1;
7364
7365 return temp;
7366 }
7367
7368 case ARRAY_REF:
7369 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7370 abort ();
7371
7372 {
7373 tree array = TREE_OPERAND (exp, 0);
7374 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7375 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7376 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7377 HOST_WIDE_INT i;
7378
7379 /* Optimize the special-case of a zero lower bound.
7380
7381 We convert the low_bound to sizetype to avoid some problems
7382 with constant folding. (E.g. suppose the lower bound is 1,
7383 and its mode is QI. Without the conversion, (ARRAY
7384 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7385 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7386
7387 if (! integer_zerop (low_bound))
7388 index = size_diffop (index, convert (sizetype, low_bound));
7389
7390 /* Fold an expression like: "foo"[2].
7391 This is not done in fold so it won't happen inside &.
7392 Don't fold if this is for wide characters since it's too
7393 difficult to do correctly and this is a very rare case. */
7394
7395 if (modifier != EXPAND_CONST_ADDRESS
7396 && modifier != EXPAND_INITIALIZER
7397 && modifier != EXPAND_MEMORY
7398 && TREE_CODE (array) == STRING_CST
7399 && TREE_CODE (index) == INTEGER_CST
7400 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7401 && GET_MODE_CLASS (mode) == MODE_INT
7402 && GET_MODE_SIZE (mode) == 1)
7403 return gen_int_mode (TREE_STRING_POINTER (array)
7404 [TREE_INT_CST_LOW (index)], mode);
7405
7406 /* If this is a constant index into a constant array,
7407 just get the value from the array. Handle both the cases when
7408 we have an explicit constructor and when our operand is a variable
7409 that was declared const. */
7410
7411 if (modifier != EXPAND_CONST_ADDRESS
7412 && modifier != EXPAND_INITIALIZER
7413 && modifier != EXPAND_MEMORY
7414 && TREE_CODE (array) == CONSTRUCTOR
7415 && ! TREE_SIDE_EFFECTS (array)
7416 && TREE_CODE (index) == INTEGER_CST
7417 && 0 > compare_tree_int (index,
7418 list_length (CONSTRUCTOR_ELTS
7419 (TREE_OPERAND (exp, 0)))))
7420 {
7421 tree elem;
7422
7423 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7424 i = TREE_INT_CST_LOW (index);
7425 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7426 ;
7427
7428 if (elem)
7429 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7430 modifier);
7431 }
7432
7433 else if (optimize >= 1
7434 && modifier != EXPAND_CONST_ADDRESS
7435 && modifier != EXPAND_INITIALIZER
7436 && modifier != EXPAND_MEMORY
7437 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7438 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7439 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7440 {
7441 if (TREE_CODE (index) == INTEGER_CST)
7442 {
7443 tree init = DECL_INITIAL (array);
7444
7445 if (TREE_CODE (init) == CONSTRUCTOR)
7446 {
7447 tree elem;
7448
7449 for (elem = CONSTRUCTOR_ELTS (init);
7450 (elem
7451 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7452 elem = TREE_CHAIN (elem))
7453 ;
7454
7455 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7456 return expand_expr (fold (TREE_VALUE (elem)), target,
7457 tmode, modifier);
7458 }
7459 else if (TREE_CODE (init) == STRING_CST
7460 && 0 > compare_tree_int (index,
7461 TREE_STRING_LENGTH (init)))
7462 {
7463 tree type = TREE_TYPE (TREE_TYPE (init));
7464 enum machine_mode mode = TYPE_MODE (type);
7465
7466 if (GET_MODE_CLASS (mode) == MODE_INT
7467 && GET_MODE_SIZE (mode) == 1)
7468 return gen_int_mode (TREE_STRING_POINTER (init)
7469 [TREE_INT_CST_LOW (index)], mode);
7470 }
7471 }
7472 }
7473 }
7474 goto normal_inner_ref;
7475
7476 case COMPONENT_REF:
7477 /* If the operand is a CONSTRUCTOR, we can just extract the
7478 appropriate field if it is present. */
7479 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7480 {
7481 tree elt;
7482
7483 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7484 elt = TREE_CHAIN (elt))
7485 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7486 /* We can normally use the value of the field in the
7487 CONSTRUCTOR. However, if this is a bitfield in
7488 an integral mode that we can fit in a HOST_WIDE_INT,
7489 we must mask only the number of bits in the bitfield,
7490 since this is done implicitly by the constructor. If
7491 the bitfield does not meet either of those conditions,
7492 we can't do this optimization. */
7493 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7494 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7495 == MODE_INT)
7496 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7497 <= HOST_BITS_PER_WIDE_INT))))
7498 {
7499 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7500 && modifier == EXPAND_STACK_PARM)
7501 target = 0;
7502 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7503 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7504 {
7505 HOST_WIDE_INT bitsize
7506 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7507 enum machine_mode imode
7508 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7509
7510 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7511 {
7512 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7513 op0 = expand_and (imode, op0, op1, target);
7514 }
7515 else
7516 {
7517 tree count
7518 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7519 0);
7520
7521 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7522 target, 0);
7523 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7524 target, 0);
7525 }
7526 }
7527
7528 return op0;
7529 }
7530 }
7531 goto normal_inner_ref;
7532
7533 case BIT_FIELD_REF:
7534 case ARRAY_RANGE_REF:
7535 normal_inner_ref:
7536 {
7537 enum machine_mode mode1;
7538 HOST_WIDE_INT bitsize, bitpos;
7539 tree offset;
7540 int volatilep = 0;
7541 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7542 &mode1, &unsignedp, &volatilep);
7543 rtx orig_op0;
7544
7545 /* If we got back the original object, something is wrong. Perhaps
7546 we are evaluating an expression too early. In any event, don't
7547 infinitely recurse. */
7548 if (tem == exp)
7549 abort ();
7550
7551 /* If TEM's type is a union of variable size, pass TARGET to the inner
7552 computation, since it will need a temporary and TARGET is known
7553 to suffice. This occurs in unchecked conversion in Ada. */
7554
7555 orig_op0 = op0
7556 = expand_expr (tem,
7557 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7558 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7559 != INTEGER_CST)
7560 && modifier != EXPAND_STACK_PARM
7561 ? target : NULL_RTX),
7562 VOIDmode,
7563 (modifier == EXPAND_INITIALIZER
7564 || modifier == EXPAND_CONST_ADDRESS
7565 || modifier == EXPAND_STACK_PARM)
7566 ? modifier : EXPAND_NORMAL);
7567
7568 /* If this is a constant, put it into a register if it is a
7569 legitimate constant and OFFSET is 0; otherwise put it in memory. */
7570 if (CONSTANT_P (op0))
7571 {
7572 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7573 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7574 && offset == 0)
7575 op0 = force_reg (mode, op0);
7576 else
7577 op0 = validize_mem (force_const_mem (mode, op0));
7578 }
7579
7580 if (offset != 0)
7581 {
7582 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7583 EXPAND_SUM);
7584
7585 /* If this object is in a register, put it into memory.
7586 This case can't occur in C, but can in Ada if we have
7587 unchecked conversion of an expression from a scalar type to
7588 an array or record type. */
7589 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7590 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7591 {
7592 /* If the operand is a SAVE_EXPR, we can deal with this by
7593 forcing the SAVE_EXPR into memory. */
7594 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7595 {
7596 put_var_into_stack (TREE_OPERAND (exp, 0),
7597 /*rescan=*/true);
7598 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7599 }
7600 else
7601 {
7602 tree nt
7603 = build_qualified_type (TREE_TYPE (tem),
7604 (TYPE_QUALS (TREE_TYPE (tem))
7605 | TYPE_QUAL_CONST));
7606 rtx memloc = assign_temp (nt, 1, 1, 1);
7607
7608 emit_move_insn (memloc, op0);
7609 op0 = memloc;
7610 }
7611 }
7612
7613 if (GET_CODE (op0) != MEM)
7614 abort ();
7615
7616 #ifdef POINTERS_EXTEND_UNSIGNED
7617 if (GET_MODE (offset_rtx) != Pmode)
7618 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7619 #else
7620 if (GET_MODE (offset_rtx) != ptr_mode)
7621 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7622 #endif
7623
7624 /* A constant address in OP0 can have VOIDmode; we must not try
7625 to call force_reg in that case, so avoid it here. */
7626 if (GET_CODE (op0) == MEM
7627 && GET_MODE (op0) == BLKmode
7628 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7629 && bitsize != 0
7630 && (bitpos % bitsize) == 0
7631 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7632 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7633 {
7634 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7635 bitpos = 0;
7636 }
7637
7638 op0 = offset_address (op0, offset_rtx,
7639 highest_pow2_factor (offset));
7640 }
7641
7642 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7643 record its alignment as BIGGEST_ALIGNMENT. */
7644 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7645 && is_aligning_offset (offset, tem))
7646 set_mem_align (op0, BIGGEST_ALIGNMENT);
7647
7648 /* Don't forget about volatility even if this is a bitfield. */
7649 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7650 {
7651 if (op0 == orig_op0)
7652 op0 = copy_rtx (op0);
7653
7654 MEM_VOLATILE_P (op0) = 1;
7655 }
7656
7657 /* The following code doesn't handle CONCAT.
7658 Assume only bitpos == 0 can be used for CONCAT, due to
7659 one-element arrays having the same mode as their element. */
7660 if (GET_CODE (op0) == CONCAT)
7661 {
7662 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7663 abort ();
7664 return op0;
7665 }
7666
7667 /* In cases where an aligned union has an unaligned object
7668 as a field, we might be extracting a BLKmode value from
7669 an integer-mode (e.g., SImode) object. Handle this case
7670 by doing the extract into an object as wide as the field
7671 (which we know to be the width of a basic mode), then
7672 storing into memory, and changing the mode to BLKmode. */
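/* E.g. a BLKmode field of an SImode union member is extracted below
   as an SImode value, spilled to a stack temporary, and then re-read
   with its mode changed to BLKmode (see the mode == BLKmode block
   further down).  */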
7673 if (mode1 == VOIDmode
7674 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7675 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7676 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7677 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7678 && modifier != EXPAND_CONST_ADDRESS
7679 && modifier != EXPAND_INITIALIZER)
7680 /* If the field isn't aligned enough to fetch as a memref,
7681 fetch it as a bit field. */
7682 || (mode1 != BLKmode
7683 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7684 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7685 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7686 || (bitpos % BITS_PER_UNIT != 0)))
7687 /* If the type and the field are a constant size and the
7688 size of the type isn't the same size as the bitfield,
7689 we must use bitfield operations. */
7690 || (bitsize >= 0
7691 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7692 == INTEGER_CST)
7693 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7694 bitsize)))
7695 {
7696 enum machine_mode ext_mode = mode;
7697
7698 if (ext_mode == BLKmode
7699 && ! (target != 0 && GET_CODE (op0) == MEM
7700 && GET_CODE (target) == MEM
7701 && bitpos % BITS_PER_UNIT == 0))
7702 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7703
7704 if (ext_mode == BLKmode)
7705 {
7706 /* In this case, BITPOS must start at a byte boundary and
7707 TARGET, if specified, must be a MEM. */
7708 if (GET_CODE (op0) != MEM
7709 || (target != 0 && GET_CODE (target) != MEM)
7710 || bitpos % BITS_PER_UNIT != 0)
7711 abort ();
7712
7713 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7714 if (target == 0)
7715 target = assign_temp (type, 0, 1, 1);
7716
7717 emit_block_move (target, op0,
7718 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7719 / BITS_PER_UNIT),
7720 (modifier == EXPAND_STACK_PARM
7721 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7722
7723 return target;
7724 }
7725
7726 op0 = validize_mem (op0);
7727
7728 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7729 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7730
7731 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7732 (modifier == EXPAND_STACK_PARM
7733 ? NULL_RTX : target),
7734 ext_mode, ext_mode,
7735 int_size_in_bytes (TREE_TYPE (tem)));
7736
7737 /* If the result is a record type and BITSIZE is narrower than
7738 the mode of OP0, an integral mode, and this is a big endian
7739 machine, we must put the field into the high-order bits. */
7740 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7741 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7742 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7743 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7744 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7745 - bitsize),
7746 op0, 1);
7747
7748 if (mode == BLKmode)
7749 {
7750 rtx new = assign_temp (build_qualified_type
7751 ((*lang_hooks.types.type_for_mode)
7752 (ext_mode, 0),
7753 TYPE_QUAL_CONST), 0, 1, 1);
7754
7755 emit_move_insn (new, op0);
7756 op0 = copy_rtx (new);
7757 PUT_MODE (op0, BLKmode);
7758 set_mem_attributes (op0, exp, 1);
7759 }
7760
7761 return op0;
7762 }
7763
7764 /* If the result is BLKmode, use that to access the object
7765 now as well. */
7766 if (mode == BLKmode)
7767 mode1 = BLKmode;
7768
7769 /* Get a reference to just this component. */
7770 if (modifier == EXPAND_CONST_ADDRESS
7771 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7772 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7773 else
7774 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7775
7776 if (op0 == orig_op0)
7777 op0 = copy_rtx (op0);
7778
7779 set_mem_attributes (op0, exp, 0);
7780 if (GET_CODE (XEXP (op0, 0)) == REG)
7781 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7782
7783 MEM_VOLATILE_P (op0) |= volatilep;
7784 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7785 || modifier == EXPAND_CONST_ADDRESS
7786 || modifier == EXPAND_INITIALIZER)
7787 return op0;
7788 else if (target == 0)
7789 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7790
7791 convert_move (target, op0, unsignedp);
7792 return target;
7793 }
7794
7795 case VTABLE_REF:
7796 {
7797 rtx insn, before = get_last_insn (), vtbl_ref;
7798
7799 /* Evaluate the interior expression. */
7800 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7801 tmode, modifier);
7802
7803 /* Get or create an instruction off which to hang a note. */
7804 if (REG_P (subtarget))
7805 {
7806 target = subtarget;
7807 insn = get_last_insn ();
7808 if (insn == before)
7809 abort ();
7810 if (! INSN_P (insn))
7811 insn = prev_nonnote_insn (insn);
7812 }
7813 else
7814 {
7815 target = gen_reg_rtx (GET_MODE (subtarget));
7816 insn = emit_move_insn (target, subtarget);
7817 }
7818
7819 /* Collect the data for the note. */
7820 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7821 vtbl_ref = plus_constant (vtbl_ref,
7822 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7823 /* Discard the initial CONST that was added. */
7824 vtbl_ref = XEXP (vtbl_ref, 0);
7825
7826 REG_NOTES (insn)
7827 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7828
7829 return target;
7830 }
7831
7832 /* Intended for a reference to a buffer of a file-object in Pascal.
7833 But it's not certain that a special tree code will really be
7834 necessary for these. INDIRECT_REF might work for them. */
7835 case BUFFER_REF:
7836 abort ();
7837
7838 case IN_EXPR:
7839 {
7840 /* Pascal set IN expression.
7841
7842 Algorithm:
7843 rlo = set_low - (set_low%bits_per_word);
7844 the_word = set [ (index - rlo)/bits_per_word ];
7845 bit_index = index % bits_per_word;
7846 bitmask = 1 << bit_index;
7847 return !!(the_word & bitmask); */
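/* A worked instance of the algorithm above: with set_low == 3,
   bits_per_word == 8 and index == 11, rlo == 0, the_word == set[1],
   bit_index == 3 and bitmask == 0x08.  */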
7848
7849 tree set = TREE_OPERAND (exp, 0);
7850 tree index = TREE_OPERAND (exp, 1);
7851 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7852 tree set_type = TREE_TYPE (set);
7853 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7854 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7855 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7856 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7857 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7858 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7859 rtx setaddr = XEXP (setval, 0);
7860 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7861 rtx rlow;
7862 rtx diff, quo, rem, addr, bit, result;
7863
7864 /* If domain is empty, answer is no. Likewise if index is constant
7865 and out of bounds. */
7866 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7867 && TREE_CODE (set_low_bound) == INTEGER_CST
7868 && tree_int_cst_lt (set_high_bound, set_low_bound))
7869 || (TREE_CODE (index) == INTEGER_CST
7870 && TREE_CODE (set_low_bound) == INTEGER_CST
7871 && tree_int_cst_lt (index, set_low_bound))
7872 || (TREE_CODE (set_high_bound) == INTEGER_CST
7873 && TREE_CODE (index) == INTEGER_CST
7874 && tree_int_cst_lt (set_high_bound, index))))
7875 return const0_rtx;
7876
7877 if (target == 0)
7878 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7879
7880 /* If we get here, we have to generate the code for both cases
7881 (in range and out of range). */
7882
7883 op0 = gen_label_rtx ();
7884 op1 = gen_label_rtx ();
7885
7886 if (! (GET_CODE (index_val) == CONST_INT
7887 && GET_CODE (lo_r) == CONST_INT))
7888 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7889 GET_MODE (index_val), iunsignedp, op1);
7890
7891 if (! (GET_CODE (index_val) == CONST_INT
7892 && GET_CODE (hi_r) == CONST_INT))
7893 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7894 GET_MODE (index_val), iunsignedp, op1);
7895
7896 /* Calculate the element number of bit zero in the first word
7897 of the set. */
7898 if (GET_CODE (lo_r) == CONST_INT)
7899 rlow = GEN_INT (INTVAL (lo_r)
7900 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7901 else
7902 rlow = expand_binop (index_mode, and_optab, lo_r,
7903 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7904 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7905
7906 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7907 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7908
7909 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7910 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7911 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7912 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7913
7914 addr = memory_address (byte_mode,
7915 expand_binop (index_mode, add_optab, diff,
7916 setaddr, NULL_RTX, iunsignedp,
7917 OPTAB_LIB_WIDEN));
7918
7919 /* Extract the bit we want to examine. */
7920 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7921 gen_rtx_MEM (byte_mode, addr),
7922 make_tree (TREE_TYPE (index), rem),
7923 NULL_RTX, 1);
7924 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7925 GET_MODE (target) == byte_mode ? target : 0,
7926 1, OPTAB_LIB_WIDEN);
7927
7928 if (result != target)
7929 convert_move (target, result, 1);
7930
7931 /* Output the code to handle the out-of-range case. */
7932 emit_jump (op0);
7933 emit_label (op1);
7934 emit_move_insn (target, const0_rtx);
7935 emit_label (op0);
7936 return target;
7937 }
7938
7939 case WITH_CLEANUP_EXPR:
7940 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7941 {
7942 WITH_CLEANUP_EXPR_RTL (exp)
7943 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7944 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7945 CLEANUP_EH_ONLY (exp));
7946
7947 /* That's it for this cleanup. */
7948 TREE_OPERAND (exp, 1) = 0;
7949 }
7950 return WITH_CLEANUP_EXPR_RTL (exp);
7951
7952 case CLEANUP_POINT_EXPR:
7953 {
7954 /* Start a new binding layer that will keep track of all cleanup
7955 actions to be performed. */
7956 expand_start_bindings (2);
7957
7958 target_temp_slot_level = temp_slot_level;
7959
7960 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7961 /* If we're going to use this value, load it up now. */
7962 if (! ignore)
7963 op0 = force_not_mem (op0);
7964 preserve_temp_slots (op0);
7965 expand_end_bindings (NULL_TREE, 0, 0);
7966 }
7967 return op0;
7968
7969 case CALL_EXPR:
7970 /* Check for a built-in function. */
7971 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7972 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7973 == FUNCTION_DECL)
7974 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7975 {
7976 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7977 == BUILT_IN_FRONTEND)
7978 return (*lang_hooks.expand_expr) (exp, original_target,
7979 tmode, modifier);
7980 else
7981 return expand_builtin (exp, target, subtarget, tmode, ignore);
7982 }
7983
7984 return expand_call (exp, target, ignore);
7985
7986 case NON_LVALUE_EXPR:
7987 case NOP_EXPR:
7988 case CONVERT_EXPR:
7989 case REFERENCE_EXPR:
7990 if (TREE_OPERAND (exp, 0) == error_mark_node)
7991 return const0_rtx;
7992
7993 if (TREE_CODE (type) == UNION_TYPE)
7994 {
7995 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7996
7997 /* If both input and output are BLKmode, this conversion isn't doing
7998 anything except possibly changing memory attributes. */
7999 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8000 {
8001 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8002 modifier);
8003
8004 result = copy_rtx (result);
8005 set_mem_attributes (result, exp, 0);
8006 return result;
8007 }
8008
8009 if (target == 0)
8010 target = assign_temp (type, 0, 1, 1);
8011
8012 if (GET_CODE (target) == MEM)
8013 /* Store data into beginning of memory target. */
8014 store_expr (TREE_OPERAND (exp, 0),
8015 adjust_address (target, TYPE_MODE (valtype), 0),
8016 modifier == EXPAND_STACK_PARM ? 2 : 0);
8017
8018 else if (GET_CODE (target) == REG)
8019 /* Store this field into a union of the proper type. */
8020 store_field (target,
8021 MIN ((int_size_in_bytes (TREE_TYPE
8022 (TREE_OPERAND (exp, 0)))
8023 * BITS_PER_UNIT),
8024 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8025 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8026 VOIDmode, 0, type, 0);
8027 else
8028 abort ();
8029
8030 /* Return the entire union. */
8031 return target;
8032 }
8033
8034 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8035 {
8036 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8037 modifier);
8038
8039 /* If the signedness of the conversion differs and OP0 is
8040 a promoted SUBREG, clear that indication since we now
8041 have to do the proper extension. */
8042 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8043 && GET_CODE (op0) == SUBREG)
8044 SUBREG_PROMOTED_VAR_P (op0) = 0;
8045
8046 return op0;
8047 }
8048
8049 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8050 if (GET_MODE (op0) == mode)
8051 return op0;
8052
8053 /* If OP0 is a constant, just convert it into the proper mode. */
8054 if (CONSTANT_P (op0))
8055 {
8056 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8057 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8058
8059 if (modifier == EXPAND_INITIALIZER)
8060 return simplify_gen_subreg (mode, op0, inner_mode,
8061 subreg_lowpart_offset (mode,
8062 inner_mode));
8063 else
8064 return convert_modes (mode, inner_mode, op0,
8065 TREE_UNSIGNED (inner_type));
8066 }
8067
8068 if (modifier == EXPAND_INITIALIZER)
8069 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8070
8071 if (target == 0)
8072 return
8073 convert_to_mode (mode, op0,
8074 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8075 else
8076 convert_move (target, op0,
8077 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8078 return target;
8079
8080 case VIEW_CONVERT_EXPR:
8081 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8082
8083 /* If the input and output modes are both the same, we are done.
8084 Otherwise, if neither mode is BLKmode and both are integral and within
8085 a word, we can use gen_lowpart. If neither is true, make sure the
8086 operand is in memory and convert the MEM to the new mode. */
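/* E.g. a view-conversion between two integral types of the same
   width uses gen_lowpart below, while reinterpreting an integer as a
   floating-point value instead goes through a MEM accessed in the new
   mode.  */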
8087 if (TYPE_MODE (type) == GET_MODE (op0))
8088 ;
8089 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8090 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8091 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
8092 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
8093 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
8094 op0 = gen_lowpart (TYPE_MODE (type), op0);
8095 else if (GET_CODE (op0) != MEM)
8096 {
8097 /* If the operand is not a MEM, force it into memory. Since we
8098 are going to be changing the mode of the MEM, don't call
8099 force_const_mem for constants because we don't allow pool
8100 constants to change mode. */
8101 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8102
8103 if (TREE_ADDRESSABLE (exp))
8104 abort ();
8105
8106 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8107 target
8108 = assign_stack_temp_for_type
8109 (TYPE_MODE (inner_type),
8110 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8111
8112 emit_move_insn (target, op0);
8113 op0 = target;
8114 }
8115
8116 /* At this point, OP0 is in the correct mode. If the output type is such
8117 that the operand is known to be aligned, indicate that it is.
8118 Otherwise, we need only be concerned about alignment for non-BLKmode
8119 results. */
8120 if (GET_CODE (op0) == MEM)
8121 {
8122 op0 = copy_rtx (op0);
8123
8124 if (TYPE_ALIGN_OK (type))
8125 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8126 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8127 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8128 {
8129 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8130 HOST_WIDE_INT temp_size
8131 = MAX (int_size_in_bytes (inner_type),
8132 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8133 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8134 temp_size, 0, type);
8135 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8136
8137 if (TREE_ADDRESSABLE (exp))
8138 abort ();
8139
8140 if (GET_MODE (op0) == BLKmode)
8141 emit_block_move (new_with_op0_mode, op0,
8142 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8143 (modifier == EXPAND_STACK_PARM
8144 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8145 else
8146 emit_move_insn (new_with_op0_mode, op0);
8147
8148 op0 = new;
8149 }
8150
8151 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8152 }
8153
8154 return op0;
8155
8156 case PLUS_EXPR:
8157 this_optab = ! unsignedp && flag_trapv
8158 && (GET_MODE_CLASS (mode) == MODE_INT)
8159 ? addv_optab : add_optab;
8160
8161 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8162 something else, make sure we add the register to the constant and
8163 then to the other thing. This case can occur during strength
8164 reduction and doing it this way will produce better code if the
8165 frame pointer or argument pointer is eliminated.
8166
8167 fold-const.c will ensure that the constant is always in the inner
8168 PLUS_EXPR, so the only case we need to do anything about is if
8169 sp, ap, or fp is our second argument, in which case we must swap
8170 the innermost first argument and our second argument. */
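/* Concretely, (X + 4) + FP is rearranged below into (FP + 4) + X, so
   that the register is added to the constant first.  */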
8171
8172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8173 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8174 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8175 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8176 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8177 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8178 {
8179 tree t = TREE_OPERAND (exp, 1);
8180
8181 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8182 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8183 }
8184
8185 /* If the result is to be ptr_mode and we are adding an integer to
8186 something, we might be forming a constant. So try to use
8187 plus_constant. If it produces a sum and we can't accept it,
8188 use force_operand. This allows P = &ARR[const] to generate
8189 efficient code on machines where a SYMBOL_REF is not a valid
8190 address.
8191
8192 If this is an EXPAND_SUM call, always return the sum. */
8193 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8194 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8195 {
8196 if (modifier == EXPAND_STACK_PARM)
8197 target = 0;
8198 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8199 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8200 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8201 {
8202 rtx constant_part;
8203
8204 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8205 EXPAND_SUM);
8206 /* Use immed_double_const to ensure that the constant is
8207 truncated according to the mode of OP1, then sign extended
8208 to a HOST_WIDE_INT. Using the constant directly can result
8209 in non-canonical RTL in a 64x32 cross compile. */
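/* E.g. with a 64-bit HOST_WIDE_INT and a 32-bit target, an SImode
   constant with its high bit set must appear as a negative
   (sign-extended) CONST_INT, not as a zero-extended value.  */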
8210 constant_part
8211 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8212 (HOST_WIDE_INT) 0,
8213 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8214 op1 = plus_constant (op1, INTVAL (constant_part));
8215 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8216 op1 = force_operand (op1, target);
8217 return op1;
8218 }
8219
8220 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8221 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8222 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8223 {
8224 rtx constant_part;
8225
8226 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8227 (modifier == EXPAND_INITIALIZER
8228 ? EXPAND_INITIALIZER : EXPAND_SUM));
8229 if (! CONSTANT_P (op0))
8230 {
8231 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8232 VOIDmode, modifier);
8233 /* Don't go to both_summands if modifier
8234 says it's not right to return a PLUS. */
8235 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8236 goto binop2;
8237 goto both_summands;
8238 }
8239 /* Use immed_double_const to ensure that the constant is
8240 truncated according to the mode of OP1, then sign extended
8241 to a HOST_WIDE_INT. Using the constant directly can result
8242 in non-canonical RTL in a 64x32 cross compile. */
8243 constant_part
8244 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8245 (HOST_WIDE_INT) 0,
8246 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8247 op0 = plus_constant (op0, INTVAL (constant_part));
8248 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8249 op0 = force_operand (op0, target);
8250 return op0;
8251 }
8252 }
8253
8254 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8255 subtarget = 0;
8256
8257 /* No sense saving up arithmetic to be done
8258 if it's all in the wrong mode to form part of an address.
8259 And force_operand won't know whether to sign-extend or
8260 zero-extend. */
8261 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8262 || mode != ptr_mode)
8263 {
8264 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8265 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8266 TREE_OPERAND (exp, 1), 0))
8267 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8268 else
8269 op1 = op0;
8270 if (op0 == const0_rtx)
8271 return op1;
8272 if (op1 == const0_rtx)
8273 return op0;
8274 goto binop2;
8275 }
8276
8277 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8278 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8279 TREE_OPERAND (exp, 1), 0))
8280 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8281 VOIDmode, modifier);
8282 else
8283 op1 = op0;
8284
8285 /* We come here from MINUS_EXPR when the second operand is a
8286 constant. */
8287 both_summands:
8288 /* Make sure any term that's a sum with a constant comes last. */
8289 if (GET_CODE (op0) == PLUS
8290 && CONSTANT_P (XEXP (op0, 1)))
8291 {
8292 temp = op0;
8293 op0 = op1;
8294 op1 = temp;
8295 }
8296 /* If adding to a sum including a constant,
8297 associate it to put the constant outside. */
8298 if (GET_CODE (op1) == PLUS
8299 && CONSTANT_P (XEXP (op1, 1)))
8300 {
8301 rtx constant_term = const0_rtx;
8302
8303 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8304 if (temp != 0)
8305 op0 = temp;
8306 /* Ensure that MULT comes first if there is one. */
8307 else if (GET_CODE (op0) == MULT)
8308 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8309 else
8310 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8311
8312 /* Let's also eliminate constants from op0 if possible. */
8313 op0 = eliminate_constant_term (op0, &constant_term);
8314
8315 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8316 their sum should be a constant. Form it into OP1, since the
8317 result we want will then be OP0 + OP1. */
8318
8319 temp = simplify_binary_operation (PLUS, mode, constant_term,
8320 XEXP (op1, 1));
8321 if (temp != 0)
8322 op1 = temp;
8323 else
8324 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8325 }
8326
8327 /* Put a constant term last and put a multiplication first. */
8328 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8329 temp = op1, op1 = op0, op0 = temp;
8330
8331 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8332 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8333
8334 case MINUS_EXPR:
8335 /* For initializers, we are allowed to return a MINUS of two
8336 symbolic constants. Here we handle all cases when both operands
8337 are constant. */
8338 /* Handle difference of two symbolic constants,
8339 for the sake of an initializer. */
8340 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8341 && really_constant_p (TREE_OPERAND (exp, 0))
8342 && really_constant_p (TREE_OPERAND (exp, 1)))
8343 {
8344 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8345 modifier);
8346 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8347 modifier);
8348
8349 /* If the last operand is a CONST_INT, use plus_constant of
8350 the negated constant. Else make the MINUS. */
8351 if (GET_CODE (op1) == CONST_INT)
8352 return plus_constant (op0, - INTVAL (op1));
8353 else
8354 return gen_rtx_MINUS (mode, op0, op1);
8355 }
8356
8357 this_optab = ! unsignedp && flag_trapv
8358 && (GET_MODE_CLASS(mode) == MODE_INT)
8359 ? subv_optab : sub_optab;
8360
8361 /* No sense saving up arithmetic to be done
8362 if it's all in the wrong mode to form part of an address.
8363 And force_operand won't know whether to sign-extend or
8364 zero-extend. */
8365 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8366 || mode != ptr_mode)
8367 goto binop;
8368
8369 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8370 subtarget = 0;
8371
8372 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8373 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8374
8375 /* Convert A - const to A + (-const). */
8376 if (GET_CODE (op1) == CONST_INT)
8377 {
8378 op1 = negate_rtx (mode, op1);
8379 goto both_summands;
8380 }
8381
8382 goto binop2;
8383
8384 case MULT_EXPR:
8385 /* If first operand is constant, swap them.
8386 Thus the following special case checks need only
8387 check the second operand. */
8388 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8389 {
8390 tree t1 = TREE_OPERAND (exp, 0);
8391 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8392 TREE_OPERAND (exp, 1) = t1;
8393 }
8394
8395 /* Attempt to return something suitable for generating an
8396 indexed address, for machines that support that. */
8397
8398 if (modifier == EXPAND_SUM && mode == ptr_mode
8399 && host_integerp (TREE_OPERAND (exp, 1), 0))
8400 {
8401 tree exp1 = TREE_OPERAND (exp, 1);
8402
8403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8404 EXPAND_SUM);
8405
8406 /* If we knew for certain that this is arithmetic for an array
8407 reference, and we knew the bounds of the array, then we could
8408 apply the distributive law across (PLUS X C) for constant C.
8409 Without such knowledge, we risk overflowing the computation
8410 when both X and C are large, but X+C isn't. */
8411 /* ??? Could perhaps special-case EXP being unsigned and C being
8412 positive. In that case we are certain that X+C is no smaller
8413 than X and so the transformed expression will overflow iff the
8414 original would have. */
8415
8416 if (GET_CODE (op0) != REG)
8417 op0 = force_operand (op0, NULL_RTX);
8418 if (GET_CODE (op0) != REG)
8419 op0 = copy_to_mode_reg (mode, op0);
8420
8421 return gen_rtx_MULT (mode, op0,
8422 gen_int_mode (tree_low_cst (exp1, 0),
8423 TYPE_MODE (TREE_TYPE (exp1))));
8424 }
8425
8426 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8427 subtarget = 0;
8428
8429 if (modifier == EXPAND_STACK_PARM)
8430 target = 0;
8431
8432 /* Check for multiplying things that have been extended
8433 from a narrower type. If this machine supports multiplying
8434 in that narrower type with a result in the desired type,
8435 do it that way, and avoid the explicit type-conversion. */
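/* For example, on a typical 32-bit target, multiplying two 'short'
   values that were promoted to 'int' can use a single HImode-to-SImode
   widening multiply on the un-widened operands; this_optab and
   other_optab below select the signed or unsigned variant.  */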
8436 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8437 && TREE_CODE (type) == INTEGER_TYPE
8438 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8439 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8440 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8441 && int_fits_type_p (TREE_OPERAND (exp, 1),
8442 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8443 /* Don't use a widening multiply if a shift will do. */
8444 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8445 > HOST_BITS_PER_WIDE_INT)
8446 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8447 ||
8448 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8449 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8450 ==
8451 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8452 /* If both operands are extended, they must either both
8453 be zero-extended or both be sign-extended. */
8454 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8455 ==
8456 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8457 {
8458 enum machine_mode innermode
8459 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8460 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8461 ? smul_widen_optab : umul_widen_optab);
8462 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8463 ? umul_widen_optab : smul_widen_optab);
8464 if (mode == GET_MODE_WIDER_MODE (innermode))
8465 {
8466 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8467 {
8468 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8469 NULL_RTX, VOIDmode, 0);
8470 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8471 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8472 VOIDmode, 0);
8473 else
8474 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8475 NULL_RTX, VOIDmode, 0);
8476 goto binop2;
8477 }
8478 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8479 && innermode == word_mode)
8480 {
8481 rtx htem;
8482 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8483 NULL_RTX, VOIDmode, 0);
8484 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8485 op1 = convert_modes (innermode, mode,
8486 expand_expr (TREE_OPERAND (exp, 1),
8487 NULL_RTX, VOIDmode, 0),
8488 unsignedp);
8489 else
8490 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8491 NULL_RTX, VOIDmode, 0);
8492 temp = expand_binop (mode, other_optab, op0, op1, target,
8493 unsignedp, OPTAB_LIB_WIDEN);
8494 htem = expand_mult_highpart_adjust (innermode,
8495 gen_highpart (innermode, temp),
8496 op0, op1,
8497 gen_highpart (innermode, temp),
8498 unsignedp);
8499 emit_move_insn (gen_highpart (innermode, temp), htem);
8500 return temp;
8501 }
8502 }
8503 }
8504 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8505 if (! operand_equal_p (TREE_OPERAND (exp, 0),
8506 TREE_OPERAND (exp, 1), 0))
8507 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8508 else
8509 op1 = op0;
8510 return expand_mult (mode, op0, op1, target, unsignedp);
8511
8512 case TRUNC_DIV_EXPR:
8513 case FLOOR_DIV_EXPR:
8514 case CEIL_DIV_EXPR:
8515 case ROUND_DIV_EXPR:
8516 case EXACT_DIV_EXPR:
8517 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8518 subtarget = 0;
8519 if (modifier == EXPAND_STACK_PARM)
8520 target = 0;
8521 /* Possible optimization: compute the dividend with EXPAND_SUM;
8522 then, if the divisor is constant, we can optimize the case where
8523 some terms of the dividend have coefficients divisible by it. */
8524 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8525 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8526 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8527
8528 case RDIV_EXPR:
8529 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
8530 saving an expensive divide. If not, combine will rebuild the
8531 original computation. */
8532 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8533 && TREE_CODE (type) == REAL_TYPE
8534 && !real_onep (TREE_OPERAND (exp, 0)))
8535 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8536 build (RDIV_EXPR, type,
8537 build_real (type, dconst1),
8538 TREE_OPERAND (exp, 1))),
8539 target, tmode, modifier);
8540 this_optab = sdiv_optab;
8541 goto binop;
8542
8543 case TRUNC_MOD_EXPR:
8544 case FLOOR_MOD_EXPR:
8545 case CEIL_MOD_EXPR:
8546 case ROUND_MOD_EXPR:
8547 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8548 subtarget = 0;
8549 if (modifier == EXPAND_STACK_PARM)
8550 target = 0;
8551 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8552 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8553 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8554
8555 case FIX_ROUND_EXPR:
8556 case FIX_FLOOR_EXPR:
8557 case FIX_CEIL_EXPR:
8558 abort (); /* Not used for C. */
8559
8560 case FIX_TRUNC_EXPR:
8561 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8562 if (target == 0 || modifier == EXPAND_STACK_PARM)
8563 target = gen_reg_rtx (mode);
8564 expand_fix (target, op0, unsignedp);
8565 return target;
8566
8567 case FLOAT_EXPR:
8568 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8569 if (target == 0 || modifier == EXPAND_STACK_PARM)
8570 target = gen_reg_rtx (mode);
8571 /* expand_float can't figure out what to do if FROM has VOIDmode.
8572 So give it the correct mode. With -O, cse will optimize this. */
8573 if (GET_MODE (op0) == VOIDmode)
8574 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8575 op0);
8576 expand_float (target, op0,
8577 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8578 return target;
8579
8580 case NEGATE_EXPR:
8581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8582 if (modifier == EXPAND_STACK_PARM)
8583 target = 0;
8584 temp = expand_unop (mode,
8585 ! unsignedp && flag_trapv
8586 && (GET_MODE_CLASS(mode) == MODE_INT)
8587 ? negv_optab : neg_optab, op0, target, 0);
8588 if (temp == 0)
8589 abort ();
8590 return temp;
8591
8592 case ABS_EXPR:
8593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8594 if (modifier == EXPAND_STACK_PARM)
8595 target = 0;
8596
8597 /* Handle complex values specially. */
8598 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8599 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8600 return expand_complex_abs (mode, op0, target, unsignedp);
8601
8602 /* Unsigned abs is simply the operand. Testing here means we don't
8603 risk generating incorrect code below. */
8604 if (TREE_UNSIGNED (type))
8605 return op0;
8606
8607 return expand_abs (mode, op0, target, unsignedp,
8608 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8609
8610 case MAX_EXPR:
8611 case MIN_EXPR:
8612 target = original_target;
8613 if (target == 0
8614 || modifier == EXPAND_STACK_PARM
8615 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8616 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8617 || GET_MODE (target) != mode
8618 || (GET_CODE (target) == REG
8619 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8620 target = gen_reg_rtx (mode);
8621 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8622 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8623
8624 /* First try to do it with a special MIN or MAX instruction.
8625 If that does not win, use a conditional jump to select the proper
8626 value. */
8627 this_optab = (TREE_UNSIGNED (type)
8628 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8629 : (code == MIN_EXPR ? smin_optab : smax_optab));
8630
8631 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8632 OPTAB_WIDEN);
8633 if (temp != 0)
8634 return temp;
8635
8636 /* At this point, a MEM target is no longer useful; we will get better
8637 code without it. */
8638
8639 if (GET_CODE (target) == MEM)
8640 target = gen_reg_rtx (mode);
8641
8642 if (target != op0)
8643 emit_move_insn (target, op0);
8644
8645 op0 = gen_label_rtx ();
8646
8647 /* If this mode is an integer too wide to compare properly,
8648 compare word by word. Rely on cse to optimize constant cases. */
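/* This happens, e.g., for a double-word integer mode on a target with
   no conditional-branch pattern of that width; do_jump_by_parts_greater_rtx
   then compares the operands one word at a time.  */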
8649 if (GET_MODE_CLASS (mode) == MODE_INT
8650 && ! can_compare_p (GE, mode, ccp_jump))
8651 {
8652 if (code == MAX_EXPR)
8653 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8654 target, op1, NULL_RTX, op0);
8655 else
8656 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8657 op1, target, NULL_RTX, op0);
8658 }
8659 else
8660 {
8661 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8662 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8663 unsignedp, mode, NULL_RTX, NULL_RTX,
8664 op0);
8665 }
8666 emit_move_insn (target, op1);
8667 emit_label (op0);
8668 return target;
8669
8670 case BIT_NOT_EXPR:
8671 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8672 if (modifier == EXPAND_STACK_PARM)
8673 target = 0;
8674 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8675 if (temp == 0)
8676 abort ();
8677 return temp;
8678
8679 case FFS_EXPR:
8680 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8681 if (modifier == EXPAND_STACK_PARM)
8682 target = 0;
8683 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8684 if (temp == 0)
8685 abort ();
8686 return temp;
8687
8688 case CLZ_EXPR:
8689 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8690 temp = expand_unop (mode, clz_optab, op0, target, 1);
8691 if (temp == 0)
8692 abort ();
8693 return temp;
8694
8695 case CTZ_EXPR:
8696 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8697 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8698 if (temp == 0)
8699 abort ();
8700 return temp;
8701
8702 case POPCOUNT_EXPR:
8703 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8704 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8705 if (temp == 0)
8706 abort ();
8707 return temp;
8708
8709 case PARITY_EXPR:
8710 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8711 temp = expand_unop (mode, parity_optab, op0, target, 1);
8712 if (temp == 0)
8713 abort ();
8714 return temp;
8715
8716 /* ??? Can optimize bitwise operations with one arg constant.
8717 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8718 and (a bitwise1 b) bitwise2 b (etc)
8719 but that is probably not worth while. */
8720
8721 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8722 boolean values when we want in all cases to compute both of them. In
8723 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8724 as actual zero-or-1 values and then bitwise anding. In cases where
8725 there cannot be any side effects, better code would be made by
8726 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8727 how to recognize those cases. */
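/* In other words, TRUTH_ANDIF_EXPR short-circuits (its second operand
   is not evaluated when the first is false), while TRUTH_AND_EXPR and
   BIT_AND_EXPR evaluate both operands and combine the 0/1 results.  */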
8728
8729 case TRUTH_AND_EXPR:
8730 case BIT_AND_EXPR:
8731 this_optab = and_optab;
8732 goto binop;
8733
8734 case TRUTH_OR_EXPR:
8735 case BIT_IOR_EXPR:
8736 this_optab = ior_optab;
8737 goto binop;
8738
8739 case TRUTH_XOR_EXPR:
8740 case BIT_XOR_EXPR:
8741 this_optab = xor_optab;
8742 goto binop;
8743
8744 case LSHIFT_EXPR:
8745 case RSHIFT_EXPR:
8746 case LROTATE_EXPR:
8747 case RROTATE_EXPR:
8748 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8749 subtarget = 0;
8750 if (modifier == EXPAND_STACK_PARM)
8751 target = 0;
8752 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8753 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8754 unsignedp);
8755
8756 /* Could determine the answer when only additive constants differ. Also,
8757 the addition of one can be handled by changing the condition. */
8758 case LT_EXPR:
8759 case LE_EXPR:
8760 case GT_EXPR:
8761 case GE_EXPR:
8762 case EQ_EXPR:
8763 case NE_EXPR:
8764 case UNORDERED_EXPR:
8765 case ORDERED_EXPR:
8766 case UNLT_EXPR:
8767 case UNLE_EXPR:
8768 case UNGT_EXPR:
8769 case UNGE_EXPR:
8770 case UNEQ_EXPR:
8771 temp = do_store_flag (exp,
8772 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8773 tmode != VOIDmode ? tmode : mode, 0);
8774 if (temp != 0)
8775 return temp;
8776
8777 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8778 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8779 && original_target
8780 && GET_CODE (original_target) == REG
8781 && (GET_MODE (original_target)
8782 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8783 {
8784 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8785 VOIDmode, 0);
8786
8787 /* If temp is constant, we can just compute the result. */
8788 if (GET_CODE (temp) == CONST_INT)
8789 {
8790 if (INTVAL (temp) != 0)
8791 emit_move_insn (target, const1_rtx);
8792 else
8793 emit_move_insn (target, const0_rtx);
8794
8795 return target;
8796 }
8797
8798 if (temp != original_target)
8799 {
8800 enum machine_mode mode1 = GET_MODE (temp);
8801 if (mode1 == VOIDmode)
8802 mode1 = tmode != VOIDmode ? tmode : mode;
8803
8804 temp = copy_to_mode_reg (mode1, temp);
8805 }
8806
8807 op1 = gen_label_rtx ();
8808 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8809 GET_MODE (temp), unsignedp, op1);
8810 emit_move_insn (temp, const1_rtx);
8811 emit_label (op1);
8812 return temp;
8813 }
8814
8815 /* If no set-flag instruction, must generate a conditional
8816 store into a temporary variable. Drop through
8817 and handle this like && and ||. */
8818
8819 case TRUTH_ANDIF_EXPR:
8820 case TRUTH_ORIF_EXPR:
8821 if (! ignore
8822 && (target == 0
8823 || modifier == EXPAND_STACK_PARM
8824 || ! safe_from_p (target, exp, 1)
8825 /* Make sure we don't have a hard reg (such as function's return
8826 value) live across basic blocks, if not optimizing. */
8827 || (!optimize && GET_CODE (target) == REG
8828 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8829 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8830
8831 if (target)
8832 emit_clr_insn (target);
8833
8834 op1 = gen_label_rtx ();
8835 jumpifnot (exp, op1);
8836
8837 if (target)
8838 emit_0_to_1_insn (target);
8839
8840 emit_label (op1);
8841 return ignore ? const0_rtx : target;
8842
8843 case TRUTH_NOT_EXPR:
8844 if (modifier == EXPAND_STACK_PARM)
8845 target = 0;
8846 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8847 /* The parser is careful to generate TRUTH_NOT_EXPR
8848 only with operands that are always zero or one. */
8849 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8850 target, 1, OPTAB_LIB_WIDEN);
8851 if (temp == 0)
8852 abort ();
8853 return temp;
8854
8855 case COMPOUND_EXPR:
8856 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8857 emit_queue ();
8858 return expand_expr (TREE_OPERAND (exp, 1),
8859 (ignore ? const0_rtx : target),
8860 VOIDmode, modifier);
8861
8862 case COND_EXPR:
8863 /* If we would have a "singleton" (see below) were it not for a
8864 conversion in each arm, bring that conversion back out. */
8865 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8866 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8867 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8868 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8869 {
8870 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8871 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8872
8873 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8874 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8875 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8876 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8877 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8878 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8879 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8880 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8881 return expand_expr (build1 (NOP_EXPR, type,
8882 build (COND_EXPR, TREE_TYPE (iftrue),
8883 TREE_OPERAND (exp, 0),
8884 iftrue, iffalse)),
8885 target, tmode, modifier);
8886 }
8887
8888 {
8889 /* Note that COND_EXPRs whose type is a structure or union
8890 are required to be constructed to contain assignments of
8891 a temporary variable, so that we can evaluate them here
8892 for side effect only. If type is void, we must do likewise. */
8893
8894 /* If an arm of the branch requires a cleanup,
8895 only that cleanup is performed. */
8896
8897 tree singleton = 0;
8898 tree binary_op = 0, unary_op = 0;
8899
8900 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8901 convert it to our mode, if necessary. */
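	 /* For instance,  r = (a < b) ? 1 : 0  needs no branch at all here:
	    the comparison (a < b) is simply expanded, yielding a 0/1 value,
	    and converted to the mode of the result if necessary.  This is
	    only a source-level sketch; the arms must really be the constants
	    1 and 0 and the condition a comparison, as tested below.  */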
8902 if (integer_onep (TREE_OPERAND (exp, 1))
8903 && integer_zerop (TREE_OPERAND (exp, 2))
8904 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8905 {
8906 if (ignore)
8907 {
8908 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8909 modifier);
8910 return const0_rtx;
8911 }
8912
8913 if (modifier == EXPAND_STACK_PARM)
8914 target = 0;
8915 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8916 if (GET_MODE (op0) == mode)
8917 return op0;
8918
8919 if (target == 0)
8920 target = gen_reg_rtx (mode);
8921 convert_move (target, op0, unsignedp);
8922 return target;
8923 }
8924
8925 /* Check for X ? A + B : A. If we have this, we can copy A to the
8926 output and conditionally add B. Similarly for unary operations.
8927 Don't do this if X has side-effects because those side effects
8928 might affect A or B and the "?" operation is a sequence point in
8929 ANSI. (operand_equal_p tests for side effects.) */
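	 /* Roughly, for  r = x ? a + b : a  (with X free of side effects)
	    this expands as

		r = a;  if (x) r = r + b;

	    copying A unconditionally and applying the binary (or unary)
	    operation only on the taken branch.  */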
8930
8931 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8932 && operand_equal_p (TREE_OPERAND (exp, 2),
8933 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8934 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8935 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8936 && operand_equal_p (TREE_OPERAND (exp, 1),
8937 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8938 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8939 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8940 && operand_equal_p (TREE_OPERAND (exp, 2),
8941 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8942 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8943 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8944 && operand_equal_p (TREE_OPERAND (exp, 1),
8945 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8946 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8947
8948 /* If we are not to produce a result, we have no target. Otherwise,
8949 if a target was specified use it; it will not be used as an
8950 intermediate target unless it is safe. If no target, use a
8951 temporary. */
8952
8953 if (ignore)
8954 temp = 0;
8955 else if (modifier == EXPAND_STACK_PARM)
8956 temp = assign_temp (type, 0, 0, 1);
8957 else if (original_target
8958 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8959 || (singleton && GET_CODE (original_target) == REG
8960 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8961 && original_target == var_rtx (singleton)))
8962 && GET_MODE (original_target) == mode
8963 #ifdef HAVE_conditional_move
8964 && (! can_conditionally_move_p (mode)
8965 || GET_CODE (original_target) == REG
8966 || TREE_ADDRESSABLE (type))
8967 #endif
8968 && (GET_CODE (original_target) != MEM
8969 || TREE_ADDRESSABLE (type)))
8970 temp = original_target;
8971 else if (TREE_ADDRESSABLE (type))
8972 abort ();
8973 else
8974 temp = assign_temp (type, 0, 0, 1);
8975
8976 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8977 do the test of X as a store-flag operation, do this as
8978 A + ((X != 0) << log C). Similarly for other simple binary
8979 operators. Only do for C == 1 if BRANCH_COST is low. */
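	 /* A source-level sketch of the transformation, with a hypothetical
	    flag X and C == 4:

		r = x ? a + 4 : a;   becomes   r = a + ((x != 0) << 2);

	    since log2 (4) == 2.  For  r = x ? a : a + 1  the inverted form
	    below gives  r = a + (x == 0).  */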
8980 if (temp && singleton && binary_op
8981 && (TREE_CODE (binary_op) == PLUS_EXPR
8982 || TREE_CODE (binary_op) == MINUS_EXPR
8983 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8984 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8985 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8986 : integer_onep (TREE_OPERAND (binary_op, 1)))
8987 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8988 {
8989 rtx result;
8990 tree cond;
8991 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8992 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8993 ? addv_optab : add_optab)
8994 : TREE_CODE (binary_op) == MINUS_EXPR
8995 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8996 ? subv_optab : sub_optab)
8997 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8998 : xor_optab);
8999
9000 /* If we had X ? A : A + 1, do this as A + (X == 0). */
9001 if (singleton == TREE_OPERAND (exp, 1))
9002 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
9003 else
9004 cond = TREE_OPERAND (exp, 0);
9005
9006 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
9007 ? temp : NULL_RTX),
9008 mode, BRANCH_COST <= 1);
9009
9010 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
9011 result = expand_shift (LSHIFT_EXPR, mode, result,
9012 build_int_2 (tree_log2
9013 (TREE_OPERAND
9014 (binary_op, 1)),
9015 0),
9016 (safe_from_p (temp, singleton, 1)
9017 ? temp : NULL_RTX), 0);
9018
9019 if (result)
9020 {
9021 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
9022 return expand_binop (mode, boptab, op1, result, temp,
9023 unsignedp, OPTAB_LIB_WIDEN);
9024 }
9025 }
9026
9027 do_pending_stack_adjust ();
9028 NO_DEFER_POP;
9029 op0 = gen_label_rtx ();
9030
9031 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
9032 {
9033 if (temp != 0)
9034 {
9035 /* If the target conflicts with the other operand of the
9036 binary op, we can't use it. Also, we can't use the target
9037 if it is a hard register, because evaluating the condition
9038 might clobber it. */
9039 if ((binary_op
9040 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
9041 || (GET_CODE (temp) == REG
9042 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
9043 temp = gen_reg_rtx (mode);
9044 store_expr (singleton, temp,
9045 modifier == EXPAND_STACK_PARM ? 2 : 0);
9046 }
9047 else
9048 expand_expr (singleton,
9049 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9050 if (singleton == TREE_OPERAND (exp, 1))
9051 jumpif (TREE_OPERAND (exp, 0), op0);
9052 else
9053 jumpifnot (TREE_OPERAND (exp, 0), op0);
9054
9055 start_cleanup_deferral ();
9056 if (binary_op && temp == 0)
9057 /* Just touch the other operand. */
9058 expand_expr (TREE_OPERAND (binary_op, 1),
9059 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9060 else if (binary_op)
9061 store_expr (build (TREE_CODE (binary_op), type,
9062 make_tree (type, temp),
9063 TREE_OPERAND (binary_op, 1)),
9064 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
9065 else
9066 store_expr (build1 (TREE_CODE (unary_op), type,
9067 make_tree (type, temp)),
9068 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
9069 op1 = op0;
9070 }
9071 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
9072 comparison operator. If we have one of these cases, set the
9073 output to A, branch on A (cse will merge these two references),
9074 then set the output to FOO. */
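	 /* For example,  r = (a != 0) ? a : b  can be expanded as

		r = a;  if (a != 0) goto done;  r = b;  done:;

	    where the two references to A are expected to be merged by cse.  */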
9075 else if (temp
9076 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
9077 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
9078 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
9079 TREE_OPERAND (exp, 1), 0)
9080 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
9081 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
9082 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
9083 {
9084 if (GET_CODE (temp) == REG
9085 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
9086 temp = gen_reg_rtx (mode);
9087 store_expr (TREE_OPERAND (exp, 1), temp,
9088 modifier == EXPAND_STACK_PARM ? 2 : 0);
9089 jumpif (TREE_OPERAND (exp, 0), op0);
9090
9091 start_cleanup_deferral ();
9092 store_expr (TREE_OPERAND (exp, 2), temp,
9093 modifier == EXPAND_STACK_PARM ? 2 : 0);
9094 op1 = op0;
9095 }
9096 else if (temp
9097 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
9098 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
9099 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
9100 TREE_OPERAND (exp, 2), 0)
9101 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
9102 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
9103 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
9104 {
9105 if (GET_CODE (temp) == REG
9106 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
9107 temp = gen_reg_rtx (mode);
9108 store_expr (TREE_OPERAND (exp, 2), temp,
9109 modifier == EXPAND_STACK_PARM ? 2 : 0);
9110 jumpifnot (TREE_OPERAND (exp, 0), op0);
9111
9112 start_cleanup_deferral ();
9113 store_expr (TREE_OPERAND (exp, 1), temp,
9114 modifier == EXPAND_STACK_PARM ? 2 : 0);
9115 op1 = op0;
9116 }
9117 else
9118 {
9119 op1 = gen_label_rtx ();
9120 jumpifnot (TREE_OPERAND (exp, 0), op0);
9121
9122 start_cleanup_deferral ();
9123
9124 /* One branch of the cond can be void, if it never returns. For
9125 example A ? throw : E */
9126 if (temp != 0
9127 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
9128 store_expr (TREE_OPERAND (exp, 1), temp,
9129 modifier == EXPAND_STACK_PARM ? 2 : 0);
9130 else
9131 expand_expr (TREE_OPERAND (exp, 1),
9132 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9133 end_cleanup_deferral ();
9134 emit_queue ();
9135 emit_jump_insn (gen_jump (op1));
9136 emit_barrier ();
9137 emit_label (op0);
9138 start_cleanup_deferral ();
9139 if (temp != 0
9140 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
9141 store_expr (TREE_OPERAND (exp, 2), temp,
9142 modifier == EXPAND_STACK_PARM ? 2 : 0);
9143 else
9144 expand_expr (TREE_OPERAND (exp, 2),
9145 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
9146 }
9147
9148 end_cleanup_deferral ();
9149
9150 emit_queue ();
9151 emit_label (op1);
9152 OK_DEFER_POP;
9153
9154 return temp;
9155 }
9156
9157 case TARGET_EXPR:
9158 {
9159 /* Something needs to be initialized, but we didn't know
9160 where that thing was when building the tree. For example,
9161 it could be the return value of a function, or a parameter
9162 to a function which is laid out on the stack, or a temporary
9163 variable which must be passed by reference.
9164
9165 We guarantee that the expression will either be constructed
9166 or copied into our original target. */
9167
9168 tree slot = TREE_OPERAND (exp, 0);
9169 tree cleanups = NULL_TREE;
9170 tree exp1;
9171
9172 if (TREE_CODE (slot) != VAR_DECL)
9173 abort ();
9174
9175 if (! ignore)
9176 target = original_target;
9177
9178 /* Set this here so that if we get a target that refers to a
9179 register variable that's already been used, put_reg_into_stack
9180 knows that it should fix up those uses. */
9181 TREE_USED (slot) = 1;
9182
9183 if (target == 0)
9184 {
9185 if (DECL_RTL_SET_P (slot))
9186 {
9187 target = DECL_RTL (slot);
9188 /* If we have already expanded the slot, don't do
9189 it again. (mrs) */
9190 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9191 return target;
9192 }
9193 else
9194 {
9195 target = assign_temp (type, 2, 0, 1);
9196 /* All temp slots at this level must not conflict. */
9197 preserve_temp_slots (target);
9198 SET_DECL_RTL (slot, target);
9199 if (TREE_ADDRESSABLE (slot))
9200 put_var_into_stack (slot, /*rescan=*/false);
9201
9202 /* Since SLOT is not known to the called function
9203 to belong to its stack frame, we must build an explicit
9204 cleanup. This case occurs when we must build up a reference
9205 to pass the reference as an argument. In this case,
9206 it is very likely that such a reference need not be
9207 built here. */
9208
9209 if (TREE_OPERAND (exp, 2) == 0)
9210 TREE_OPERAND (exp, 2)
9211 = (*lang_hooks.maybe_build_cleanup) (slot);
9212 cleanups = TREE_OPERAND (exp, 2);
9213 }
9214 }
9215 else
9216 {
9217 /* This case does occur when expanding a parameter which
9218 needs to be constructed on the stack. The target
9219 is the actual stack address that we want to initialize.
9220 The function we call will perform the cleanup in this case. */
9221
9222 /* If we have already assigned it space, use that space,
9223 not the target that we were passed in, as our target
9224 parameter is only a hint. */
9225 if (DECL_RTL_SET_P (slot))
9226 {
9227 target = DECL_RTL (slot);
9228 /* If we have already expanded the slot, don't do
9229 it again. (mrs) */
9230 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9231 return target;
9232 }
9233 else
9234 {
9235 SET_DECL_RTL (slot, target);
9236 /* If we must have an addressable slot, then make sure that
9237 the RTL that we just stored in slot is OK. */
9238 if (TREE_ADDRESSABLE (slot))
9239 put_var_into_stack (slot, /*rescan=*/true);
9240 }
9241 }
9242
9243 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9244 /* Mark it as expanded. */
9245 TREE_OPERAND (exp, 1) = NULL_TREE;
9246
9247 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9248
9249 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9250
9251 return target;
9252 }
9253
9254 case INIT_EXPR:
9255 {
9256 tree lhs = TREE_OPERAND (exp, 0);
9257 tree rhs = TREE_OPERAND (exp, 1);
9258
9259 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9260 return temp;
9261 }
9262
9263 case MODIFY_EXPR:
9264 {
9265 /* If lhs is complex, expand calls in rhs before computing it.
9266 That's so we don't compute a pointer and save it over a
9267 call. If lhs is simple, compute it first so we can give it
9268 as a target if the rhs is just a call. This avoids an
9269 extra temp and copy and that prevents a partial-subsumption
9270 which makes bad code. Actually we could treat
9271 component_ref's of vars like vars. */
9272
9273 tree lhs = TREE_OPERAND (exp, 0);
9274 tree rhs = TREE_OPERAND (exp, 1);
9275
9276 temp = 0;
9277
9278 /* Check for |= or &= of a bitfield of size one into another bitfield
9279 of size 1. In this case, (unless we need the result of the
9280 assignment) we can do this more efficiently with a
9281 test followed by an assignment, if necessary.
9282
9283 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9284 things change so we do, this code should be enhanced to
9285 support it. */
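	 /* A sketch with hypothetical one-bit fields a and b in struct s:

		s.a |= s.b;   can be done as   if (s.b) s.a = 1;
		s.a &= s.b;   can be done as   if (!s.b) s.a = 0;

	    so no read-modify-write of the destination bitfield is needed.  */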
9286 if (ignore
9287 && TREE_CODE (lhs) == COMPONENT_REF
9288 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9289 || TREE_CODE (rhs) == BIT_AND_EXPR)
9290 && TREE_OPERAND (rhs, 0) == lhs
9291 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9292 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9293 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9294 {
9295 rtx label = gen_label_rtx ();
9296
9297 do_jump (TREE_OPERAND (rhs, 1),
9298 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9299 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9300 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9301 (TREE_CODE (rhs) == BIT_IOR_EXPR
9302 ? integer_one_node
9303 : integer_zero_node)),
9304 0, 0);
9305 do_pending_stack_adjust ();
9306 emit_label (label);
9307 return const0_rtx;
9308 }
9309
9310 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9311
9312 return temp;
9313 }
9314
9315 case RETURN_EXPR:
9316 if (!TREE_OPERAND (exp, 0))
9317 expand_null_return ();
9318 else
9319 expand_return (TREE_OPERAND (exp, 0));
9320 return const0_rtx;
9321
9322 case PREINCREMENT_EXPR:
9323 case PREDECREMENT_EXPR:
9324 return expand_increment (exp, 0, ignore);
9325
9326 case POSTINCREMENT_EXPR:
9327 case POSTDECREMENT_EXPR:
9328 /* Faster to treat as pre-increment if result is not used. */
9329 return expand_increment (exp, ! ignore, ignore);
9330
9331 case ADDR_EXPR:
9332 if (modifier == EXPAND_STACK_PARM)
9333 target = 0;
9334 /* Are we taking the address of a nested function? */
9335 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9336 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9337 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9338 && ! TREE_STATIC (exp))
9339 {
9340 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9341 op0 = force_operand (op0, target);
9342 }
9343 /* If we are taking the address of something erroneous, just
9344 return a zero. */
9345 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9346 return const0_rtx;
9347 /* If we are taking the address of a constant and are at the
9348 top level, we have to use output_constant_def since we can't
9349 call force_const_mem at top level. */
9350 else if (cfun == 0
9351 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9352 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9353 == 'c')))
9354 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9355 else
9356 {
9357 /* We make sure to pass const0_rtx down if we came in with
9358 ignore set, to avoid doing the cleanups twice for something. */
9359 op0 = expand_expr (TREE_OPERAND (exp, 0),
9360 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9361 (modifier == EXPAND_INITIALIZER
9362 ? modifier : EXPAND_CONST_ADDRESS));
9363
9364 /* If we are going to ignore the result, OP0 will have been set
9365 to const0_rtx, so just return it. Don't get confused and
9366 think we are taking the address of the constant. */
9367 if (ignore)
9368 return op0;
9369
9370 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9371 clever and returns a REG when given a MEM. */
9372 op0 = protect_from_queue (op0, 1);
9373
9374 /* We would like the object in memory. If it is a constant, we can
9375 have it be statically allocated into memory. For a non-constant,
9376 we need to allocate some memory and store the value into it. */
9377
9378 if (CONSTANT_P (op0))
9379 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9380 op0);
9381 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9382 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9383 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9384 {
9385 /* If the operand is a SAVE_EXPR, we can deal with this by
9386 forcing the SAVE_EXPR into memory. */
9387 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9388 {
9389 put_var_into_stack (TREE_OPERAND (exp, 0),
9390 /*rescan=*/true);
9391 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9392 }
9393 else
9394 {
9395 /* If this object is in a register, it can't be BLKmode. */
9396 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9397 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9398
9399 if (GET_CODE (op0) == PARALLEL)
9400 /* Handle calls that pass values in multiple
9401 non-contiguous locations. The Irix 6 ABI has examples
9402 of this. */
9403 emit_group_store (memloc, op0,
9404 int_size_in_bytes (inner_type));
9405 else
9406 emit_move_insn (memloc, op0);
9407
9408 op0 = memloc;
9409 }
9410 }
9411
9412 if (GET_CODE (op0) != MEM)
9413 abort ();
9414
9415 mark_temp_addr_taken (op0);
9416 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9417 {
9418 op0 = XEXP (op0, 0);
9419 #ifdef POINTERS_EXTEND_UNSIGNED
9420 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9421 && mode == ptr_mode)
9422 op0 = convert_memory_address (ptr_mode, op0);
9423 #endif
9424 return op0;
9425 }
9426
9427 /* If OP0 is not aligned at least as much as the type requires, we
9428 need to make a temporary, copy OP0 to it, and take the address of
9429 the temporary. We want to use the alignment of the type, not of
9430 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9431 the test for BLKmode means that can't happen. The test for
9432 BLKmode is because we never make mis-aligned MEMs with
9433 non-BLKmode.
9434
9435 We don't need to do this at all if the machine doesn't have
9436 strict alignment. */
9437 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9438 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9439 > MEM_ALIGN (op0))
9440 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9441 {
9442 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9443 rtx new;
9444
9445 if (TYPE_ALIGN_OK (inner_type))
9446 abort ();
9447
9448 if (TREE_ADDRESSABLE (inner_type))
9449 {
9450 /* We can't make a bitwise copy of this object, so fail. */
9451 error ("cannot take the address of an unaligned member");
9452 return const0_rtx;
9453 }
9454
9455 new = assign_stack_temp_for_type
9456 (TYPE_MODE (inner_type),
9457 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9458 : int_size_in_bytes (inner_type),
9459 1, build_qualified_type (inner_type,
9460 (TYPE_QUALS (inner_type)
9461 | TYPE_QUAL_CONST)));
9462
9463 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9464 (modifier == EXPAND_STACK_PARM
9465 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9466
9467 op0 = new;
9468 }
9469
9470 op0 = force_operand (XEXP (op0, 0), target);
9471 }
9472
9473 if (flag_force_addr
9474 && GET_CODE (op0) != REG
9475 && modifier != EXPAND_CONST_ADDRESS
9476 && modifier != EXPAND_INITIALIZER
9477 && modifier != EXPAND_SUM)
9478 op0 = force_reg (Pmode, op0);
9479
9480 if (GET_CODE (op0) == REG
9481 && ! REG_USERVAR_P (op0))
9482 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9483
9484 #ifdef POINTERS_EXTEND_UNSIGNED
9485 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9486 && mode == ptr_mode)
9487 op0 = convert_memory_address (ptr_mode, op0);
9488 #endif
9489
9490 return op0;
9491
9492 case ENTRY_VALUE_EXPR:
9493 abort ();
9494
9495 /* COMPLEX type for Extended Pascal & Fortran */
9496 case COMPLEX_EXPR:
9497 {
9498 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9499 rtx insns;
9500
9501 /* Get the rtx code of the operands. */
9502 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9503 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9504
9505 if (! target)
9506 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9507
9508 start_sequence ();
9509
9510 /* Move the real (op0) and imaginary (op1) parts to their location. */
9511 emit_move_insn (gen_realpart (mode, target), op0);
9512 emit_move_insn (gen_imagpart (mode, target), op1);
9513
9514 insns = get_insns ();
9515 end_sequence ();
9516
9517 /* Complex construction should appear as a single unit. */
9518 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9519 each with a separate pseudo as destination.
9520 It's not correct for flow to treat them as a unit. */
9521 if (GET_CODE (target) != CONCAT)
9522 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9523 else
9524 emit_insn (insns);
9525
9526 return target;
9527 }
9528
9529 case REALPART_EXPR:
9530 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9531 return gen_realpart (mode, op0);
9532
9533 case IMAGPART_EXPR:
9534 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9535 return gen_imagpart (mode, op0);
9536
9537 case CONJ_EXPR:
9538 {
9539 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9540 rtx imag_t;
9541 rtx insns;
9542
9543 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9544
9545 if (! target)
9546 target = gen_reg_rtx (mode);
9547
9548 start_sequence ();
9549
9550 /* Store the realpart and the negated imagpart to target. */
9551 emit_move_insn (gen_realpart (partmode, target),
9552 gen_realpart (partmode, op0));
9553
9554 imag_t = gen_imagpart (partmode, target);
9555 temp = expand_unop (partmode,
9556 ! unsignedp && flag_trapv
9557 && (GET_MODE_CLASS(partmode) == MODE_INT)
9558 ? negv_optab : neg_optab,
9559 gen_imagpart (partmode, op0), imag_t, 0);
9560 if (temp != imag_t)
9561 emit_move_insn (imag_t, temp);
9562
9563 insns = get_insns ();
9564 end_sequence ();
9565
9566 /* Conjugate should appear as a single unit.
9567 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9568 each with a separate pseudo as destination.
9569 It's not correct for flow to treat them as a unit. */
9570 if (GET_CODE (target) != CONCAT)
9571 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9572 else
9573 emit_insn (insns);
9574
9575 return target;
9576 }
9577
9578 case TRY_CATCH_EXPR:
9579 {
9580 tree handler = TREE_OPERAND (exp, 1);
9581
9582 expand_eh_region_start ();
9583
9584 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9585
9586 expand_eh_region_end_cleanup (handler);
9587
9588 return op0;
9589 }
9590
9591 case TRY_FINALLY_EXPR:
9592 {
9593 tree try_block = TREE_OPERAND (exp, 0);
9594 tree finally_block = TREE_OPERAND (exp, 1);
9595
9596 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9597 {
9598 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9599 is not sufficient, so we cannot expand the block twice.
9600 So we play games with GOTO_SUBROUTINE_EXPR to let us
9601 expand the thing only once. */
9602 /* When not optimizing, we go ahead with this form since
9603 (1) user breakpoints operate more predictably without
9604 code duplication, and
9605 (2) we're not running any of the global optimizers
9606 that would explode in time/space with the highly
9607 connected CFG created by the indirect branching. */
9608
9609 rtx finally_label = gen_label_rtx ();
9610 rtx done_label = gen_label_rtx ();
9611 rtx return_link = gen_reg_rtx (Pmode);
9612 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9613 (tree) finally_label, (tree) return_link);
9614 TREE_SIDE_EFFECTS (cleanup) = 1;
9615
9616 /* Start a new binding layer that will keep track of all cleanup
9617 actions to be performed. */
9618 expand_start_bindings (2);
9619 target_temp_slot_level = temp_slot_level;
9620
9621 expand_decl_cleanup (NULL_TREE, cleanup);
9622 op0 = expand_expr (try_block, target, tmode, modifier);
9623
9624 preserve_temp_slots (op0);
9625 expand_end_bindings (NULL_TREE, 0, 0);
9626 emit_jump (done_label);
9627 emit_label (finally_label);
9628 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9629 emit_indirect_jump (return_link);
9630 emit_label (done_label);
9631 }
9632 else
9633 {
9634 expand_start_bindings (2);
9635 target_temp_slot_level = temp_slot_level;
9636
9637 expand_decl_cleanup (NULL_TREE, finally_block);
9638 op0 = expand_expr (try_block, target, tmode, modifier);
9639
9640 preserve_temp_slots (op0);
9641 expand_end_bindings (NULL_TREE, 0, 0);
9642 }
9643
9644 return op0;
9645 }
9646
9647 case GOTO_SUBROUTINE_EXPR:
9648 {
9649 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9650 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9651 rtx return_address = gen_label_rtx ();
9652 emit_move_insn (return_link,
9653 gen_rtx_LABEL_REF (Pmode, return_address));
9654 emit_jump (subr);
9655 emit_label (return_address);
9656 return const0_rtx;
9657 }
9658
9659 case VA_ARG_EXPR:
9660 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9661
9662 case EXC_PTR_EXPR:
9663 return get_exception_pointer (cfun);
9664
9665 case FDESC_EXPR:
9666 /* Function descriptors are not valid except for as
9667 initialization constants, and should not be expanded. */
9668 abort ();
9669
9670 default:
9671 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9672 }
9673
9674 /* Here to do an ordinary binary operator, generating an instruction
9675 from the optab already placed in `this_optab'. */
9676 binop:
9677 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9678 subtarget = 0;
9679 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9680 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9681 binop2:
9682 if (modifier == EXPAND_STACK_PARM)
9683 target = 0;
9684 temp = expand_binop (mode, this_optab, op0, op1, target,
9685 unsignedp, OPTAB_LIB_WIDEN);
9686 if (temp == 0)
9687 abort ();
9688 return temp;
9689 }
9690 \f
9691 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9692 when applied to the address of EXP produces an address known to be
9693 aligned more than BIGGEST_ALIGNMENT. */
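/* A typical offset with this property arises from alignment code of the
   rough shape  (-(long) &exp) & (ALIGN - 1),  where ALIGN - 1 is one less
   than a power of 2 and larger than BIGGEST_ALIGNMENT; the checks below
   walk exactly that pattern.  */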
9694
9695 static int
9696 is_aligning_offset (offset, exp)
9697 tree offset;
9698 tree exp;
9699 {
9700 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9701 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9702 || TREE_CODE (offset) == NOP_EXPR
9703 || TREE_CODE (offset) == CONVERT_EXPR
9704 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9705 offset = TREE_OPERAND (offset, 0);
9706
9707 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9708 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9709 if (TREE_CODE (offset) != BIT_AND_EXPR
9710 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9711 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9712 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9713 return 0;
9714
9715 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9716 It must be NEGATE_EXPR. Then strip any more conversions. */
9717 offset = TREE_OPERAND (offset, 0);
9718 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9719 || TREE_CODE (offset) == NOP_EXPR
9720 || TREE_CODE (offset) == CONVERT_EXPR)
9721 offset = TREE_OPERAND (offset, 0);
9722
9723 if (TREE_CODE (offset) != NEGATE_EXPR)
9724 return 0;
9725
9726 offset = TREE_OPERAND (offset, 0);
9727 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9728 || TREE_CODE (offset) == NOP_EXPR
9729 || TREE_CODE (offset) == CONVERT_EXPR)
9730 offset = TREE_OPERAND (offset, 0);
9731
9732 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9733 whose type is the same as EXP. */
9734 return (TREE_CODE (offset) == ADDR_EXPR
9735 && (TREE_OPERAND (offset, 0) == exp
9736 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9737 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9738 == TREE_TYPE (exp)))));
9739 }
9740 \f
9741 /* Return the tree node if an ARG corresponds to a string constant or zero
9742 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9743 in bytes within the string that ARG is accessing. The type of the
9744 offset will be `sizetype'. */
9745
9746 tree
9747 string_constant (arg, ptr_offset)
9748 tree arg;
9749 tree *ptr_offset;
9750 {
9751 STRIP_NOPS (arg);
9752
9753 if (TREE_CODE (arg) == ADDR_EXPR
9754 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9755 {
9756 *ptr_offset = size_zero_node;
9757 return TREE_OPERAND (arg, 0);
9758 }
9759 else if (TREE_CODE (arg) == PLUS_EXPR)
9760 {
9761 tree arg0 = TREE_OPERAND (arg, 0);
9762 tree arg1 = TREE_OPERAND (arg, 1);
9763
9764 STRIP_NOPS (arg0);
9765 STRIP_NOPS (arg1);
9766
9767 if (TREE_CODE (arg0) == ADDR_EXPR
9768 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9769 {
9770 *ptr_offset = convert (sizetype, arg1);
9771 return TREE_OPERAND (arg0, 0);
9772 }
9773 else if (TREE_CODE (arg1) == ADDR_EXPR
9774 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9775 {
9776 *ptr_offset = convert (sizetype, arg0);
9777 return TREE_OPERAND (arg1, 0);
9778 }
9779 }
9780
9781 return 0;
9782 }
9783 \f
9784 /* Expand code for a post- or pre- increment or decrement
9785 and return the RTX for the result.
9786 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9787
9788 static rtx
9789 expand_increment (exp, post, ignore)
9790 tree exp;
9791 int post, ignore;
9792 {
9793 rtx op0, op1;
9794 rtx temp, value;
9795 tree incremented = TREE_OPERAND (exp, 0);
9796 optab this_optab = add_optab;
9797 int icode;
9798 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9799 int op0_is_copy = 0;
9800 int single_insn = 0;
9801 /* 1 means we can't store into OP0 directly,
9802 because it is a subreg narrower than a word,
9803 and we don't dare clobber the rest of the word. */
9804 int bad_subreg = 0;
9805
9806 /* Stabilize any component ref that might need to be
9807 evaluated more than once below. */
9808 if (!post
9809 || TREE_CODE (incremented) == BIT_FIELD_REF
9810 || (TREE_CODE (incremented) == COMPONENT_REF
9811 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9812 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9813 incremented = stabilize_reference (incremented);
9814 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9815 ones into save exprs so that they don't accidentally get evaluated
9816 more than once by the code below. */
9817 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9818 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9819 incremented = save_expr (incremented);
9820
9821 /* Compute the operands as RTX.
9822 Note whether OP0 is the actual lvalue or a copy of it:
9823 I believe it is a copy iff it is a register or subreg
9824 and insns were generated in computing it. */
9825
9826 temp = get_last_insn ();
9827 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9828
9829 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9830 in place but instead must do sign- or zero-extension during assignment,
9831 so we copy it into a new register and let the code below use it as
9832 a copy.
9833
9834 Note that we can safely modify this SUBREG since it is known not to be
9835 shared (it was made by the expand_expr call above). */
9836
9837 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9838 {
9839 if (post)
9840 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9841 else
9842 bad_subreg = 1;
9843 }
9844 else if (GET_CODE (op0) == SUBREG
9845 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9846 {
9847 /* We cannot increment this SUBREG in place. If we are
9848 post-incrementing, get a copy of the old value. Otherwise,
9849 just mark that we cannot increment in place. */
9850 if (post)
9851 op0 = copy_to_reg (op0);
9852 else
9853 bad_subreg = 1;
9854 }
9855
9856 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9857 && temp != get_last_insn ());
9858 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9859
9860 /* Decide whether incrementing or decrementing. */
9861 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9862 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9863 this_optab = sub_optab;
9864
9865 /* Convert decrement by a constant into a negative increment. */
9866 if (this_optab == sub_optab
9867 && GET_CODE (op1) == CONST_INT)
9868 {
9869 op1 = GEN_INT (-INTVAL (op1));
9870 this_optab = add_optab;
9871 }
9872
9873 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9874 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9875
9876 /* For a preincrement, see if we can do this with a single instruction. */
9877 if (!post)
9878 {
9879 icode = (int) this_optab->handlers[(int) mode].insn_code;
9880 if (icode != (int) CODE_FOR_nothing
9881 /* Make sure that OP0 is valid for operands 0 and 1
9882 of the insn we want to queue. */
9883 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9884 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9885 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9886 single_insn = 1;
9887 }
9888
9889 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9890 then we cannot just increment OP0. We must therefore contrive to
9891 increment the original value. Then, for postincrement, we can return
9892 OP0 since it is a copy of the old value. For preincrement, expand here
9893 unless we can do it with a single insn.
9894
9895 Likewise if storing directly into OP0 would clobber high bits
9896 we need to preserve (bad_subreg). */
9897 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9898 {
9899 /* This is the easiest way to increment the value wherever it is.
9900 Problems with multiple evaluation of INCREMENTED are prevented
9901 because either (1) it is a component_ref or preincrement,
9902 in which case it was stabilized above, or (2) it is an array_ref
9903 with constant index in an array in a register, which is
9904 safe to reevaluate. */
9905 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9906 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9907 ? MINUS_EXPR : PLUS_EXPR),
9908 TREE_TYPE (exp),
9909 incremented,
9910 TREE_OPERAND (exp, 1));
9911
9912 while (TREE_CODE (incremented) == NOP_EXPR
9913 || TREE_CODE (incremented) == CONVERT_EXPR)
9914 {
9915 newexp = convert (TREE_TYPE (incremented), newexp);
9916 incremented = TREE_OPERAND (incremented, 0);
9917 }
9918
9919 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9920 return post ? op0 : temp;
9921 }
9922
9923 if (post)
9924 {
9925 /* We have a true reference to the value in OP0.
9926 If there is an insn to add or subtract in this mode, queue it.
9927 Queueing the increment insn avoids the register shuffling
9928 that often results if we must increment now and first save
9929 the old value for subsequent use. */
9930
9931 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9932 op0 = stabilize (op0);
9933 #endif
9934
9935 icode = (int) this_optab->handlers[(int) mode].insn_code;
9936 if (icode != (int) CODE_FOR_nothing
9937 /* Make sure that OP0 is valid for operands 0 and 1
9938 of the insn we want to queue. */
9939 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9940 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9941 {
9942 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9943 op1 = force_reg (mode, op1);
9944
9945 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9946 }
9947 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9948 {
9949 rtx addr = (general_operand (XEXP (op0, 0), mode)
9950 ? force_reg (Pmode, XEXP (op0, 0))
9951 : copy_to_reg (XEXP (op0, 0)));
9952 rtx temp, result;
9953
9954 op0 = replace_equiv_address (op0, addr);
9955 temp = force_reg (GET_MODE (op0), op0);
9956 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9957 op1 = force_reg (mode, op1);
9958
9959 /* The increment queue is LIFO, thus we have to `queue'
9960 the instructions in reverse order. */
9961 enqueue_insn (op0, gen_move_insn (op0, temp));
9962 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9963 return result;
9964 }
9965 }
9966
9967 /* Preincrement, or we can't increment with one simple insn. */
9968 if (post)
9969 /* Save a copy of the value before inc or dec, to return it later. */
9970 temp = value = copy_to_reg (op0);
9971 else
9972 /* Arrange to return the incremented value. */
9973 /* Copy the rtx because expand_binop will protect from the queue,
9974 and the results of that would be invalid for us to return
9975 if our caller does emit_queue before using our result. */
9976 temp = copy_rtx (value = op0);
9977
9978 /* Increment however we can. */
9979 op1 = expand_binop (mode, this_optab, value, op1, op0,
9980 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9981
9982 /* Make sure the value is stored into OP0. */
9983 if (op1 != op0)
9984 emit_move_insn (op0, op1);
9985
9986 return temp;
9987 }
9988 \f
9989 /* Generate code to calculate EXP using a store-flag instruction
9990 and return an rtx for the result. EXP is either a comparison
9991 or a TRUTH_NOT_EXPR whose operand is a comparison.
9992
9993 If TARGET is nonzero, store the result there if convenient.
9994
9995 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9996 cheap.
9997
9998 Return zero if there is no suitable set-flag instruction
9999 available on this machine.
10000
10001 Once expand_expr has been called on the arguments of the comparison,
10002 we are committed to doing the store flag, since it is not safe to
10003 re-evaluate the expression. We emit the store-flag insn by calling
10004 emit_store_flag, but only expand the arguments if we have a reason
10005 to believe that emit_store_flag will be successful. If we think that
10006 it will, but it isn't, we have to simulate the store-flag with a
10007 set/jump/set sequence. */
10008
10009 static rtx
10010 do_store_flag (exp, target, mode, only_cheap)
10011 tree exp;
10012 rtx target;
10013 enum machine_mode mode;
10014 int only_cheap;
10015 {
10016 enum rtx_code code;
10017 tree arg0, arg1, type;
10018 tree tem;
10019 enum machine_mode operand_mode;
10020 int invert = 0;
10021 int unsignedp;
10022 rtx op0, op1;
10023 enum insn_code icode;
10024 rtx subtarget = target;
10025 rtx result, label;
10026
10027 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10028 result at the end. We can't simply invert the test since it would
10029 have already been inverted if it were valid. This case occurs for
10030 some floating-point comparisons. */
10031
10032 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10033 invert = 1, exp = TREE_OPERAND (exp, 0);
10034
10035 arg0 = TREE_OPERAND (exp, 0);
10036 arg1 = TREE_OPERAND (exp, 1);
10037
10038 /* Don't crash if the comparison was erroneous. */
10039 if (arg0 == error_mark_node || arg1 == error_mark_node)
10040 return const0_rtx;
10041
10042 type = TREE_TYPE (arg0);
10043 operand_mode = TYPE_MODE (type);
10044 unsignedp = TREE_UNSIGNED (type);
10045
10046 /* We won't bother with BLKmode store-flag operations because it would mean
10047 passing a lot of information to emit_store_flag. */
10048 if (operand_mode == BLKmode)
10049 return 0;
10050
10051 /* We won't bother with store-flag operations involving function pointers
10052 when function pointers must be canonicalized before comparisons. */
10053 #ifdef HAVE_canonicalize_funcptr_for_compare
10054 if (HAVE_canonicalize_funcptr_for_compare
10055 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10056 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10057 == FUNCTION_TYPE))
10058 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10059 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10060 == FUNCTION_TYPE))))
10061 return 0;
10062 #endif
10063
10064 STRIP_NOPS (arg0);
10065 STRIP_NOPS (arg1);
10066
10067 /* Get the rtx comparison code to use. We know that EXP is a comparison
10068 operation of some type. Some comparisons against 1 and -1 can be
10069 converted to comparisons with zero. Do so here so that the tests
10070 below will be aware that we have a comparison with zero. These
10071 tests will not catch constants in the first operand, but constants
10072 are rarely passed as the first operand. */
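  /* For instance (signed),  x < 1  becomes  x <= 0  and  x > -1  becomes
     x >= 0; for unsigned operands,  x >= 1  becomes  x > 0 (GTU).  The
     cases below perform exactly these rewrites on ARG1 and CODE.  */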
10073
10074 switch (TREE_CODE (exp))
10075 {
10076 case EQ_EXPR:
10077 code = EQ;
10078 break;
10079 case NE_EXPR:
10080 code = NE;
10081 break;
10082 case LT_EXPR:
10083 if (integer_onep (arg1))
10084 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10085 else
10086 code = unsignedp ? LTU : LT;
10087 break;
10088 case LE_EXPR:
10089 if (! unsignedp && integer_all_onesp (arg1))
10090 arg1 = integer_zero_node, code = LT;
10091 else
10092 code = unsignedp ? LEU : LE;
10093 break;
10094 case GT_EXPR:
10095 if (! unsignedp && integer_all_onesp (arg1))
10096 arg1 = integer_zero_node, code = GE;
10097 else
10098 code = unsignedp ? GTU : GT;
10099 break;
10100 case GE_EXPR:
10101 if (integer_onep (arg1))
10102 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10103 else
10104 code = unsignedp ? GEU : GE;
10105 break;
10106
10107 case UNORDERED_EXPR:
10108 code = UNORDERED;
10109 break;
10110 case ORDERED_EXPR:
10111 code = ORDERED;
10112 break;
10113 case UNLT_EXPR:
10114 code = UNLT;
10115 break;
10116 case UNLE_EXPR:
10117 code = UNLE;
10118 break;
10119 case UNGT_EXPR:
10120 code = UNGT;
10121 break;
10122 case UNGE_EXPR:
10123 code = UNGE;
10124 break;
10125 case UNEQ_EXPR:
10126 code = UNEQ;
10127 break;
10128
10129 default:
10130 abort ();
10131 }
10132
10133 /* Put a constant second. */
10134 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10135 {
10136 tem = arg0; arg0 = arg1; arg1 = tem;
10137 code = swap_condition (code);
10138 }
10139
10140 /* If this is an equality or inequality test of a single bit, we can
10141 do this by shifting the bit being tested to the low-order bit and
10142 masking the result with the constant 1. If the condition was EQ,
10143 we xor it with 1. This does not require an scc insn and is faster
10144 than an scc insn even if we have it. */
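  /* Sketch: for  (x & 8) != 0  the result is computed as  (x >> 3) & 1,
     and for  (x & 8) == 0  as  ((x >> 3) & 1) ^ 1.  When the tested bit is
     the most significant bit of the type, the final AND can be omitted.  */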
10145
10146 if ((code == NE || code == EQ)
10147 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10148 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10149 {
10150 tree inner = TREE_OPERAND (arg0, 0);
10151 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10152 int ops_unsignedp;
10153
10154 /* If INNER is a right shift of a constant and it plus BITNUM does
10155 not overflow, adjust BITNUM and INNER. */
10156
10157 if (TREE_CODE (inner) == RSHIFT_EXPR
10158 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10159 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10160 && bitnum < TYPE_PRECISION (type)
10161 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10162 bitnum - TYPE_PRECISION (type)))
10163 {
10164 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10165 inner = TREE_OPERAND (inner, 0);
10166 }
10167
10168 /* If we are going to be able to omit the AND below, we must do our
10169 operations as unsigned. If we must use the AND, we have a choice.
10170 Normally unsigned is faster, but for some machines signed is. */
10171 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10172 #ifdef LOAD_EXTEND_OP
10173 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10174 #else
10175 : 1
10176 #endif
10177 );
10178
10179 if (! get_subtarget (subtarget)
10180 || GET_MODE (subtarget) != operand_mode
10181 || ! safe_from_p (subtarget, inner, 1))
10182 subtarget = 0;
10183
10184 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10185
10186 if (bitnum != 0)
10187 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10188 size_int (bitnum), subtarget, ops_unsignedp);
10189
10190 if (GET_MODE (op0) != mode)
10191 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10192
10193 if ((code == EQ && ! invert) || (code == NE && invert))
10194 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10195 ops_unsignedp, OPTAB_LIB_WIDEN);
10196
10197 /* Put the AND last so it can combine with more things. */
10198 if (bitnum != TYPE_PRECISION (type) - 1)
10199 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10200
10201 return op0;
10202 }
10203
10204 /* Now see if we are likely to be able to do this. Return if not. */
10205 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10206 return 0;
10207
10208 icode = setcc_gen_code[(int) code];
10209 if (icode == CODE_FOR_nothing
10210 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10211 {
10212 /* We can only do this if it is one of the special cases that
10213 can be handled without an scc insn. */
10214 if ((code == LT && integer_zerop (arg1))
10215 || (! only_cheap && code == GE && integer_zerop (arg1)))
10216 ;
10217 else if (BRANCH_COST >= 0
10218 && ! only_cheap && (code == NE || code == EQ)
10219 && TREE_CODE (type) != REAL_TYPE
10220 && ((abs_optab->handlers[(int) operand_mode].insn_code
10221 != CODE_FOR_nothing)
10222 || (ffs_optab->handlers[(int) operand_mode].insn_code
10223 != CODE_FOR_nothing)))
10224 ;
10225 else
10226 return 0;
10227 }
10228
10229 if (! get_subtarget (target)
10230 || GET_MODE (subtarget) != operand_mode
10231 || ! safe_from_p (subtarget, arg1, 1))
10232 subtarget = 0;
10233
10234 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10235 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10236
10237 if (target == 0)
10238 target = gen_reg_rtx (mode);
10239
10240 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10241 because, if emit_store_flag does anything, it will succeed and
10242 OP0 and OP1 will not be used subsequently. */
10243
10244 result = emit_store_flag (target, code,
10245 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10246 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10247 operand_mode, unsignedp, 1);
10248
10249 if (result)
10250 {
10251 if (invert)
10252 result = expand_binop (mode, xor_optab, result, const1_rtx,
10253 result, 0, OPTAB_LIB_WIDEN);
10254 return result;
10255 }
10256
10257 /* If this failed, we have to do this with set/compare/jump/set code. */
10258 if (GET_CODE (target) != REG
10259 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10260 target = gen_reg_rtx (GET_MODE (target));
10261
10262 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10263 result = compare_from_rtx (op0, op1, code, unsignedp,
10264 operand_mode, NULL_RTX);
10265 if (GET_CODE (result) == CONST_INT)
10266 return (((result == const0_rtx && ! invert)
10267 || (result != const0_rtx && invert))
10268 ? const0_rtx : const1_rtx);
10269
10270 /* The code of RESULT may not match CODE if compare_from_rtx
10271 decided to swap its operands and reverse the original code.
10272
10273 We know that compare_from_rtx returns either a CONST_INT or
10274 a new comparison code, so it is safe to just extract the
10275 code from RESULT. */
10276 code = GET_CODE (result);
10277
10278 label = gen_label_rtx ();
10279 if (bcc_gen_fctn[(int) code] == 0)
10280 abort ();
10281
10282 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10283 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10284 emit_label (label);
10285
10286 return target;
10287 }
10288 \f
10289
10290 /* Stubs in case we haven't got a casesi insn. */
10291 #ifndef HAVE_casesi
10292 # define HAVE_casesi 0
10293 # define gen_casesi(a, b, c, d, e) (0)
10294 # define CODE_FOR_casesi CODE_FOR_nothing
10295 #endif
10296
10297 /* If the machine does not have a case insn that compares the bounds,
10298 this means extra overhead for dispatch tables, which raises the
10299 threshold for using them. */
10300 #ifndef CASE_VALUES_THRESHOLD
10301 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10302 #endif /* CASE_VALUES_THRESHOLD */
10303
10304 unsigned int
10305 case_values_threshold ()
10306 {
10307 return CASE_VALUES_THRESHOLD;
10308 }
10309
10310 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10311 0 otherwise (i.e. if there is no casesi instruction). */
10312 int
10313 try_casesi (index_type, index_expr, minval, range,
10314 table_label, default_label)
10315 tree index_type, index_expr, minval, range;
10316 rtx table_label ATTRIBUTE_UNUSED;
10317 rtx default_label;
10318 {
10319 enum machine_mode index_mode = SImode;
10320 int index_bits = GET_MODE_BITSIZE (index_mode);
10321 rtx op1, op2, index;
10322 enum machine_mode op_mode;
10323
10324 if (! HAVE_casesi)
10325 return 0;
10326
10327 /* Convert the index to SImode. */
10328 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10329 {
10330 enum machine_mode omode = TYPE_MODE (index_type);
10331 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10332
10333 /* We must handle the endpoints in the original mode. */
10334 index_expr = build (MINUS_EXPR, index_type,
10335 index_expr, minval);
10336 minval = integer_zero_node;
10337 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10338 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10339 omode, 1, default_label);
10340 /* Now we can safely truncate. */
10341 index = convert_to_mode (index_mode, index, 0);
10342 }
10343 else
10344 {
10345 if (TYPE_MODE (index_type) != index_mode)
10346 {
10347 index_expr = convert ((*lang_hooks.types.type_for_size)
10348 (index_bits, 0), index_expr);
10349 index_type = TREE_TYPE (index_expr);
10350 }
10351
10352 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10353 }
10354 emit_queue ();
10355 index = protect_from_queue (index, 0);
10356 do_pending_stack_adjust ();
10357
10358 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10359 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10360 (index, op_mode))
10361 index = copy_to_mode_reg (op_mode, index);
10362
10363 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10364
10365 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10366 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10367 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10368 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10369 (op1, op_mode))
10370 op1 = copy_to_mode_reg (op_mode, op1);
10371
10372 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10373
10374 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10375 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10376 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10377 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10378 (op2, op_mode))
10379 op2 = copy_to_mode_reg (op_mode, op2);
10380
10381 emit_jump_insn (gen_casesi (index, op1, op2,
10382 table_label, default_label));
10383 return 1;
10384 }
10385
10386 /* Attempt to generate a tablejump instruction; same concept. */
10387 #ifndef HAVE_tablejump
10388 #define HAVE_tablejump 0
10389 #define gen_tablejump(x, y) (0)
10390 #endif
10391
10392 /* Subroutine of the next function.
10393
10394 INDEX is the value being switched on, with the lowest value
10395 in the table already subtracted.
10396 MODE is its expected mode (needed if INDEX is constant).
10397 RANGE is the length of the jump table.
10398 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10399
10400 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10401 index value is out of range. */
10402
10403 static void
10404 do_tablejump (index, mode, range, table_label, default_label)
10405 rtx index, range, table_label, default_label;
10406 enum machine_mode mode;
10407 {
10408 rtx temp, vector;
10409
10410 if (INTVAL (range) > cfun->max_jumptable_ents)
10411 cfun->max_jumptable_ents = INTVAL (range);
10412
10413 /* Do an unsigned comparison (in the proper mode) between the index
10414 expression and the value which represents the length of the range.
10415 Since we just finished subtracting the lower bound of the range
10416 from the index expression, this comparison allows us to simultaneously
10417 check that the original index expression value is both greater than
10418 or equal to the minimum value of the range and less than or equal to
10419 the maximum value of the range. */
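  /* Worked example (illustrative): for case values 5 .. 12, RANGE is 7
     and the lower bound 5 has already been subtracted.  An original index
     of 9 becomes 4 (<= 7) and falls through; 13 becomes 8 (> 7), and 3
     wraps around to 0xfffffffe in a 32-bit mode (also > 7), so both
     out-of-range cases take the single GTU branch to DEFAULT_LABEL.  */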
10420
10421 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10422 default_label);
10423
10424 /* If index is in range, it must fit in Pmode.
10425 Convert to Pmode so we can index with it. */
10426 if (mode != Pmode)
10427 index = convert_to_mode (Pmode, index, 1);
10428
10429 /* Don't let a MEM slip through, because then the INDEX that comes
 10430 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
 10431 and break_out_memory_refs will go to work on it and mess it up.  */
10432 #ifdef PIC_CASE_VECTOR_ADDRESS
10433 if (flag_pic && GET_CODE (index) != REG)
10434 index = copy_to_mode_reg (Pmode, index);
10435 #endif
10436
10437 /* If flag_force_addr were to affect this address
10438 it could interfere with the tricky assumptions made
10439 about addresses that contain label-refs,
10440 which may be valid only very near the tablejump itself. */
10441 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10442 GET_MODE_SIZE, because this indicates how large insns are. The other
10443 uses should all be Pmode, because they are addresses. This code
10444 could fail if addresses and insns are not the same size. */
10445 index = gen_rtx_PLUS (Pmode,
10446 gen_rtx_MULT (Pmode, index,
10447 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10448 gen_rtx_LABEL_REF (Pmode, table_label));
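  /* Ignoring PIC, INDEX now has roughly the shape
	(plus:P (mult:P index (const_int N)) (label_ref table_label))
     where N is GET_MODE_SIZE (CASE_VECTOR_MODE), e.g. 4 when the table
     entries are SImode.  */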
10449 #ifdef PIC_CASE_VECTOR_ADDRESS
10450 if (flag_pic)
10451 index = PIC_CASE_VECTOR_ADDRESS (index);
10452 else
10453 #endif
10454 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10455 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10456 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10457 RTX_UNCHANGING_P (vector) = 1;
10458 MEM_NOTRAP_P (vector) = 1;
10459 convert_move (temp, vector, 0);
10460
10461 emit_jump_insn (gen_tablejump (temp, table_label));
10462
10463 /* If we are generating PIC code or if the table is PC-relative, the
10464 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10465 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10466 emit_barrier ();
10467 }
10468
10469 int
10470 try_tablejump (index_type, index_expr, minval, range,
10471 table_label, default_label)
10472 tree index_type, index_expr, minval, range;
10473 rtx table_label, default_label;
10474 {
10475 rtx index;
10476
10477 if (! HAVE_tablejump)
10478 return 0;
10479
10480 index_expr = fold (build (MINUS_EXPR, index_type,
10481 convert (index_type, index_expr),
10482 convert (index_type, minval)));
10483 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10484 emit_queue ();
10485 index = protect_from_queue (index, 0);
10486 do_pending_stack_adjust ();
10487
10488 do_tablejump (index, TYPE_MODE (index_type),
10489 convert_modes (TYPE_MODE (index_type),
10490 TYPE_MODE (TREE_TYPE (range)),
10491 expand_expr (range, NULL_RTX,
10492 VOIDmode, 0),
10493 TREE_UNSIGNED (TREE_TYPE (range))),
10494 table_label, default_label);
10495 return 1;
10496 }
10497
10498 /* Nonzero if the mode is a valid vector mode for this architecture.
10499 This returns nonzero even if there is no hardware support for the
 10500 vector mode, as long as we can emulate it with narrower modes.  */
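/* For example, V4SFmode is reported valid on a target with no vector
   float unit as long as SFmode moves exist, since the elements can then
   be moved one at a time.  */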
10501
10502 int
10503 vector_mode_valid_p (mode)
10504 enum machine_mode mode;
10505 {
10506 enum mode_class class = GET_MODE_CLASS (mode);
10507 enum machine_mode innermode;
10508
10509 /* Doh!  Not a vector mode at all?  Then it certainly is not valid.  */
10510 if (class != MODE_VECTOR_INT
10511 && class != MODE_VECTOR_FLOAT)
10512 return 0;
10513
10514 /* Hardware support. Woo hoo! */
10515 if (VECTOR_MODE_SUPPORTED_P (mode))
10516 return 1;
10517
10518 innermode = GET_MODE_INNER (mode);
10519
10520 /* We should probably also return 1 if V4DI is requested and we have no DI
 10521 but do have V2DI (emulating with a pair of V2DIs); that case is very unlikely, though.  */
10522
10523 /* If we have support for the inner mode, we can safely emulate it.
 10524 We may not have V2DI, but we can emulate it with a pair of DIs.  */
10525 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10526 }
10527
10528 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
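/* For illustration, a V4SImode VECTOR_CST that lists only the elements
   1 and 2 would yield roughly
	(const_vector:V4SI [(const_int 1) (const_int 2)
			    (const_int 0) (const_int 0)])
   since trailing elements default to zero below.  */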
10529 static rtx
10530 const_vector_from_tree (exp)
10531 tree exp;
10532 {
10533 rtvec v;
10534 int units, i;
10535 tree link, elt;
10536 enum machine_mode inner, mode;
10537
10538 mode = TYPE_MODE (TREE_TYPE (exp));
10539
10540 if (is_zeros_p (exp))
10541 return CONST0_RTX (mode);
10542
10543 units = GET_MODE_NUNITS (mode);
10544 inner = GET_MODE_INNER (mode);
10545
10546 v = rtvec_alloc (units);
10547
10548 link = TREE_VECTOR_CST_ELTS (exp);
10549 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10550 {
10551 elt = TREE_VALUE (link);
10552
10553 if (TREE_CODE (elt) == REAL_CST)
10554 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10555 inner);
10556 else
10557 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10558 TREE_INT_CST_HIGH (elt),
10559 inner);
10560 }
10561
10562 /* Initialize remaining elements to 0. */
10563 for (; i < units; ++i)
10564 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10565
10566 return gen_rtx_raw_CONST_VECTOR (mode, v);
10567 }
10568
10569 #include "gt-expr.h"