1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should be processed from last to first if the stack and args grow
53 in opposite directions, but only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
75
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
83
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
86
87 /* This structure is used by move_by_pieces to describe the move to
88 be performed. */
89 struct move_by_pieces
90 {
91 rtx to;
92 rtx to_addr;
93 int autinc_to;
94 int explicit_inc_to;
95 rtx from;
96 rtx from_addr;
97 int autinc_from;
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 int reverse;
102 };
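/* Field notes (editorial, for clarity; see move_by_pieces and
   move_by_pieces_1 below): TO and FROM are the BLKmode MEMs being copied
   and TO_ADDR/FROM_ADDR their working addresses.  AUTINC_TO and
   AUTINC_FROM are nonzero when the address already auto-increments;
   EXPLICIT_INC_TO and EXPLICIT_INC_FROM are -1 or 1 when an explicit
   pre-decrement or post-increment add must be emitted by hand.  LEN is
   the number of bytes still to move, OFFSET the current byte offset, and
   REVERSE is nonzero when the copy proceeds from the high addresses
   downward.  */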
103
104 /* This structure is used by store_by_pieces to describe the clear to
105 be performed. */
106
107 struct store_by_pieces
108 {
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116 PTR constfundata;
117 int reverse;
118 };
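/* Field notes (editorial, for clarity): this mirrors struct
   move_by_pieces but has no source block.  Instead CONSTFUN is called as
   (*constfun) (constfundata, offset, mode) to produce the rtx value to
   store for each piece; clear_by_pieces, for instance, supplies a
   callback that simply yields zero.  */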
119
120 extern struct obstack permanent_obstack;
121
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
125 unsigned int));
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
131 unsigned int));
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
133 unsigned int));
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
135 enum machine_mode,
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
147 int));
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
154 rtx, rtx));
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
158 #endif
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
160
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
164
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
167
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
170
171 #ifndef MOVE_RATIO
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173 #define MOVE_RATIO 2
174 #else
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
177 #endif
178 #endif
179
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
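/* Illustrative example (editorial, not part of the original source):
   on a target where MOVE_MAX and MOVE_MAX_PIECES are 4, a word-aligned
   copy of 32 bytes costs move_by_pieces_ninsns == 8 SImode moves.  With
   the default MOVE_RATIO of 15 the copy is done inline by
   move_by_pieces; under -Os (MOVE_RATIO == 3) emit_block_move falls
   back to a movstr pattern or a library call instead.  */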
186
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
189
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
192
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
194
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
197 #endif
198 \f
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
201
202 void
203 init_expr_once ()
204 {
205 rtx insn, pat;
206 enum machine_mode mode;
207 int num_clobbers;
208 rtx mem, mem1;
209
210 start_sequence ();
211
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
217
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
220
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
223 {
224 int regno;
225 rtx reg;
226
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
230
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
233
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
237 regno++)
238 {
239 if (! HARD_REGNO_MODE_OK (regno, mode))
240 continue;
241
242 reg = gen_rtx_REG (mode, regno);
243
244 SET_SRC (pat) = mem;
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
248
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
253
254 SET_SRC (pat) = reg;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
258
259 SET_SRC (pat) = reg;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
263 }
264 }
265
266 end_sequence ();
267 }
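/* Editorial note: the direct_load/direct_store flags computed above are
   consulted later (for instance in convert_move and convert_modes) to
   decide whether a MEM may be referenced directly in a narrower mode via
   gen_lowpart or must first be copied into a register.  */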
268
269 /* This is run at the start of compiling a function. */
270
271 void
272 init_expr ()
273 {
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
275
276 pending_chain = 0;
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
280 saveregs_value = 0;
281 apply_args_value = 0;
282 forced_labels = 0;
283 }
284
285 void
286 mark_expr_status (p)
287 struct expr_status *p;
288 {
289 if (p == NULL)
290 return;
291
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
295 }
296
297 void
298 free_expr_status (f)
299 struct function *f;
300 {
301 free (f->expr);
302 f->expr = NULL;
303 }
304
305 /* Small sanity check that the queue is empty at the end of a function. */
306
307 void
308 finish_expr_for_function ()
309 {
310 if (pending_chain)
311 abort ();
312 }
313 \f
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
316
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
320
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
323
324 static rtx
325 enqueue_insn (var, body)
326 rtx var, body;
327 {
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
331 }
332
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
339
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
343
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
347
348 rtx
349 protect_from_queue (x, modify)
350 rtx x;
351 int modify;
352 {
353 RTX_CODE code = GET_CODE (x);
354
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
358 return x;
359 #endif
360
361 if (code != QUEUED)
362 {
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
367 shared. */
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
370 {
371 rtx y = XEXP (x, 0);
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
373
374 if (QUEUED_INSN (y))
375 {
376 rtx temp = gen_reg_rtx (GET_MODE (x));
377
378 emit_insn_before (gen_move_insn (temp, new),
379 QUEUED_INSN (y));
380 return temp;
381 }
382
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
386 }
387
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
390 if (code == MEM)
391 {
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
394 {
395 x = copy_rtx (x);
396 XEXP (x, 0) = tem;
397 }
398 }
399 else if (code == PLUS || code == MULT)
400 {
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
404 {
405 x = copy_rtx (x);
406 XEXP (x, 0) = new0;
407 XEXP (x, 1) = new1;
408 }
409 }
410 return x;
411 }
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
414 emit_queue. */
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
418 use that copy. */
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
425 QUEUED_INSN (x));
426 return QUEUED_COPY (x);
427 }
428
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
433
434 int
435 queued_subexp_p (x)
436 rtx x;
437 {
438 enum rtx_code code = GET_CODE (x);
439 switch (code)
440 {
441 case QUEUED:
442 return 1;
443 case MEM:
444 return queued_subexp_p (XEXP (x, 0));
445 case MULT:
446 case PLUS:
447 case MINUS:
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
450 default:
451 return 0;
452 }
453 }
454
455 /* Perform all the pending incrementations. */
456
457 void
458 emit_queue ()
459 {
460 rtx p;
461 while ((p = pending_chain))
462 {
463 rtx body = QUEUED_BODY (p);
464
465 if (GET_CODE (body) == SEQUENCE)
466 {
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
469 }
470 else
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
473 }
474 }
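/* Editorial sketch of how the queue is typically used when expanding a
   post-increment such as "*p++"; illustrative only and compiled out.
   The names P_RTX, INC, QUEUED, ADDR and VALUE are hypothetical.  */
#if 0
{
  rtx p_rtx = gen_reg_rtx (Pmode);	/* hypothetical pointer variable P */
  rtx inc = gen_rtx_PLUS (Pmode, p_rtx, GEN_INT (4));

  /* Queue "p = p + 4" and get an rtx for the pre-increment value of P.  */
  rtx queued = enqueue_insn (p_rtx, gen_move_insn (p_rtx, inc));
  rtx addr = protect_from_queue (queued, 0);
  rtx value = gen_reg_rtx (SImode);

  emit_move_insn (value, gen_rtx_MEM (SImode, addr));
  emit_queue ();			/* the queued increment is emitted here */
}
#endif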
475 \f
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
480
481 void
482 convert_move (to, from, unsignedp)
483 rtx to, from;
484 int unsignedp;
485 {
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
490 enum insn_code code;
491 rtx libcall;
492
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
495
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
498
499 if (to_real != from_real)
500 abort ();
501
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
504 TO here. */
505
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
511
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
513 abort ();
514
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
517 {
518 emit_move_insn (to, from);
519 return;
520 }
521
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
523 {
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
525 abort ();
526
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
529 else
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
531
532 emit_move_insn (to, from);
533 return;
534 }
535
536 if (to_real != from_real)
537 abort ();
538
539 if (to_real)
540 {
541 rtx value, insns;
542
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
544 {
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
547 != CODE_FOR_nothing)
548 {
549 emit_unop_insn (code, to, from, UNKNOWN);
550 return;
551 }
552 }
553
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
556 {
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
558 return;
559 }
560 #endif
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
563 {
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
565 return;
566 }
567 #endif
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
570 {
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
572 return;
573 }
574 #endif
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
577 {
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
579 return;
580 }
581 #endif
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
584 {
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
586 return;
587 }
588 #endif
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
591 {
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
593 return;
594 }
595 #endif
596
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
599 {
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
601 return;
602 }
603 #endif
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
608 return;
609 }
610 #endif
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
613 {
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
615 return;
616 }
617 #endif
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
620 {
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
627 {
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 {
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
698 libcall = (rtx) 0;
699 switch (from_mode)
700 {
701 case SFmode:
702 switch (to_mode)
703 {
704 case DFmode:
705 libcall = extendsfdf2_libfunc;
706 break;
707
708 case XFmode:
709 libcall = extendsfxf2_libfunc;
710 break;
711
712 case TFmode:
713 libcall = extendsftf2_libfunc;
714 break;
715
716 default:
717 break;
718 }
719 break;
720
721 case DFmode:
722 switch (to_mode)
723 {
724 case SFmode:
725 libcall = truncdfsf2_libfunc;
726 break;
727
728 case XFmode:
729 libcall = extenddfxf2_libfunc;
730 break;
731
732 case TFmode:
733 libcall = extenddftf2_libfunc;
734 break;
735
736 default:
737 break;
738 }
739 break;
740
741 case XFmode:
742 switch (to_mode)
743 {
744 case SFmode:
745 libcall = truncxfsf2_libfunc;
746 break;
747
748 case DFmode:
749 libcall = truncxfdf2_libfunc;
750 break;
751
752 default:
753 break;
754 }
755 break;
756
757 case TFmode:
758 switch (to_mode)
759 {
760 case SFmode:
761 libcall = trunctfsf2_libfunc;
762 break;
763
764 case DFmode:
765 libcall = trunctfdf2_libfunc;
766 break;
767
768 default:
769 break;
770 }
771 break;
772
773 default:
774 break;
775 }
776
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
779 abort ();
780
781 start_sequence ();
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
783 1, from, from_mode);
784 insns = get_insns ();
785 end_sequence ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
787 from));
788 return;
789 }
790
791 /* Now both modes are integers. */
792
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
796 {
797 rtx insns;
798 rtx lowpart;
799 rtx fill_value;
800 rtx lowfrom;
801 int i;
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
804
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
807 != CODE_FOR_nothing)
808 {
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
816 return;
817 }
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
822 {
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
828 return;
829 }
830
831 /* No special multiword conversion insn; do it by hand. */
832 start_sequence ();
833
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
836
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
839
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
843 else
844 lowpart_mode = from_mode;
845
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
847
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
850
851 /* Compute the value to put in each remaining word. */
852 if (unsignedp)
853 fill_value = const0_rtx;
854 else
855 {
856 #ifdef HAVE_slt
857 if (HAVE_slt
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
860 {
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
862 lowpart_mode, 0);
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
865 }
866 else
867 #endif
868 {
869 fill_value
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
872 NULL_RTX, 0);
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
874 }
875 }
876
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
879 {
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
882
883 if (subword == 0)
884 abort ();
885
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
888 }
889
890 insns = get_insns ();
891 end_sequence ();
892
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
895 return;
896 }
897
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
901 {
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
910 return;
911 }
912
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
915 {
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
918
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
921 {
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
923 return;
924 }
925 #endif /* HAVE_truncqipqi2 */
926 abort ();
927 }
928
929 if (from_mode == PQImode)
930 {
931 if (to_mode != QImode)
932 {
933 from = convert_to_mode (QImode, from, unsignedp);
934 from_mode = QImode;
935 }
936 else
937 {
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
940 {
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_extendpqiqi2 */
945 abort ();
946 }
947 }
948
949 if (to_mode == PSImode)
950 {
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
953
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
956 {
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
958 return;
959 }
960 #endif /* HAVE_truncsipsi2 */
961 abort ();
962 }
963
964 if (from_mode == PSImode)
965 {
966 if (to_mode != SImode)
967 {
968 from = convert_to_mode (SImode, from, unsignedp);
969 from_mode = SImode;
970 }
971 else
972 {
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
975 {
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_zero_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 if (flag_force_mem)
1056 from = force_not_mem (from);
1057
1058 emit_unop_insn (code, to, from, equiv_code);
1059 return;
1060 }
1061 else
1062 {
1063 enum machine_mode intermediate;
1064 rtx tmp;
1065 tree shift_amount;
1066
1067 /* Search for a mode to convert via. */
1068 for (intermediate = from_mode; intermediate != VOIDmode;
1069 intermediate = GET_MODE_WIDER_MODE (intermediate))
1070 if (((can_extend_p (to_mode, intermediate, unsignedp)
1071 != CODE_FOR_nothing)
1072 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1073 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1074 GET_MODE_BITSIZE (intermediate))))
1075 && (can_extend_p (intermediate, from_mode, unsignedp)
1076 != CODE_FOR_nothing))
1077 {
1078 convert_move (to, convert_to_mode (intermediate, from,
1079 unsignedp), unsignedp);
1080 return;
1081 }
1082
1083 /* No suitable intermediate mode.
1084 Generate what we need with shifts. */
1085 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1086 - GET_MODE_BITSIZE (from_mode), 0);
1087 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1088 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1089 to, unsignedp);
1090 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1091 to, unsignedp);
1092 if (tmp != to)
1093 emit_move_insn (to, tmp);
1094 return;
1095 }
1096 }
1097
1098 /* Support special truncate insns for certain modes. */
1099
1100 if (from_mode == DImode && to_mode == SImode)
1101 {
1102 #ifdef HAVE_truncdisi2
1103 if (HAVE_truncdisi2)
1104 {
1105 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1106 return;
1107 }
1108 #endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1111 }
1112
1113 if (from_mode == DImode && to_mode == HImode)
1114 {
1115 #ifdef HAVE_truncdihi2
1116 if (HAVE_truncdihi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1119 return;
1120 }
1121 #endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == DImode && to_mode == QImode)
1127 {
1128 #ifdef HAVE_truncdiqi2
1129 if (HAVE_truncdiqi2)
1130 {
1131 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1132 return;
1133 }
1134 #endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 if (from_mode == SImode && to_mode == HImode)
1140 {
1141 #ifdef HAVE_truncsihi2
1142 if (HAVE_truncsihi2)
1143 {
1144 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1145 return;
1146 }
1147 #endif
1148 convert_move (to, force_reg (from_mode, from), unsignedp);
1149 return;
1150 }
1151
1152 if (from_mode == SImode && to_mode == QImode)
1153 {
1154 #ifdef HAVE_truncsiqi2
1155 if (HAVE_truncsiqi2)
1156 {
1157 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1158 return;
1159 }
1160 #endif
1161 convert_move (to, force_reg (from_mode, from), unsignedp);
1162 return;
1163 }
1164
1165 if (from_mode == HImode && to_mode == QImode)
1166 {
1167 #ifdef HAVE_trunchiqi2
1168 if (HAVE_trunchiqi2)
1169 {
1170 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1171 return;
1172 }
1173 #endif
1174 convert_move (to, force_reg (from_mode, from), unsignedp);
1175 return;
1176 }
1177
1178 if (from_mode == TImode && to_mode == DImode)
1179 {
1180 #ifdef HAVE_trunctidi2
1181 if (HAVE_trunctidi2)
1182 {
1183 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1184 return;
1185 }
1186 #endif
1187 convert_move (to, force_reg (from_mode, from), unsignedp);
1188 return;
1189 }
1190
1191 if (from_mode == TImode && to_mode == SImode)
1192 {
1193 #ifdef HAVE_trunctisi2
1194 if (HAVE_trunctisi2)
1195 {
1196 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1197 return;
1198 }
1199 #endif
1200 convert_move (to, force_reg (from_mode, from), unsignedp);
1201 return;
1202 }
1203
1204 if (from_mode == TImode && to_mode == HImode)
1205 {
1206 #ifdef HAVE_trunctihi2
1207 if (HAVE_trunctihi2)
1208 {
1209 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1210 return;
1211 }
1212 #endif
1213 convert_move (to, force_reg (from_mode, from), unsignedp);
1214 return;
1215 }
1216
1217 if (from_mode == TImode && to_mode == QImode)
1218 {
1219 #ifdef HAVE_trunctiqi2
1220 if (HAVE_trunctiqi2)
1221 {
1222 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1223 return;
1224 }
1225 #endif
1226 convert_move (to, force_reg (from_mode, from), unsignedp);
1227 return;
1228 }
1229
1230 /* Handle truncation of volatile memrefs, and so on;
1231 the things that couldn't be truncated directly,
1232 and for which there was no special instruction. */
1233 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1234 {
1235 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1236 emit_move_insn (to, temp);
1237 return;
1238 }
1239
1240 /* Mode combination is not recognized. */
1241 abort ();
1242 }
1243
1244 /* Return an rtx for a value that would result
1245 from converting X to mode MODE.
1246 Both X and MODE may be floating, or both integer.
1247 UNSIGNEDP is nonzero if X is an unsigned value.
1248 This can be done by referring to a part of X in place
1249 or by copying to a new temporary with conversion.
1250
1251 This function *must not* call protect_from_queue
1252 except when putting X into an insn (in which case convert_move does it). */
1253
1254 rtx
1255 convert_to_mode (mode, x, unsignedp)
1256 enum machine_mode mode;
1257 rtx x;
1258 int unsignedp;
1259 {
1260 return convert_modes (mode, VOIDmode, x, unsignedp);
1261 }
1262
1263 /* Return an rtx for a value that would result
1264 from converting X from mode OLDMODE to mode MODE.
1265 Both modes may be floating, or both integer.
1266 UNSIGNEDP is nonzero if X is an unsigned value.
1267
1268 This can be done by referring to a part of X in place
1269 or by copying to a new temporary with conversion.
1270
1271 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1272
1273 This function *must not* call protect_from_queue
1274 except when putting X into an insn (in which case convert_move does it). */
1275
1276 rtx
1277 convert_modes (mode, oldmode, x, unsignedp)
1278 enum machine_mode mode, oldmode;
1279 rtx x;
1280 int unsignedp;
1281 {
1282 rtx temp;
1283
1284 /* If FROM is a SUBREG that indicates that we have already done at least
1285 the required extension, strip it. */
1286
1287 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1288 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1289 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1290 x = gen_lowpart (mode, x);
1291
1292 if (GET_MODE (x) != VOIDmode)
1293 oldmode = GET_MODE (x);
1294
1295 if (mode == oldmode)
1296 return x;
1297
1298 /* There is one case that we must handle specially: If we are converting
1299 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1300 we are to interpret the constant as unsigned, gen_lowpart will do
1301 the wrong thing if the constant appears negative. What we want to do is
1302 make the high-order word of the constant zero, not all ones. */
1303
1304 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1305 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1306 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1307 {
1308 HOST_WIDE_INT val = INTVAL (x);
1309
1310 if (oldmode != VOIDmode
1311 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1312 {
1313 int width = GET_MODE_BITSIZE (oldmode);
1314
1315 /* We need to zero extend VAL. */
1316 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1317 }
1318
1319 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1320 }
1321
1322 /* We can do this with a gen_lowpart if both desired and current modes
1323 are integer, and this is either a constant integer, a register, or a
1324 non-volatile MEM. Except for the constant case where MODE is no
1325 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1326
1327 if ((GET_CODE (x) == CONST_INT
1328 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1329 || (GET_MODE_CLASS (mode) == MODE_INT
1330 && GET_MODE_CLASS (oldmode) == MODE_INT
1331 && (GET_CODE (x) == CONST_DOUBLE
1332 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1333 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1334 && direct_load[(int) mode])
1335 || (GET_CODE (x) == REG
1336 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1337 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1338 {
1339 /* ?? If we don't know OLDMODE, we have to assume here that
1340 X does not need sign- or zero-extension. This may not be
1341 the case, but it's the best we can do. */
1342 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1343 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1344 {
1345 HOST_WIDE_INT val = INTVAL (x);
1346 int width = GET_MODE_BITSIZE (oldmode);
1347
1348 /* We must sign or zero-extend in this case. Start by
1349 zero-extending, then sign extend if we need to. */
1350 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1351 if (! unsignedp
1352 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1353 val |= (HOST_WIDE_INT) (-1) << width;
1354
1355 return GEN_INT (trunc_int_for_mode (val, mode));
1356 }
1357
1358 return gen_lowpart (mode, x);
1359 }
1360
1361 temp = gen_reg_rtx (mode);
1362 convert_move (temp, x, unsignedp);
1363 return temp;
1364 }
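/* Editorial sketch, illustrative only and compiled out: widening a
   SImode pseudo to DImode, once via convert_to_mode (which may reuse or
   create a value) and once via convert_move into an existing register.
   All names are hypothetical.  */
#if 0
{
  rtx src = gen_reg_rtx (SImode);
  rtx widened = convert_to_mode (DImode, src, 0);	/* sign extension */
  rtx dest = gen_reg_rtx (DImode);

  convert_move (dest, src, 1);				/* zero extension */
}
#endif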
1365 \f
1366 /* This macro is used to determine what the largest unit size that
1367 move_by_pieces can use is. */
1368
1369 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1370 move efficiently, as opposed to MOVE_MAX which is the maximum
1371 number of bytes we can move with a single instruction. */
1372
1373 #ifndef MOVE_MAX_PIECES
1374 #define MOVE_MAX_PIECES MOVE_MAX
1375 #endif
1376
1377 /* Generate several move instructions to copy LEN bytes from block FROM to
1378 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1379 and TO through protect_from_queue before calling.
1380
1381 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1382 used to push FROM to the stack.
1383
1384 ALIGN is maximum alignment we can assume. */
1385
1386 void
1387 move_by_pieces (to, from, len, align)
1388 rtx to, from;
1389 unsigned HOST_WIDE_INT len;
1390 unsigned int align;
1391 {
1392 struct move_by_pieces data;
1393 rtx to_addr, from_addr = XEXP (from, 0);
1394 unsigned int max_size = MOVE_MAX_PIECES + 1;
1395 enum machine_mode mode = VOIDmode, tmode;
1396 enum insn_code icode;
1397
1398 data.offset = 0;
1399 data.from_addr = from_addr;
1400 if (to)
1401 {
1402 to_addr = XEXP (to, 0);
1403 data.to = to;
1404 data.autinc_to
1405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1407 data.reverse
1408 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1409 }
1410 else
1411 {
1412 to_addr = NULL_RTX;
1413 data.to = NULL_RTX;
1414 data.autinc_to = 1;
1415 #ifdef STACK_GROWS_DOWNWARD
1416 data.reverse = 1;
1417 #else
1418 data.reverse = 0;
1419 #endif
1420 }
1421 data.to_addr = to_addr;
1422 data.from = from;
1423 data.autinc_from
1424 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1425 || GET_CODE (from_addr) == POST_INC
1426 || GET_CODE (from_addr) == POST_DEC);
1427
1428 data.explicit_inc_from = 0;
1429 data.explicit_inc_to = 0;
1430 if (data.reverse) data.offset = len;
1431 data.len = len;
1432
1433 /* If copying requires more than two move insns,
1434 copy addresses to registers (to make displacements shorter)
1435 and use post-increment if available. */
1436 if (!(data.autinc_from && data.autinc_to)
1437 && move_by_pieces_ninsns (len, align) > 2)
1438 {
1439 /* Find the mode of the largest move... */
1440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1442 if (GET_MODE_SIZE (tmode) < max_size)
1443 mode = tmode;
1444
1445 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1446 {
1447 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1448 data.autinc_from = 1;
1449 data.explicit_inc_from = -1;
1450 }
1451 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1452 {
1453 data.from_addr = copy_addr_to_reg (from_addr);
1454 data.autinc_from = 1;
1455 data.explicit_inc_from = 1;
1456 }
1457 if (!data.autinc_from && CONSTANT_P (from_addr))
1458 data.from_addr = copy_addr_to_reg (from_addr);
1459 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1460 {
1461 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1462 data.autinc_to = 1;
1463 data.explicit_inc_to = -1;
1464 }
1465 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1466 {
1467 data.to_addr = copy_addr_to_reg (to_addr);
1468 data.autinc_to = 1;
1469 data.explicit_inc_to = 1;
1470 }
1471 if (!data.autinc_to && CONSTANT_P (to_addr))
1472 data.to_addr = copy_addr_to_reg (to_addr);
1473 }
1474
1475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1477 align = MOVE_MAX * BITS_PER_UNIT;
1478
1479 /* First move what we can in the largest integer mode, then go to
1480 successively smaller modes. */
1481
1482 while (max_size > 1)
1483 {
1484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1486 if (GET_MODE_SIZE (tmode) < max_size)
1487 mode = tmode;
1488
1489 if (mode == VOIDmode)
1490 break;
1491
1492 icode = mov_optab->handlers[(int) mode].insn_code;
1493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1494 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1495
1496 max_size = GET_MODE_SIZE (mode);
1497 }
1498
1499 /* The code above should have handled everything. */
1500 if (data.len > 0)
1501 abort ();
1502 }
1503
1504 /* Return number of insns required to move L bytes by pieces.
1505 ALIGN (in bits) is maximum alignment we can assume. */
1506
1507 static unsigned HOST_WIDE_INT
1508 move_by_pieces_ninsns (l, align)
1509 unsigned HOST_WIDE_INT l;
1510 unsigned int align;
1511 {
1512 unsigned HOST_WIDE_INT n_insns = 0;
1513 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1514
1515 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1516 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1517 align = MOVE_MAX * BITS_PER_UNIT;
1518
1519 while (max_size > 1)
1520 {
1521 enum machine_mode mode = VOIDmode, tmode;
1522 enum insn_code icode;
1523
1524 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1525 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1526 if (GET_MODE_SIZE (tmode) < max_size)
1527 mode = tmode;
1528
1529 if (mode == VOIDmode)
1530 break;
1531
1532 icode = mov_optab->handlers[(int) mode].insn_code;
1533 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1534 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1535
1536 max_size = GET_MODE_SIZE (mode);
1537 }
1538
1539 if (l)
1540 abort ();
1541 return n_insns;
1542 }
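/* Editorial worked example for move_by_pieces_ninsns, assuming MOVE_MAX
   is 4 and the block is word aligned: L == 11 costs two SImode moves
   (8 bytes), one HImode move (2 bytes) and one QImode move, four insns
   in all.  With only byte alignment on a STRICT_ALIGNMENT target the
   wider modes are skipped and the count rises to 11 QImode moves.  */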
1543
1544 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1545 with move instructions for mode MODE. GENFUN is the gen_... function
1546 to make a move insn for that mode. DATA has all the other info. */
1547
1548 static void
1549 move_by_pieces_1 (genfun, mode, data)
1550 rtx (*genfun) PARAMS ((rtx, ...));
1551 enum machine_mode mode;
1552 struct move_by_pieces *data;
1553 {
1554 unsigned int size = GET_MODE_SIZE (mode);
1555 rtx to1 = NULL_RTX, from1;
1556
1557 while (data->len >= size)
1558 {
1559 if (data->reverse)
1560 data->offset -= size;
1561
1562 if (data->to)
1563 {
1564 if (data->autinc_to)
1565 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1566 data->offset);
1567 else
1568 to1 = adjust_address (data->to, mode, data->offset);
1569 }
1570
1571 if (data->autinc_from)
1572 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1573 data->offset);
1574 else
1575 from1 = adjust_address (data->from, mode, data->offset);
1576
1577 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1578 emit_insn (gen_add2_insn (data->to_addr,
1579 GEN_INT (-(HOST_WIDE_INT)size)));
1580 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1581 emit_insn (gen_add2_insn (data->from_addr,
1582 GEN_INT (-(HOST_WIDE_INT)size)));
1583
1584 if (data->to)
1585 emit_insn ((*genfun) (to1, from1));
1586 else
1587 {
1588 #ifdef PUSH_ROUNDING
1589 emit_single_push_insn (mode, from1, NULL);
1590 #else
1591 abort ();
1592 #endif
1593 }
1594
1595 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1596 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1597 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1598 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1599
1600 if (! data->reverse)
1601 data->offset += size;
1602
1603 data->len -= size;
1604 }
1605 }
1606 \f
1607 /* Emit code to move a block Y to a block X.
1608 This may be done with string-move instructions,
1609 with multiple scalar move instructions, or with a library call.
1610
1611 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1612 with mode BLKmode.
1613 SIZE is an rtx that says how long they are.
1614 ALIGN is the maximum alignment we can assume they have.
1615
1616 Return the address of the new block, if memcpy is called and returns it,
1617 0 otherwise. */
1618
1619 rtx
1620 emit_block_move (x, y, size)
1621 rtx x, y;
1622 rtx size;
1623 {
1624 rtx retval = 0;
1625 #ifdef TARGET_MEM_FUNCTIONS
1626 static tree fn;
1627 tree call_expr, arg_list;
1628 #endif
1629 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1630
1631 if (GET_MODE (x) != BLKmode)
1632 abort ();
1633
1634 if (GET_MODE (y) != BLKmode)
1635 abort ();
1636
1637 x = protect_from_queue (x, 1);
1638 y = protect_from_queue (y, 0);
1639 size = protect_from_queue (size, 0);
1640
1641 if (GET_CODE (x) != MEM)
1642 abort ();
1643 if (GET_CODE (y) != MEM)
1644 abort ();
1645 if (size == 0)
1646 abort ();
1647
1648 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1649 move_by_pieces (x, y, INTVAL (size), align);
1650 else
1651 {
1652 /* Try the most limited insn first, because there's no point
1653 including more than one in the machine description unless
1654 the more limited one has some advantage. */
1655
1656 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1657 enum machine_mode mode;
1658
1659 /* Since this is a move insn, we don't care about volatility. */
1660 volatile_ok = 1;
1661
1662 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1663 mode = GET_MODE_WIDER_MODE (mode))
1664 {
1665 enum insn_code code = movstr_optab[(int) mode];
1666 insn_operand_predicate_fn pred;
1667
1668 if (code != CODE_FOR_nothing
1669 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1670 here because if SIZE is less than the mode mask, as it is
1671 returned by the macro, it will definitely be less than the
1672 actual mode mask. */
1673 && ((GET_CODE (size) == CONST_INT
1674 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1675 <= (GET_MODE_MASK (mode) >> 1)))
1676 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1677 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1678 || (*pred) (x, BLKmode))
1679 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1680 || (*pred) (y, BLKmode))
1681 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1682 || (*pred) (opalign, VOIDmode)))
1683 {
1684 rtx op2;
1685 rtx last = get_last_insn ();
1686 rtx pat;
1687
1688 op2 = convert_to_mode (mode, size, 1);
1689 pred = insn_data[(int) code].operand[2].predicate;
1690 if (pred != 0 && ! (*pred) (op2, mode))
1691 op2 = copy_to_mode_reg (mode, op2);
1692
1693 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1694 if (pat)
1695 {
1696 emit_insn (pat);
1697 volatile_ok = 0;
1698 return 0;
1699 }
1700 else
1701 delete_insns_since (last);
1702 }
1703 }
1704
1705 volatile_ok = 0;
1706
1707 /* X, Y, or SIZE may have been passed through protect_from_queue.
1708
1709 It is unsafe to save the value generated by protect_from_queue
1710 and reuse it later. Consider what happens if emit_queue is
1711 called before the return value from protect_from_queue is used.
1712
1713 Expansion of the CALL_EXPR below will call emit_queue before
1714 we are finished emitting RTL for argument setup. So if we are
1715 not careful we could get the wrong value for an argument.
1716
1717 To avoid this problem we go ahead and emit code to copy X, Y &
1718 SIZE into new pseudos. We can then place those new pseudos
1719 into an RTL_EXPR and use them later, even after a call to
1720 emit_queue.
1721
1722 Note this is not strictly needed for library calls since they
1723 do not call emit_queue before loading their arguments. However,
1724 we may need to have library calls call emit_queue in the future
1725 since failing to do so could cause problems for targets which
1726 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1727 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1728 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1729
1730 #ifdef TARGET_MEM_FUNCTIONS
1731 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1732 #else
1733 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1734 TREE_UNSIGNED (integer_type_node));
1735 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1736 #endif
1737
1738 #ifdef TARGET_MEM_FUNCTIONS
1739 /* It is incorrect to use the libcall calling conventions to call
1740 memcpy in this context.
1741
1742 This could be a user call to memcpy and the user may wish to
1743 examine the return value from memcpy.
1744
1745 For targets where libcalls and normal calls have different conventions
1746 for returning pointers, we could end up generating incorrect code.
1747
1748 So instead of using a libcall sequence we build up a suitable
1749 CALL_EXPR and expand the call in the normal fashion. */
1750 if (fn == NULL_TREE)
1751 {
1752 tree fntype;
1753
1754 /* This was copied from except.c; I don't know whether all of it is
1755 necessary in this context or not. */
1756 fn = get_identifier ("memcpy");
1757 fntype = build_pointer_type (void_type_node);
1758 fntype = build_function_type (fntype, NULL_TREE);
1759 fn = build_decl (FUNCTION_DECL, fn, fntype);
1760 ggc_add_tree_root (&fn, 1);
1761 DECL_EXTERNAL (fn) = 1;
1762 TREE_PUBLIC (fn) = 1;
1763 DECL_ARTIFICIAL (fn) = 1;
1764 TREE_NOTHROW (fn) = 1;
1765 make_decl_rtl (fn, NULL);
1766 assemble_external (fn);
1767 }
1768
1769 /* We need to make an argument list for the function call.
1770
1771 memcpy has three arguments, the first two are void * addresses and
1772 the last is a size_t byte count for the copy. */
1773 arg_list
1774 = build_tree_list (NULL_TREE,
1775 make_tree (build_pointer_type (void_type_node), x));
1776 TREE_CHAIN (arg_list)
1777 = build_tree_list (NULL_TREE,
1778 make_tree (build_pointer_type (void_type_node), y));
1779 TREE_CHAIN (TREE_CHAIN (arg_list))
1780 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1781 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1782
1783 /* Now we have to build up the CALL_EXPR itself. */
1784 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1785 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1786 call_expr, arg_list, NULL_TREE);
1787 TREE_SIDE_EFFECTS (call_expr) = 1;
1788
1789 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1790 #else
1791 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1792 VOIDmode, 3, y, Pmode, x, Pmode,
1793 convert_to_mode (TYPE_MODE (integer_type_node), size,
1794 TREE_UNSIGNED (integer_type_node)),
1795 TYPE_MODE (integer_type_node));
1796 #endif
1797
1798 /* If we are initializing a readonly value, show the above call
1799 clobbered it. Otherwise, a load from it may erroneously be hoisted
1800 from a loop. */
1801 if (RTX_UNCHANGING_P (x))
1802 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1803 }
1804
1805 return retval;
1806 }
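/* Editorial sketch, illustrative only and compiled out: a typical call
   copies one BLKmode MEM to another and lets emit_block_move choose
   between move_by_pieces, a movstr pattern, or a memcpy/bcopy call.
   The operands here are hypothetical.  */
#if 0
{
  rtx dst = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
  rtx src = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

  set_mem_align (dst, BITS_PER_WORD);
  set_mem_align (src, BITS_PER_WORD);
  emit_block_move (dst, src, GEN_INT (64));
}
#endif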
1807 \f
1808 /* Copy all or part of a value X into registers starting at REGNO.
1809 The number of registers to be filled is NREGS. */
1810
1811 void
1812 move_block_to_reg (regno, x, nregs, mode)
1813 int regno;
1814 rtx x;
1815 int nregs;
1816 enum machine_mode mode;
1817 {
1818 int i;
1819 #ifdef HAVE_load_multiple
1820 rtx pat;
1821 rtx last;
1822 #endif
1823
1824 if (nregs == 0)
1825 return;
1826
1827 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1828 x = validize_mem (force_const_mem (mode, x));
1829
1830 /* See if the machine can do this with a load multiple insn. */
1831 #ifdef HAVE_load_multiple
1832 if (HAVE_load_multiple)
1833 {
1834 last = get_last_insn ();
1835 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1836 GEN_INT (nregs));
1837 if (pat)
1838 {
1839 emit_insn (pat);
1840 return;
1841 }
1842 else
1843 delete_insns_since (last);
1844 }
1845 #endif
1846
1847 for (i = 0; i < nregs; i++)
1848 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1849 operand_subword_force (x, i, mode));
1850 }
1851
1852 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1853 The number of registers to be filled is NREGS. SIZE indicates the number
1854 of bytes in the object X. */
1855
1856 void
1857 move_block_from_reg (regno, x, nregs, size)
1858 int regno;
1859 rtx x;
1860 int nregs;
1861 int size;
1862 {
1863 int i;
1864 #ifdef HAVE_store_multiple
1865 rtx pat;
1866 rtx last;
1867 #endif
1868 enum machine_mode mode;
1869
1870 if (nregs == 0)
1871 return;
1872
1873 /* If SIZE is that of a mode no bigger than a word, just use that
1874 mode's store operation. */
1875 if (size <= UNITS_PER_WORD
1876 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1877 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1878 {
1879 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1880 return;
1881 }
1882
1883 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1884 to the left before storing to memory. Note that the previous test
1885 doesn't handle all cases (e.g. SIZE == 3). */
1886 if (size < UNITS_PER_WORD
1887 && BYTES_BIG_ENDIAN
1888 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1889 {
1890 rtx tem = operand_subword (x, 0, 1, BLKmode);
1891 rtx shift;
1892
1893 if (tem == 0)
1894 abort ();
1895
1896 shift = expand_shift (LSHIFT_EXPR, word_mode,
1897 gen_rtx_REG (word_mode, regno),
1898 build_int_2 ((UNITS_PER_WORD - size)
1899 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1900 emit_move_insn (tem, shift);
1901 return;
1902 }
1903
1904 /* See if the machine can do this with a store multiple insn. */
1905 #ifdef HAVE_store_multiple
1906 if (HAVE_store_multiple)
1907 {
1908 last = get_last_insn ();
1909 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1910 GEN_INT (nregs));
1911 if (pat)
1912 {
1913 emit_insn (pat);
1914 return;
1915 }
1916 else
1917 delete_insns_since (last);
1918 }
1919 #endif
1920
1921 for (i = 0; i < nregs; i++)
1922 {
1923 rtx tem = operand_subword (x, i, 1, BLKmode);
1924
1925 if (tem == 0)
1926 abort ();
1927
1928 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1929 }
1930 }
1931
1932 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1933 registers represented by a PARALLEL. SSIZE represents the total size of
1934 block SRC in bytes, or -1 if not known. */
1935 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1936 the balance will be in what would be the low-order memory addresses, i.e.
1937 left justified for big endian, right justified for little endian. This
1938 happens to be true for the targets currently using this support. If this
1939 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1940 would be needed. */
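/* As an illustration (a hedged sketch, not taken from any real target
   description): a 16-byte value passed in two DImode registers at byte
   offsets 0 and 8 would be described by a PARALLEL of the form

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   where each element pairs a register with the byte position it covers.
   A null register in element 0 is the marker, tested below, for a value
   that also lives partly on the stack.  */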
1941
1942 void
1943 emit_group_load (dst, orig_src, ssize)
1944 rtx dst, orig_src;
1945 int ssize;
1946 {
1947 rtx *tmps, src;
1948 int start, i;
1949
1950 if (GET_CODE (dst) != PARALLEL)
1951 abort ();
1952
1953 /* Check for a NULL entry, used to indicate that the parameter goes
1954 both on the stack and in registers. */
1955 if (XEXP (XVECEXP (dst, 0, 0), 0))
1956 start = 0;
1957 else
1958 start = 1;
1959
1960 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1961
1962 /* Process the pieces. */
1963 for (i = start; i < XVECLEN (dst, 0); i++)
1964 {
1965 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1966 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1967 unsigned int bytelen = GET_MODE_SIZE (mode);
1968 int shift = 0;
1969
1970 /* Handle trailing fragments that run over the size of the struct. */
1971 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1972 {
1973 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1974 bytelen = ssize - bytepos;
1975 if (bytelen <= 0)
1976 abort ();
1977 }
1978
1979 /* If we won't be loading directly from memory, protect the real source
1980 from strange tricks we might play; but make sure that the source can
1981 be loaded directly into the destination. */
1982 src = orig_src;
1983 if (GET_CODE (orig_src) != MEM
1984 && (!CONSTANT_P (orig_src)
1985 || (GET_MODE (orig_src) != mode
1986 && GET_MODE (orig_src) != VOIDmode)))
1987 {
1988 if (GET_MODE (orig_src) == VOIDmode)
1989 src = gen_reg_rtx (mode);
1990 else
1991 src = gen_reg_rtx (GET_MODE (orig_src));
1992
1993 emit_move_insn (src, orig_src);
1994 }
1995
1996 /* Optimize the access just a bit. */
1997 if (GET_CODE (src) == MEM
1998 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1999 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2000 && bytelen == GET_MODE_SIZE (mode))
2001 {
2002 tmps[i] = gen_reg_rtx (mode);
2003 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2004 }
2005 else if (GET_CODE (src) == CONCAT)
2006 {
2007 if (bytepos == 0
2008 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2009 tmps[i] = XEXP (src, 0);
2010 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2011 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2012 tmps[i] = XEXP (src, 1);
2013 else if (bytepos == 0)
2014 {
2015 rtx mem = assign_stack_temp (GET_MODE (src),
2016 GET_MODE_SIZE (GET_MODE (src)), 0);
2017 emit_move_insn (mem, src);
2018 tmps[i] = adjust_address (mem, mode, 0);
2019 }
2020 else
2021 abort ();
2022 }
2023 else if (CONSTANT_P (src)
2024 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2025 tmps[i] = src;
2026 else
2027 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2028 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2029 mode, mode, ssize);
2030
2031 if (BYTES_BIG_ENDIAN && shift)
2032 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2033 tmps[i], 0, OPTAB_WIDEN);
2034 }
2035
2036 emit_queue ();
2037
2038 /* Copy the extracted pieces into the proper (probable) hard regs. */
2039 for (i = start; i < XVECLEN (dst, 0); i++)
2040 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2041 }
2042
2043 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2044 registers represented by a PARALLEL. SSIZE represents the total size of
2045 block DST, or -1 if not known. */
2046
2047 void
2048 emit_group_store (orig_dst, src, ssize)
2049 rtx orig_dst, src;
2050 int ssize;
2051 {
2052 rtx *tmps, dst;
2053 int start, i;
2054
2055 if (GET_CODE (src) != PARALLEL)
2056 abort ();
2057
2058 /* Check for a NULL entry, used to indicate that the parameter goes
2059 both on the stack and in registers. */
2060 if (XEXP (XVECEXP (src, 0, 0), 0))
2061 start = 0;
2062 else
2063 start = 1;
2064
2065 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2066
2067 /* Copy the (probable) hard regs into pseudos. */
2068 for (i = start; i < XVECLEN (src, 0); i++)
2069 {
2070 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2071 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2072 emit_move_insn (tmps[i], reg);
2073 }
2074 emit_queue ();
2075
2076 /* If we won't be storing directly into memory, protect the real destination
2077 from strange tricks we might play. */
2078 dst = orig_dst;
2079 if (GET_CODE (dst) == PARALLEL)
2080 {
2081 rtx temp;
2082
2083 /* We can get a PARALLEL dst if there is a conditional expression in
2084 a return statement. In that case, the dst and src are the same,
2085 so no action is necessary. */
2086 if (rtx_equal_p (dst, src))
2087 return;
2088
2089 /* It is unclear if we can ever reach here, but we may as well handle
2090 it. Allocate a temporary, and split this into a store/load to/from
2091 the temporary. */
2092
2093 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2094 emit_group_store (temp, src, ssize);
2095 emit_group_load (dst, temp, ssize);
2096 return;
2097 }
2098 else if (GET_CODE (dst) != MEM)
2099 {
2100 dst = gen_reg_rtx (GET_MODE (orig_dst));
2101 /* Make life a bit easier for combine. */
2102 emit_move_insn (dst, const0_rtx);
2103 }
2104
2105 /* Process the pieces. */
2106 for (i = start; i < XVECLEN (src, 0); i++)
2107 {
2108 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2109 enum machine_mode mode = GET_MODE (tmps[i]);
2110 unsigned int bytelen = GET_MODE_SIZE (mode);
2111
2112 /* Handle trailing fragments that run over the size of the struct. */
2113 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2114 {
2115 if (BYTES_BIG_ENDIAN)
2116 {
2117 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2118 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2119 tmps[i], 0, OPTAB_WIDEN);
2120 }
2121 bytelen = ssize - bytepos;
2122 }
2123
2124 /* Optimize the access just a bit. */
2125 if (GET_CODE (dst) == MEM
2126 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2127 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2128 && bytelen == GET_MODE_SIZE (mode))
2129 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2130 else
2131 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2132 mode, tmps[i], ssize);
2133 }
2134
2135 emit_queue ();
2136
2137 /* Copy from the pseudo into the (probable) hard reg. */
2138 if (GET_CODE (dst) == REG)
2139 emit_move_insn (orig_dst, dst);
2140 }
2141
2142 /* Generate code to copy a BLKmode object of TYPE out of a
2143 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2144 is null, a stack temporary is created. TGTBLK is returned.
2145
2146 The primary purpose of this routine is to handle functions
2147 that return BLKmode structures in registers. Some machines
2148 (the PA for example) want to return all small structures
2149 in registers regardless of the structure's alignment. */
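/* Worked example (hedged; assumes a 32-bit BYTES_BIG_ENDIAN target,
   FUNCTION_ARG_REG_LITTLE_ENDIAN unset, and a 3-byte structure returned
   right-justified in one register): bytes == 3, so big_endian_correction
   below is 32 - 3 * 8 == 8; extraction therefore skips the empty
   high-order byte of the source word (xbitpos starts at 8) while the
   matching stores into TGTBLK start at bit 0.  */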
2150
2151 rtx
2152 copy_blkmode_from_reg (tgtblk, srcreg, type)
2153 rtx tgtblk;
2154 rtx srcreg;
2155 tree type;
2156 {
2157 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2158 rtx src = NULL, dst = NULL;
2159 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2160 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2161
2162 if (tgtblk == 0)
2163 {
2164 tgtblk = assign_temp (build_qualified_type (type,
2165 (TYPE_QUALS (type)
2166 | TYPE_QUAL_CONST)),
2167 0, 1, 1);
2168 preserve_temp_slots (tgtblk);
2169 }
2170
2171 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2172 into a new pseudo which is a full word.
2173
2174 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2175 the wrong part of the register gets copied so we fake a type conversion
2176 in place. */
2177 if (GET_MODE (srcreg) != BLKmode
2178 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2179 {
2180 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2181 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2182 else
2183 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2184 }
2185
2186 /* Structures whose size is not a multiple of a word are aligned
2187 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2188 machine, this means we must skip the empty high order bytes when
2189 calculating the bit offset. */
2190 if (BYTES_BIG_ENDIAN
2191 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2192 && bytes % UNITS_PER_WORD)
2193 big_endian_correction
2194 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2195
2196 /* Copy the structure BITSIZE bits at a time.
2197
2198 We could probably emit more efficient code for machines which do not use
2199 strict alignment, but it doesn't seem worth the effort at the current
2200 time. */
2201 for (bitpos = 0, xbitpos = big_endian_correction;
2202 bitpos < bytes * BITS_PER_UNIT;
2203 bitpos += bitsize, xbitpos += bitsize)
2204 {
2205 /* We need a new source operand each time xbitpos is on a
2206 word boundary and when xbitpos == big_endian_correction
2207 (the first time through). */
2208 if (xbitpos % BITS_PER_WORD == 0
2209 || xbitpos == big_endian_correction)
2210 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2211 GET_MODE (srcreg));
2212
2213 /* We need a new destination operand each time bitpos is on
2214 a word boundary. */
2215 if (bitpos % BITS_PER_WORD == 0)
2216 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2217
2218 /* Use xbitpos for the source extraction (right justified) and
2219 bitpos for the destination store (left justified). */
2220 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2221 extract_bit_field (src, bitsize,
2222 xbitpos % BITS_PER_WORD, 1,
2223 NULL_RTX, word_mode, word_mode,
2224 BITS_PER_WORD),
2225 BITS_PER_WORD);
2226 }
2227
2228 return tgtblk;
2229 }
2230
2231 /* Add a USE expression for REG to the (possibly empty) list pointed
2232 to by CALL_FUSAGE. REG must denote a hard register. */
2233
2234 void
2235 use_reg (call_fusage, reg)
2236 rtx *call_fusage, reg;
2237 {
2238 if (GET_CODE (reg) != REG
2239 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2240 abort ();
2241
2242 *call_fusage
2243 = gen_rtx_EXPR_LIST (VOIDmode,
2244 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2245 }
2246
2247 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2248 starting at REGNO. All of these registers must be hard registers. */
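/* Illustrative sketch (hypothetical hard register numbers): after

	rtx fusage = NULL_RTX;
	use_regs (&fusage, 4, 2);

   FUSAGE holds, most recently added entry first,

	(expr_list (use (reg 5))
	   (expr_list (use (reg 4)) (nil)))

   with each register in its reg_raw_mode; such a list is what callers
   attach to a CALL_INSN as its CALL_INSN_FUNCTION_USAGE.  */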
2249
2250 void
2251 use_regs (call_fusage, regno, nregs)
2252 rtx *call_fusage;
2253 int regno;
2254 int nregs;
2255 {
2256 int i;
2257
2258 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2259 abort ();
2260
2261 for (i = 0; i < nregs; i++)
2262 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2263 }
2264
2265 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2266 PARALLEL REGS. This is for calls that pass values in multiple
2267 non-contiguous locations. The Irix 6 ABI has examples of this. */
2268
2269 void
2270 use_group_regs (call_fusage, regs)
2271 rtx *call_fusage;
2272 rtx regs;
2273 {
2274 int i;
2275
2276 for (i = 0; i < XVECLEN (regs, 0); i++)
2277 {
2278 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2279
2280 /* A NULL entry means the parameter goes both on the stack and in
2281 registers. This can also be a MEM for targets that pass values
2282 partially on the stack and partially in registers. */
2283 if (reg != 0 && GET_CODE (reg) == REG)
2284 use_reg (call_fusage, reg);
2285 }
2286 }
2287 \f
2288
2289 int
2290 can_store_by_pieces (len, constfun, constfundata, align)
2291 unsigned HOST_WIDE_INT len;
2292 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2293 PTR constfundata;
2294 unsigned int align;
2295 {
2296 unsigned HOST_WIDE_INT max_size, l;
2297 HOST_WIDE_INT offset = 0;
2298 enum machine_mode mode, tmode;
2299 enum insn_code icode;
2300 int reverse;
2301 rtx cst;
2302
2303 if (! MOVE_BY_PIECES_P (len, align))
2304 return 0;
2305
2306 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2307 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2308 align = MOVE_MAX * BITS_PER_UNIT;
2309
2310 /* We would first store what we can in the largest integer mode, then go to
2311 successively smaller modes. */
2312
2313 for (reverse = 0;
2314 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2315 reverse++)
2316 {
2317 l = len;
2318 mode = VOIDmode;
2319 max_size = MOVE_MAX_PIECES + 1;
2320 while (max_size > 1)
2321 {
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2325 mode = tmode;
2326
2327 if (mode == VOIDmode)
2328 break;
2329
2330 icode = mov_optab->handlers[(int) mode].insn_code;
2331 if (icode != CODE_FOR_nothing
2332 && align >= GET_MODE_ALIGNMENT (mode))
2333 {
2334 unsigned int size = GET_MODE_SIZE (mode);
2335
2336 while (l >= size)
2337 {
2338 if (reverse)
2339 offset -= size;
2340
2341 cst = (*constfun) (constfundata, offset, mode);
2342 if (!LEGITIMATE_CONSTANT_P (cst))
2343 return 0;
2344
2345 if (!reverse)
2346 offset += size;
2347
2348 l -= size;
2349 }
2350 }
2351
2352 max_size = GET_MODE_SIZE (mode);
2353 }
2354
2355 /* The code above should have handled everything. */
2356 if (l != 0)
2357 abort ();
2358 }
2359
2360 return 1;
2361 }
2362
2363 /* Generate several move instructions to store LEN bytes generated by
2364 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2365 pointer that will be passed as an argument to every CONSTFUN call.
2366 ALIGN is maximum alignment we can assume. */
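/* Usage sketch (hedged; ALL_ONES_BYTE and its caller are hypothetical,
   mirroring the clear_by_pieces/clear_by_pieces_1 pair below): filling
   a block with 0xff bytes through the constfun interface.

	static rtx
	all_ones_byte (data, offset, mode)
	     PTR data ATTRIBUTE_UNUSED;
	     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
	     enum machine_mode mode ATTRIBUTE_UNUSED;
	{
	  return constm1_rtx;
	}

	...
	if (can_store_by_pieces (len, all_ones_byte, NULL, align))
	  store_by_pieces (target, len, all_ones_byte, NULL, align);

   constm1_rtx is all-one bits in every integer mode, so each piece can
   normally be written with a single constant store.  */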
2367
2368 void
2369 store_by_pieces (to, len, constfun, constfundata, align)
2370 rtx to;
2371 unsigned HOST_WIDE_INT len;
2372 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2373 PTR constfundata;
2374 unsigned int align;
2375 {
2376 struct store_by_pieces data;
2377
2378 if (! MOVE_BY_PIECES_P (len, align))
2379 abort ();
2380 to = protect_from_queue (to, 1);
2381 data.constfun = constfun;
2382 data.constfundata = constfundata;
2383 data.len = len;
2384 data.to = to;
2385 store_by_pieces_1 (&data, align);
2386 }
2387
2388 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2389 rtx with BLKmode). The caller must pass TO through protect_from_queue
2390 before calling. ALIGN is maximum alignment we can assume. */
2391
2392 static void
2393 clear_by_pieces (to, len, align)
2394 rtx to;
2395 unsigned HOST_WIDE_INT len;
2396 unsigned int align;
2397 {
2398 struct store_by_pieces data;
2399
2400 data.constfun = clear_by_pieces_1;
2401 data.constfundata = NULL;
2402 data.len = len;
2403 data.to = to;
2404 store_by_pieces_1 (&data, align);
2405 }
2406
2407 /* Callback routine for clear_by_pieces.
2408 Return const0_rtx unconditionally. */
2409
2410 static rtx
2411 clear_by_pieces_1 (data, offset, mode)
2412 PTR data ATTRIBUTE_UNUSED;
2413 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2414 enum machine_mode mode ATTRIBUTE_UNUSED;
2415 {
2416 return const0_rtx;
2417 }
2418
2419 /* Subroutine of clear_by_pieces and store_by_pieces.
2420 Generate several move instructions to store LEN bytes of block TO. (A MEM
2421 rtx with BLKmode). The caller must pass TO through protect_from_queue
2422 before calling. ALIGN is maximum alignment we can assume. */
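/* Worked example (hedged; assumes MOVE_MAX_PIECES == 4, 8-bit units and
   a sufficiently aligned destination): for data->len == 7 the
   mode-selection loop below picks SImode first and store_by_pieces_2
   emits one 4-byte store, then HImode yields one 2-byte store, then
   QImode one 1-byte store, leaving data->len == 0 as the final
   consistency check requires.  */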
2423
2424 static void
2425 store_by_pieces_1 (data, align)
2426 struct store_by_pieces *data;
2427 unsigned int align;
2428 {
2429 rtx to_addr = XEXP (data->to, 0);
2430 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2431 enum machine_mode mode = VOIDmode, tmode;
2432 enum insn_code icode;
2433
2434 data->offset = 0;
2435 data->to_addr = to_addr;
2436 data->autinc_to
2437 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2438 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2439
2440 data->explicit_inc_to = 0;
2441 data->reverse
2442 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2443 if (data->reverse)
2444 data->offset = data->len;
2445
2446 /* If storing requires more than two move insns,
2447 copy addresses to registers (to make displacements shorter)
2448 and use post-increment if available. */
2449 if (!data->autinc_to
2450 && move_by_pieces_ninsns (data->len, align) > 2)
2451 {
2452 /* Determine the main mode we'll be using. */
2453 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2454 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) < max_size)
2456 mode = tmode;
2457
2458 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2459 {
2460 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = -1;
2463 }
2464
2465 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2466 && ! data->autinc_to)
2467 {
2468 data->to_addr = copy_addr_to_reg (to_addr);
2469 data->autinc_to = 1;
2470 data->explicit_inc_to = 1;
2471 }
2472
2473 if (! data->autinc_to && CONSTANT_P (to_addr))
2474 data->to_addr = copy_addr_to_reg (to_addr);
2475 }
2476
2477 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2478 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2479 align = MOVE_MAX * BITS_PER_UNIT;
2480
2481 /* First store what we can in the largest integer mode, then go to
2482 successively smaller modes. */
2483
2484 while (max_size > 1)
2485 {
2486 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2487 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2488 if (GET_MODE_SIZE (tmode) < max_size)
2489 mode = tmode;
2490
2491 if (mode == VOIDmode)
2492 break;
2493
2494 icode = mov_optab->handlers[(int) mode].insn_code;
2495 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2496 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2497
2498 max_size = GET_MODE_SIZE (mode);
2499 }
2500
2501 /* The code above should have handled everything. */
2502 if (data->len != 0)
2503 abort ();
2504 }
2505
2506 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2507 with move instructions for mode MODE. GENFUN is the gen_... function
2508 to make a move insn for that mode. DATA has all the other info. */
2509
2510 static void
2511 store_by_pieces_2 (genfun, mode, data)
2512 rtx (*genfun) PARAMS ((rtx, ...));
2513 enum machine_mode mode;
2514 struct store_by_pieces *data;
2515 {
2516 unsigned int size = GET_MODE_SIZE (mode);
2517 rtx to1, cst;
2518
2519 while (data->len >= size)
2520 {
2521 if (data->reverse)
2522 data->offset -= size;
2523
2524 if (data->autinc_to)
2525 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2526 data->offset);
2527 else
2528 to1 = adjust_address (data->to, mode, data->offset);
2529
2530 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2531 emit_insn (gen_add2_insn (data->to_addr,
2532 GEN_INT (-(HOST_WIDE_INT) size)));
2533
2534 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2535 emit_insn ((*genfun) (to1, cst));
2536
2537 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2538 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2539
2540 if (! data->reverse)
2541 data->offset += size;
2542
2543 data->len -= size;
2544 }
2545 }
2546 \f
2547 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2548 its length in bytes. */
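/* Usage sketch (hedged; TEMP is a hypothetical 16-byte BLKmode MEM such
   as a slot from assign_stack_temp):

	clear_storage (temp, GEN_INT (16));

   Small constant sizes are cleared inline via clear_by_pieces; larger or
   variable sizes fall back to the clrstr expander or to the memset/bzero
   call built below.  */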
2549
2550 rtx
2551 clear_storage (object, size)
2552 rtx object;
2553 rtx size;
2554 {
2555 #ifdef TARGET_MEM_FUNCTIONS
2556 static tree fn;
2557 tree call_expr, arg_list;
2558 #endif
2559 rtx retval = 0;
2560 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2561 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2562
2563 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2564 just move a zero. Otherwise, do this a piece at a time. */
2565 if (GET_MODE (object) != BLKmode
2566 && GET_CODE (size) == CONST_INT
2567 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2568 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2569 else
2570 {
2571 object = protect_from_queue (object, 1);
2572 size = protect_from_queue (size, 0);
2573
2574 if (GET_CODE (size) == CONST_INT
2575 && MOVE_BY_PIECES_P (INTVAL (size), align))
2576 clear_by_pieces (object, INTVAL (size), align);
2577 else
2578 {
2579 /* Try the most limited insn first, because there's no point
2580 including more than one in the machine description unless
2581 the more limited one has some advantage. */
2582
2583 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2584 enum machine_mode mode;
2585
2586 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2587 mode = GET_MODE_WIDER_MODE (mode))
2588 {
2589 enum insn_code code = clrstr_optab[(int) mode];
2590 insn_operand_predicate_fn pred;
2591
2592 if (code != CODE_FOR_nothing
2593 /* We don't need MODE to be narrower than
2594 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2595 the mode mask, as it is returned by the macro, it will
2596 definitely be less than the actual mode mask. */
2597 && ((GET_CODE (size) == CONST_INT
2598 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2599 <= (GET_MODE_MASK (mode) >> 1)))
2600 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2601 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2602 || (*pred) (object, BLKmode))
2603 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2604 || (*pred) (opalign, VOIDmode)))
2605 {
2606 rtx op1;
2607 rtx last = get_last_insn ();
2608 rtx pat;
2609
2610 op1 = convert_to_mode (mode, size, 1);
2611 pred = insn_data[(int) code].operand[1].predicate;
2612 if (pred != 0 && ! (*pred) (op1, mode))
2613 op1 = copy_to_mode_reg (mode, op1);
2614
2615 pat = GEN_FCN ((int) code) (object, op1, opalign);
2616 if (pat)
2617 {
2618 emit_insn (pat);
2619 return 0;
2620 }
2621 else
2622 delete_insns_since (last);
2623 }
2624 }
2625
2626 /* OBJECT or SIZE may have been passed through protect_from_queue.
2627
2628 It is unsafe to save the value generated by protect_from_queue
2629 and reuse it later. Consider what happens if emit_queue is
2630 called before the return value from protect_from_queue is used.
2631
2632 Expansion of the CALL_EXPR below will call emit_queue before
2633 we are finished emitting RTL for argument setup. So if we are
2634 not careful we could get the wrong value for an argument.
2635
2636 To avoid this problem we go ahead and emit code to copy OBJECT
2637 and SIZE into new pseudos. We can then place those new pseudos
2638 into an RTL_EXPR and use them later, even after a call to
2639 emit_queue.
2640
2641 Note this is not strictly needed for library calls since they
2642 do not call emit_queue before loading their arguments. However,
2643 we may need to have library calls call emit_queue in the future
2644 since failing to do so could cause problems for targets which
2645 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2646 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2647
2648 #ifdef TARGET_MEM_FUNCTIONS
2649 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2650 #else
2651 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2652 TREE_UNSIGNED (integer_type_node));
2653 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2654 #endif
2655
2656 #ifdef TARGET_MEM_FUNCTIONS
2657 /* It is incorrect to use the libcall calling conventions to call
2658 memset in this context.
2659
2660 This could be a user call to memset and the user may wish to
2661 examine the return value from memset.
2662
2663 For targets where libcalls and normal calls have different
2664 conventions for returning pointers, we could end up generating
2665 incorrect code.
2666
2667 So instead of using a libcall sequence we build up a suitable
2668 CALL_EXPR and expand the call in the normal fashion. */
2669 if (fn == NULL_TREE)
2670 {
2671 tree fntype;
2672
2673 /* This was copied from except.c; I don't know whether all of this is
2674 necessary in this context. */
2675 fn = get_identifier ("memset");
2676 fntype = build_pointer_type (void_type_node);
2677 fntype = build_function_type (fntype, NULL_TREE);
2678 fn = build_decl (FUNCTION_DECL, fn, fntype);
2679 ggc_add_tree_root (&fn, 1);
2680 DECL_EXTERNAL (fn) = 1;
2681 TREE_PUBLIC (fn) = 1;
2682 DECL_ARTIFICIAL (fn) = 1;
2683 TREE_NOTHROW (fn) = 1;
2684 make_decl_rtl (fn, NULL);
2685 assemble_external (fn);
2686 }
2687
2688 /* We need to make an argument list for the function call.
2689
2690 memset has three arguments: the first is a void * address, the
2691 second an integer with the initialization value, and the last is a
2692 size_t byte count. */
2693 arg_list
2694 = build_tree_list (NULL_TREE,
2695 make_tree (build_pointer_type (void_type_node),
2696 object));
2697 TREE_CHAIN (arg_list)
2698 = build_tree_list (NULL_TREE,
2699 make_tree (integer_type_node, const0_rtx));
2700 TREE_CHAIN (TREE_CHAIN (arg_list))
2701 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2702 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2703
2704 /* Now we have to build up the CALL_EXPR itself. */
2705 call_expr = build1 (ADDR_EXPR,
2706 build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2709 TREE_SIDE_EFFECTS (call_expr) = 1;
2710
2711 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2712 #else
2713 emit_library_call (bzero_libfunc, LCT_NORMAL,
2714 VOIDmode, 2, object, Pmode, size,
2715 TYPE_MODE (integer_type_node));
2716 #endif
2717
2718 /* If we are initializing a readonly value, show the above call
2719 clobbered it. Otherwise, a load from it may erroneously be
2720 hoisted from a loop. */
2721 if (RTX_UNCHANGING_P (object))
2722 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2723 }
2724 }
2725
2726 return retval;
2727 }
2728
2729 /* Generate code to copy Y into X.
2730 Both Y and X must have the same mode, except that
2731 Y can be a constant with VOIDmode.
2732 This mode cannot be BLKmode; use emit_block_move for that.
2733
2734 Return the last instruction emitted. */
2735
2736 rtx
2737 emit_move_insn (x, y)
2738 rtx x, y;
2739 {
2740 enum machine_mode mode = GET_MODE (x);
2741 rtx y_cst = NULL_RTX;
2742 rtx last_insn;
2743
2744 x = protect_from_queue (x, 1);
2745 y = protect_from_queue (y, 0);
2746
2747 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2748 abort ();
2749
2750 /* Never force constant_p_rtx to memory. */
2751 if (GET_CODE (y) == CONSTANT_P_RTX)
2752 ;
2753 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2754 {
2755 y_cst = y;
2756 y = force_const_mem (mode, y);
2757 }
2758
2759 /* If X or Y are memory references, verify that their addresses are valid
2760 for the machine. */
2761 if (GET_CODE (x) == MEM
2762 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2763 && ! push_operand (x, GET_MODE (x)))
2764 || (flag_force_addr
2765 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2766 x = validize_mem (x);
2767
2768 if (GET_CODE (y) == MEM
2769 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2770 || (flag_force_addr
2771 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2772 y = validize_mem (y);
2773
2774 if (mode == BLKmode)
2775 abort ();
2776
2777 last_insn = emit_move_insn_1 (x, y);
2778
2779 if (y_cst && GET_CODE (x) == REG)
2780 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2781
2782 return last_insn;
2783 }
2784
2785 /* Low level part of emit_move_insn.
2786 Called just like emit_move_insn, but assumes X and Y
2787 are basically valid. */
2788
2789 rtx
2790 emit_move_insn_1 (x, y)
2791 rtx x, y;
2792 {
2793 enum machine_mode mode = GET_MODE (x);
2794 enum machine_mode submode;
2795 enum mode_class class = GET_MODE_CLASS (mode);
2796
2797 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2798 abort ();
2799
2800 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2801 return
2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2803
2804 /* Expand complex moves by moving real part and imag part, if possible. */
2805 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2806 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2807 * BITS_PER_UNIT),
2808 (class == MODE_COMPLEX_INT
2809 ? MODE_INT : MODE_FLOAT),
2810 0))
2811 && (mov_optab->handlers[(int) submode].insn_code
2812 != CODE_FOR_nothing))
2813 {
2814 /* Don't split destination if it is a stack push. */
2815 int stack = push_operand (x, GET_MODE (x));
2816
2817 #ifdef PUSH_ROUNDING
2818 /* In case we output to the stack, but the size is smaller than what the
2819 machine can push exactly, we need to use move instructions. */
2820 if (stack
2821 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2822 != GET_MODE_SIZE (submode)))
2823 {
2824 rtx temp;
2825 HOST_WIDE_INT offset1, offset2;
2826
2827 /* Do not use anti_adjust_stack, since we don't want to update
2828 stack_pointer_delta. */
2829 temp = expand_binop (Pmode,
2830 #ifdef STACK_GROWS_DOWNWARD
2831 sub_optab,
2832 #else
2833 add_optab,
2834 #endif
2835 stack_pointer_rtx,
2836 GEN_INT
2837 (PUSH_ROUNDING
2838 (GET_MODE_SIZE (GET_MODE (x)))),
2839 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2840
2841 if (temp != stack_pointer_rtx)
2842 emit_move_insn (stack_pointer_rtx, temp);
2843
2844 #ifdef STACK_GROWS_DOWNWARD
2845 offset1 = 0;
2846 offset2 = GET_MODE_SIZE (submode);
2847 #else
2848 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2849 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2850 + GET_MODE_SIZE (submode));
2851 #endif
2852
2853 emit_move_insn (change_address (x, submode,
2854 gen_rtx_PLUS (Pmode,
2855 stack_pointer_rtx,
2856 GEN_INT (offset1))),
2857 gen_realpart (submode, y));
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2860 stack_pointer_rtx,
2861 GEN_INT (offset2))),
2862 gen_imagpart (submode, y));
2863 }
2864 else
2865 #endif
2866 /* If this is a stack push, push the highpart first, so it
2867 will be in the argument order.
2868
2869 In that case, change_address is used only to convert
2870 the mode, not to change the address. */
2871 if (stack)
2872 {
2873 /* Note that the real part always precedes the imag part in memory
2874 regardless of machine's endianness. */
2875 #ifdef STACK_GROWS_DOWNWARD
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (gen_rtx_MEM (submode, XEXP (x, 0)),
2878 gen_imagpart (submode, y)));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_realpart (submode, y)));
2882 #else
2883 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2884 (gen_rtx_MEM (submode, XEXP (x, 0)),
2885 gen_realpart (submode, y)));
2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2887 (gen_rtx_MEM (submode, XEXP (x, 0)),
2888 gen_imagpart (submode, y)));
2889 #endif
2890 }
2891 else
2892 {
2893 rtx realpart_x, realpart_y;
2894 rtx imagpart_x, imagpart_y;
2895
2896 /* If this is a complex value with each part being smaller than a
2897 word, the usual calling sequence will likely pack the pieces into
2898 a single register. Unfortunately, SUBREG of hard registers only
2899 deals in terms of words, so we have a problem converting input
2900 arguments to the CONCAT of two registers that is used elsewhere
2901 for complex values. If this is before reload, we can copy it into
2902 memory and reload. FIXME, we should see about using extract and
2903 insert on integer registers, but complex short and complex char
2904 variables should be rarely used. */
2905 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2906 && (reload_in_progress | reload_completed) == 0)
2907 {
2908 int packed_dest_p
2909 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2910 int packed_src_p
2911 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2912
2913 if (packed_dest_p || packed_src_p)
2914 {
2915 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2916 ? MODE_FLOAT : MODE_INT);
2917
2918 enum machine_mode reg_mode
2919 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2920
2921 if (reg_mode != BLKmode)
2922 {
2923 rtx mem = assign_stack_temp (reg_mode,
2924 GET_MODE_SIZE (mode), 0);
2925 rtx cmem = adjust_address (mem, mode, 0);
2926
2927 cfun->cannot_inline
2928 = N_("function using short complex types cannot be inline");
2929
2930 if (packed_dest_p)
2931 {
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2933
2934 emit_move_insn_1 (cmem, y);
2935 return emit_move_insn_1 (sreg, mem);
2936 }
2937 else
2938 {
2939 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2940
2941 emit_move_insn_1 (mem, sreg);
2942 return emit_move_insn_1 (x, cmem);
2943 }
2944 }
2945 }
2946 }
2947
2948 realpart_x = gen_realpart (submode, x);
2949 realpart_y = gen_realpart (submode, y);
2950 imagpart_x = gen_imagpart (submode, x);
2951 imagpart_y = gen_imagpart (submode, y);
2952
2953 /* Show the output dies here. This is necessary for SUBREGs
2954 of pseudos since we cannot track their lifetimes correctly;
2955 hard regs shouldn't appear here except as return values.
2956 We never want to emit such a clobber after reload. */
2957 if (x != y
2958 && ! (reload_in_progress || reload_completed)
2959 && (GET_CODE (realpart_x) == SUBREG
2960 || GET_CODE (imagpart_x) == SUBREG))
2961 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2962
2963 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2964 (realpart_x, realpart_y));
2965 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2966 (imagpart_x, imagpart_y));
2967 }
2968
2969 return get_last_insn ();
2970 }
2971
2972 /* This will handle any multi-word mode that lacks a move_insn pattern.
2973 However, you will get better code if you define such patterns,
2974 even if they must turn into multiple assembler instructions. */
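/* Worked example (hedged; assumes a 32-bit target without a movdi
   pattern): a DImode pseudo-to-pseudo copy takes this branch and emits
   two SImode word moves; because the subwords of a pseudo are SUBREGs,
   a (clobber (reg:DI ...)) of the destination is emitted first, before
   reload, so the whole register's lifetime is tracked correctly.  */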
2975 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2976 {
2977 rtx last_insn = 0;
2978 rtx seq, inner;
2979 int need_clobber;
2980 int i;
2981
2982 #ifdef PUSH_ROUNDING
2983
2984 /* If X is a push on the stack, do the push now and replace
2985 X with a reference to the stack pointer. */
2986 if (push_operand (x, GET_MODE (x)))
2987 {
2988 rtx temp;
2989 enum rtx_code code;
2990
2991 /* Do not use anti_adjust_stack, since we don't want to update
2992 stack_pointer_delta. */
2993 temp = expand_binop (Pmode,
2994 #ifdef STACK_GROWS_DOWNWARD
2995 sub_optab,
2996 #else
2997 add_optab,
2998 #endif
2999 stack_pointer_rtx,
3000 GEN_INT
3001 (PUSH_ROUNDING
3002 (GET_MODE_SIZE (GET_MODE (x)))),
3003 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3004
3005 if (temp != stack_pointer_rtx)
3006 emit_move_insn (stack_pointer_rtx, temp);
3007
3008 code = GET_CODE (XEXP (x, 0));
3009
3010 /* Just hope that small offsets off SP are OK. */
3011 if (code == POST_INC)
3012 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3013 GEN_INT (-((HOST_WIDE_INT)
3014 GET_MODE_SIZE (GET_MODE (x)))));
3015 else if (code == POST_DEC)
3016 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3017 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3018 else
3019 temp = stack_pointer_rtx;
3020
3021 x = change_address (x, VOIDmode, temp);
3022 }
3023 #endif
3024
3025 /* If we are in reload, see if either operand is a MEM whose address
3026 is scheduled for replacement. */
3027 if (reload_in_progress && GET_CODE (x) == MEM
3028 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3029 x = replace_equiv_address_nv (x, inner);
3030 if (reload_in_progress && GET_CODE (y) == MEM
3031 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3032 y = replace_equiv_address_nv (y, inner);
3033
3034 start_sequence ();
3035
3036 need_clobber = 0;
3037 for (i = 0;
3038 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3039 i++)
3040 {
3041 rtx xpart = operand_subword (x, i, 1, mode);
3042 rtx ypart = operand_subword (y, i, 1, mode);
3043
3044 /* If we can't get a part of Y, put Y into memory if it is a
3045 constant. Otherwise, force it into a register. If we still
3046 can't get a part of Y, abort. */
3047 if (ypart == 0 && CONSTANT_P (y))
3048 {
3049 y = force_const_mem (mode, y);
3050 ypart = operand_subword (y, i, 1, mode);
3051 }
3052 else if (ypart == 0)
3053 ypart = operand_subword_force (y, i, mode);
3054
3055 if (xpart == 0 || ypart == 0)
3056 abort ();
3057
3058 need_clobber |= (GET_CODE (xpart) == SUBREG);
3059
3060 last_insn = emit_move_insn (xpart, ypart);
3061 }
3062
3063 seq = gen_sequence ();
3064 end_sequence ();
3065
3066 /* Show the output dies here. This is necessary for SUBREGs
3067 of pseudos since we cannot track their lifetimes correctly;
3068 hard regs shouldn't appear here except as return values.
3069 We never want to emit such a clobber after reload. */
3070 if (x != y
3071 && ! (reload_in_progress || reload_completed)
3072 && need_clobber != 0)
3073 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3074
3075 emit_insn (seq);
3076
3077 return last_insn;
3078 }
3079 else
3080 abort ();
3081 }
3082 \f
3083 /* Pushing data onto the stack. */
3084
3085 /* Push a block of length SIZE (perhaps variable)
3086 and return an rtx to address the beginning of the block.
3087 Note that it is not possible for the value returned to be a QUEUED.
3088 The value may be virtual_outgoing_args_rtx.
3089
3090 EXTRA is the number of bytes of padding to push in addition to SIZE.
3091 BELOW nonzero means this padding comes at low addresses;
3092 otherwise, the padding comes at high addresses. */
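/* Usage sketch (hedged; assumes a downward-growing stack and no extra
   padding):

	rtx addr = push_block (GEN_INT (16), 0, 0);

   extends the stack by 16 bytes and returns an address, based on
   virtual_outgoing_args_rtx, at which the new 16-byte block can be
   stored.  */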
3093
3094 rtx
3095 push_block (size, extra, below)
3096 rtx size;
3097 int extra, below;
3098 {
3099 rtx temp;
3100
3101 size = convert_modes (Pmode, ptr_mode, size, 1);
3102 if (CONSTANT_P (size))
3103 anti_adjust_stack (plus_constant (size, extra));
3104 else if (GET_CODE (size) == REG && extra == 0)
3105 anti_adjust_stack (size);
3106 else
3107 {
3108 temp = copy_to_mode_reg (Pmode, size);
3109 if (extra != 0)
3110 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3111 temp, 0, OPTAB_LIB_WIDEN);
3112 anti_adjust_stack (temp);
3113 }
3114
3115 #ifndef STACK_GROWS_DOWNWARD
3116 if (0)
3117 #else
3118 if (1)
3119 #endif
3120 {
3121 temp = virtual_outgoing_args_rtx;
3122 if (extra != 0 && below)
3123 temp = plus_constant (temp, extra);
3124 }
3125 else
3126 {
3127 if (GET_CODE (size) == CONST_INT)
3128 temp = plus_constant (virtual_outgoing_args_rtx,
3129 -INTVAL (size) - (below ? 0 : extra));
3130 else if (extra != 0 && !below)
3131 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3132 negate_rtx (Pmode, plus_constant (size, extra)));
3133 else
3134 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3135 negate_rtx (Pmode, size));
3136 }
3137
3138 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3139 }
3140
3141 #ifdef PUSH_ROUNDING
3142
3143 /* Emit single push insn. */
3144
3145 static void
3146 emit_single_push_insn (mode, x, type)
3147 rtx x;
3148 enum machine_mode mode;
3149 tree type;
3150 {
3151 rtx dest_addr;
3152 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3153 rtx dest;
3154 enum insn_code icode;
3155 insn_operand_predicate_fn pred;
3156
3157 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3158 /* If there is a push pattern, use it. Otherwise fall back to the old way
3159 of handing a MEM that represents the push operation to the move expander. */
3160 icode = push_optab->handlers[(int) mode].insn_code;
3161 if (icode != CODE_FOR_nothing)
3162 {
3163 if (((pred = insn_data[(int) icode].operand[0].predicate)
3164 && !((*pred) (x, mode))))
3165 x = force_reg (mode, x);
3166 emit_insn (GEN_FCN (icode) (x));
3167 return;
3168 }
3169 if (GET_MODE_SIZE (mode) == rounded_size)
3170 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3171 else
3172 {
3173 #ifdef STACK_GROWS_DOWNWARD
3174 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3175 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3176 #else
3177 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3178 GEN_INT (rounded_size));
3179 #endif
3180 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3181 }
3182
3183 dest = gen_rtx_MEM (mode, dest_addr);
3184
3185 if (type != 0)
3186 {
3187 set_mem_attributes (dest, type, 1);
3188
3189 if (flag_optimize_sibling_calls)
3190 /* Function incoming arguments may overlap with sibling call
3191 outgoing arguments and we cannot allow reordering of reads
3192 from function arguments with stores to outgoing arguments
3193 of sibling calls. */
3194 set_mem_alias_set (dest, 0);
3195 }
3196 emit_move_insn (dest, x);
3197 }
3198 #endif
3199
3200 /* Generate code to push X onto the stack, assuming it has mode MODE and
3201 type TYPE.
3202 MODE is redundant except when X is a CONST_INT (since they don't
3203 carry mode info).
3204 SIZE is an rtx for the size of data to be copied (in bytes),
3205 needed only if X is BLKmode.
3206
3207 ALIGN (in bits) is maximum alignment we can assume.
3208
3209 If PARTIAL and REG are both nonzero, then copy that many of the first
3210 words of X into registers starting with REG, and push the rest of X.
3211 The amount of space pushed is decreased by PARTIAL words,
3212 rounded *down* to a multiple of PARM_BOUNDARY.
3213 REG must be a hard register in this case.
3214 If REG is zero but PARTIAL is not, take all other actions for an
3215 argument partially in registers, but do not actually load any
3216 registers.
3217
3218 EXTRA is the amount in bytes of extra space to leave next to this arg.
3219 This is ignored if an argument block has already been allocated.
3220
3221 On a machine that lacks real push insns, ARGS_ADDR is the address of
3222 the bottom of the argument block for this call. We use indexing off there
3223 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3224 argument block has not been preallocated.
3225
3226 ARGS_SO_FAR is the size of args previously pushed for this call.
3227
3228 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3229 for arguments passed in registers. If nonzero, it will be the number
3230 of bytes required. */
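/* Usage sketch (hedged; every operand below is hypothetical): pushing a
   SImode constant with no partial registers and no preallocated
   argument block might look like

	emit_push_insn (GEN_INT (42), SImode, integer_type_node, NULL_RTX,
			PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX, const0_rtx,
			0, NULL_RTX);

   which, where PUSH_ROUNDING and PUSH_ARGS permit, reduces to a single
   emit_single_push_insn.  */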
3231
3232 void
3233 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3234 args_addr, args_so_far, reg_parm_stack_space,
3235 alignment_pad)
3236 rtx x;
3237 enum machine_mode mode;
3238 tree type;
3239 rtx size;
3240 unsigned int align;
3241 int partial;
3242 rtx reg;
3243 int extra;
3244 rtx args_addr;
3245 rtx args_so_far;
3246 int reg_parm_stack_space;
3247 rtx alignment_pad;
3248 {
3249 rtx xinner;
3250 enum direction stack_direction
3251 #ifdef STACK_GROWS_DOWNWARD
3252 = downward;
3253 #else
3254 = upward;
3255 #endif
3256
3257 /* Decide where to pad the argument: `downward' for below,
3258 `upward' for above, or `none' for no padding.
3259 Default is below for small data on big-endian machines; else above. */
3260 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3261
3262 /* Invert direction if stack is post-decrement.
3263 FIXME: why? */
3264 if (STACK_PUSH_CODE == POST_DEC)
3265 if (where_pad != none)
3266 where_pad = (where_pad == downward ? upward : downward);
3267
3268 xinner = x = protect_from_queue (x, 0);
3269
3270 if (mode == BLKmode)
3271 {
3272 /* Copy a block into the stack, entirely or partially. */
3273
3274 rtx temp;
3275 int used = partial * UNITS_PER_WORD;
3276 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3277 int skip;
3278
3279 if (size == 0)
3280 abort ();
3281
3282 used -= offset;
3283
3284 /* USED is now the # of bytes we need not copy to the stack
3285 because registers will take care of them. */
3286
3287 if (partial != 0)
3288 xinner = adjust_address (xinner, BLKmode, used);
3289
3290 /* If the partial register-part of the arg counts in its stack size,
3291 skip the part of stack space corresponding to the registers.
3292 Otherwise, start copying to the beginning of the stack space,
3293 by setting SKIP to 0. */
3294 skip = (reg_parm_stack_space == 0) ? 0 : used;
3295
3296 #ifdef PUSH_ROUNDING
3297 /* Do it with several push insns if that doesn't take lots of insns
3298 and if there is no difficulty with push insns that skip bytes
3299 on the stack for alignment purposes. */
3300 if (args_addr == 0
3301 && PUSH_ARGS
3302 && GET_CODE (size) == CONST_INT
3303 && skip == 0
3304 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3305 /* Here we avoid the case of a structure whose weak alignment
3306 forces many pushes of a small amount of data,
3307 and such small pushes do rounding that causes trouble. */
3308 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3309 || align >= BIGGEST_ALIGNMENT
3310 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3311 == (align / BITS_PER_UNIT)))
3312 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3313 {
3314 /* Push padding now if padding above and stack grows down,
3315 or if padding below and stack grows up.
3316 But if space already allocated, this has already been done. */
3317 if (extra && args_addr == 0
3318 && where_pad != none && where_pad != stack_direction)
3319 anti_adjust_stack (GEN_INT (extra));
3320
3321 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3322 }
3323 else
3324 #endif /* PUSH_ROUNDING */
3325 {
3326 rtx target;
3327
3328 /* Otherwise make space on the stack and copy the data
3329 to the address of that space. */
3330
3331 /* Deduct words put into registers from the size we must copy. */
3332 if (partial != 0)
3333 {
3334 if (GET_CODE (size) == CONST_INT)
3335 size = GEN_INT (INTVAL (size) - used);
3336 else
3337 size = expand_binop (GET_MODE (size), sub_optab, size,
3338 GEN_INT (used), NULL_RTX, 0,
3339 OPTAB_LIB_WIDEN);
3340 }
3341
3342 /* Get the address of the stack space.
3343 In this case, we do not deal with EXTRA separately.
3344 A single stack adjust will do. */
3345 if (! args_addr)
3346 {
3347 temp = push_block (size, extra, where_pad == downward);
3348 extra = 0;
3349 }
3350 else if (GET_CODE (args_so_far) == CONST_INT)
3351 temp = memory_address (BLKmode,
3352 plus_constant (args_addr,
3353 skip + INTVAL (args_so_far)));
3354 else
3355 temp = memory_address (BLKmode,
3356 plus_constant (gen_rtx_PLUS (Pmode,
3357 args_addr,
3358 args_so_far),
3359 skip));
3360 target = gen_rtx_MEM (BLKmode, temp);
3361
3362 if (type != 0)
3363 {
3364 set_mem_attributes (target, type, 1);
3365 /* Function incoming arguments may overlap with sibling call
3366 outgoing arguments and we cannot allow reordering of reads
3367 from function arguments with stores to outgoing arguments
3368 of sibling calls. */
3369 set_mem_alias_set (target, 0);
3370 }
3371 else
3372 set_mem_align (target, align);
3373
3374 /* TEMP is the address of the block. Copy the data there. */
3375 if (GET_CODE (size) == CONST_INT
3376 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3377 {
3378 move_by_pieces (target, xinner, INTVAL (size), align);
3379 goto ret;
3380 }
3381 else
3382 {
3383 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3384 enum machine_mode mode;
3385
3386 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3387 mode != VOIDmode;
3388 mode = GET_MODE_WIDER_MODE (mode))
3389 {
3390 enum insn_code code = movstr_optab[(int) mode];
3391 insn_operand_predicate_fn pred;
3392
3393 if (code != CODE_FOR_nothing
3394 && ((GET_CODE (size) == CONST_INT
3395 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3396 <= (GET_MODE_MASK (mode) >> 1)))
3397 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3398 && (!(pred = insn_data[(int) code].operand[0].predicate)
3399 || ((*pred) (target, BLKmode)))
3400 && (!(pred = insn_data[(int) code].operand[1].predicate)
3401 || ((*pred) (xinner, BLKmode)))
3402 && (!(pred = insn_data[(int) code].operand[3].predicate)
3403 || ((*pred) (opalign, VOIDmode))))
3404 {
3405 rtx op2 = convert_to_mode (mode, size, 1);
3406 rtx last = get_last_insn ();
3407 rtx pat;
3408
3409 pred = insn_data[(int) code].operand[2].predicate;
3410 if (pred != 0 && ! (*pred) (op2, mode))
3411 op2 = copy_to_mode_reg (mode, op2);
3412
3413 pat = GEN_FCN ((int) code) (target, xinner,
3414 op2, opalign);
3415 if (pat)
3416 {
3417 emit_insn (pat);
3418 goto ret;
3419 }
3420 else
3421 delete_insns_since (last);
3422 }
3423 }
3424 }
3425
3426 if (!ACCUMULATE_OUTGOING_ARGS)
3427 {
3428 /* If the source is referenced relative to the stack pointer,
3429 copy it to another register to stabilize it. We do not need
3430 to do this if we know that we won't be changing sp. */
3431
3432 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3433 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3434 temp = copy_to_reg (temp);
3435 }
3436
3437 /* Make inhibit_defer_pop nonzero around the library call
3438 to force it to pop the bcopy-arguments right away. */
3439 NO_DEFER_POP;
3440 #ifdef TARGET_MEM_FUNCTIONS
3441 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3442 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3443 convert_to_mode (TYPE_MODE (sizetype),
3444 size, TREE_UNSIGNED (sizetype)),
3445 TYPE_MODE (sizetype));
3446 #else
3447 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3448 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3449 convert_to_mode (TYPE_MODE (integer_type_node),
3450 size,
3451 TREE_UNSIGNED (integer_type_node)),
3452 TYPE_MODE (integer_type_node));
3453 #endif
3454 OK_DEFER_POP;
3455 }
3456 }
3457 else if (partial > 0)
3458 {
3459 /* Scalar partly in registers. */
3460
3461 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3462 int i;
3463 int not_stack;
3464 /* # words of start of argument
3465 that we must make space for but need not store. */
3466 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3467 int args_offset = INTVAL (args_so_far);
3468 int skip;
3469
3470 /* Push padding now if padding above and stack grows down,
3471 or if padding below and stack grows up.
3472 But if space already allocated, this has already been done. */
3473 if (extra && args_addr == 0
3474 && where_pad != none && where_pad != stack_direction)
3475 anti_adjust_stack (GEN_INT (extra));
3476
3477 /* If we make space by pushing it, we might as well push
3478 the real data. Otherwise, we can leave OFFSET nonzero
3479 and leave the space uninitialized. */
3480 if (args_addr == 0)
3481 offset = 0;
3482
3483 /* Now NOT_STACK gets the number of words that we don't need to
3484 allocate on the stack. */
3485 not_stack = partial - offset;
3486
3487 /* If the partial register-part of the arg counts in its stack size,
3488 skip the part of stack space corresponding to the registers.
3489 Otherwise, start copying to the beginning of the stack space,
3490 by setting SKIP to 0. */
3491 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3492
3493 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3494 x = validize_mem (force_const_mem (mode, x));
3495
3496 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3497 SUBREGs of such registers are not allowed. */
3498 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3499 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3500 x = copy_to_reg (x);
3501
3502 /* Loop over all the words allocated on the stack for this arg. */
3503 /* We can do it by words, because any scalar bigger than a word
3504 has a size that is a multiple of a word. */
3505 #ifndef PUSH_ARGS_REVERSED
3506 for (i = not_stack; i < size; i++)
3507 #else
3508 for (i = size - 1; i >= not_stack; i--)
3509 #endif
3510 if (i >= not_stack + offset)
3511 emit_push_insn (operand_subword_force (x, i, mode),
3512 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3513 0, args_addr,
3514 GEN_INT (args_offset + ((i - not_stack + skip)
3515 * UNITS_PER_WORD)),
3516 reg_parm_stack_space, alignment_pad);
3517 }
3518 else
3519 {
3520 rtx addr;
3521 rtx target = NULL_RTX;
3522 rtx dest;
3523
3524 /* Push padding now if padding above and stack grows down,
3525 or if padding below and stack grows up.
3526 But if space already allocated, this has already been done. */
3527 if (extra && args_addr == 0
3528 && where_pad != none && where_pad != stack_direction)
3529 anti_adjust_stack (GEN_INT (extra));
3530
3531 #ifdef PUSH_ROUNDING
3532 if (args_addr == 0 && PUSH_ARGS)
3533 emit_single_push_insn (mode, x, type);
3534 else
3535 #endif
3536 {
3537 if (GET_CODE (args_so_far) == CONST_INT)
3538 addr
3539 = memory_address (mode,
3540 plus_constant (args_addr,
3541 INTVAL (args_so_far)));
3542 else
3543 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3544 args_so_far));
3545 target = addr;
3546 dest = gen_rtx_MEM (mode, addr);
3547 if (type != 0)
3548 {
3549 set_mem_attributes (dest, type, 1);
3550 /* Function incoming arguments may overlap with sibling call
3551 outgoing arguments and we cannot allow reordering of reads
3552 from function arguments with stores to outgoing arguments
3553 of sibling calls. */
3554 set_mem_alias_set (dest, 0);
3555 }
3556
3557 emit_move_insn (dest, x);
3558 }
3559
3560 }
3561
3562 ret:
3563 /* If part should go in registers, copy that part
3564 into the appropriate registers. Do this now, at the end,
3565 since mem-to-mem copies above may do function calls. */
3566 if (partial > 0 && reg != 0)
3567 {
3568 /* Handle calls that pass values in multiple non-contiguous locations.
3569 The Irix 6 ABI has examples of this. */
3570 if (GET_CODE (reg) == PARALLEL)
3571 emit_group_load (reg, x, -1); /* ??? size? */
3572 else
3573 move_block_to_reg (REGNO (reg), x, partial, mode);
3574 }
3575
3576 if (extra && args_addr == 0 && where_pad == stack_direction)
3577 anti_adjust_stack (GEN_INT (extra));
3578
3579 if (alignment_pad && args_addr == 0)
3580 anti_adjust_stack (alignment_pad);
3581 }
3582 \f
3583 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3584 operations. */
3585
3586 static rtx
3587 get_subtarget (x)
3588 rtx x;
3589 {
3590 return ((x == 0
3591 /* Only registers can be subtargets. */
3592 || GET_CODE (x) != REG
3593 /* If the register is readonly, it can't be set more than once. */
3594 || RTX_UNCHANGING_P (x)
3595 /* Don't use hard regs to avoid extending their life. */
3596 || REGNO (x) < FIRST_PSEUDO_REGISTER
3597 /* Avoid subtargets inside loops,
3598 since they hide some invariant expressions. */
3599 || preserve_subexpressions_p ())
3600 ? 0 : x);
3601 }
3602
3603 /* Expand an assignment that stores the value of FROM into TO.
3604 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3605 (This may contain a QUEUED rtx;
3606 if the value is constant, this rtx is a constant.)
3607 Otherwise, the returned value is NULL_RTX.
3608
3609 SUGGEST_REG is no longer actually used.
3610 It used to mean, copy the value through a register
3611 and return that register, if that is possible.
3612 We now use WANT_VALUE to decide whether to do this. */
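/* Illustrative sketch (hedged): for C source such as

	s.f = e;

   TO is a COMPONENT_REF, so the branch below uses get_inner_reference
   to find the field's position and size within S and then stores the
   expanded value of E with store_field; a plain scalar assignment never
   enters that branch and is handled further on.  */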
3613
3614 rtx
3615 expand_assignment (to, from, want_value, suggest_reg)
3616 tree to, from;
3617 int want_value;
3618 int suggest_reg ATTRIBUTE_UNUSED;
3619 {
3620 rtx to_rtx = 0;
3621 rtx result;
3622
3623 /* Don't crash if the lhs of the assignment was erroneous. */
3624
3625 if (TREE_CODE (to) == ERROR_MARK)
3626 {
3627 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3628 return want_value ? result : NULL_RTX;
3629 }
3630
3631 /* Assignment of a structure component needs special treatment
3632 if the structure component's rtx is not simply a MEM.
3633 Assignment of an array element at a constant index, and assignment of
3634 an array element in an unaligned packed structure field, have the same
3635 problem. */
3636
3637 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3638 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3639 {
3640 enum machine_mode mode1;
3641 HOST_WIDE_INT bitsize, bitpos;
3642 rtx orig_to_rtx;
3643 tree offset;
3644 int unsignedp;
3645 int volatilep = 0;
3646 tree tem;
3647
3648 push_temp_slots ();
3649 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3650 &unsignedp, &volatilep);
3651
3652 /* If we are going to use store_bit_field and extract_bit_field,
3653 make sure to_rtx will be safe for multiple use. */
3654
3655 if (mode1 == VOIDmode && want_value)
3656 tem = stabilize_reference (tem);
3657
3658 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3659
3660 if (offset != 0)
3661 {
3662 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3663
3664 if (GET_CODE (to_rtx) != MEM)
3665 abort ();
3666
3667 if (GET_MODE (offset_rtx) != ptr_mode)
3668 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3669
3670 #ifdef POINTERS_EXTEND_UNSIGNED
3671 if (GET_MODE (offset_rtx) != Pmode)
3672 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3673 #endif
3674
3675 /* A constant address in TO_RTX can have VOIDmode, we must not try
3676 to call force_reg for that case. Avoid that case. */
3677 if (GET_CODE (to_rtx) == MEM
3678 && GET_MODE (to_rtx) == BLKmode
3679 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3680 && bitsize > 0
3681 && (bitpos % bitsize) == 0
3682 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3683 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3684 {
3685 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3686 bitpos = 0;
3687 }
3688
3689 to_rtx = offset_address (to_rtx, offset_rtx,
3690 highest_pow2_factor (offset));
3691 }
3692
3693 if (GET_CODE (to_rtx) == MEM)
3694 {
3695 tree old_expr = MEM_EXPR (to_rtx);
3696
3697 /* If the field is at offset zero, we could have been given the
3698 DECL_RTX of the parent struct. Don't munge it. */
3699 to_rtx = shallow_copy_rtx (to_rtx);
3700
3701 set_mem_attributes (to_rtx, to, 0);
3702
3703 /* If we changed MEM_EXPR, that means we're now referencing
3704 the COMPONENT_REF, which means that MEM_OFFSET must be
3705 relative to that field. But we've not yet reflected BITPOS
3706 in TO_RTX. This will be done in store_field. Adjust for
3707 that by biasing MEM_OFFSET by -bitpos. */
3708 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3709 && (bitpos / BITS_PER_UNIT) != 0)
3710 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3711 - (bitpos / BITS_PER_UNIT)));
3712 }
3713
3714 /* Deal with volatile and readonly fields. The former is only done
3715 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3716 if (volatilep && GET_CODE (to_rtx) == MEM)
3717 {
3718 if (to_rtx == orig_to_rtx)
3719 to_rtx = copy_rtx (to_rtx);
3720 MEM_VOLATILE_P (to_rtx) = 1;
3721 }
3722
3723 if (TREE_CODE (to) == COMPONENT_REF
3724 && TREE_READONLY (TREE_OPERAND (to, 1)))
3725 {
3726 if (to_rtx == orig_to_rtx)
3727 to_rtx = copy_rtx (to_rtx);
3728 RTX_UNCHANGING_P (to_rtx) = 1;
3729 }
3730
3731 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3732 {
3733 if (to_rtx == orig_to_rtx)
3734 to_rtx = copy_rtx (to_rtx);
3735 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3736 }
3737
3738 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3739 (want_value
3740 /* Spurious cast for HPUX compiler. */
3741 ? ((enum machine_mode)
3742 TYPE_MODE (TREE_TYPE (to)))
3743 : VOIDmode),
3744 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3745
3746 preserve_temp_slots (result);
3747 free_temp_slots ();
3748 pop_temp_slots ();
3749
3750 /* If the value is meaningful, convert RESULT to the proper mode.
3751 Otherwise, return nothing. */
3752 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3753 TYPE_MODE (TREE_TYPE (from)),
3754 result,
3755 TREE_UNSIGNED (TREE_TYPE (to)))
3756 : NULL_RTX);
3757 }
3758
3759 /* If the rhs is a function call and its value is not an aggregate,
3760 call the function before we start to compute the lhs.
3761 This is needed for correct code for cases such as
3762 val = setjmp (buf) on machines where reference to val
3763 requires loading up part of an address in a separate insn.
3764
3765 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3766 since it might be a promoted variable where the zero- or sign- extension
3767 needs to be done. Handling this in the normal way is safe because no
3768 computation is done before the call. */
3769 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3770 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3771 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3772 && GET_CODE (DECL_RTL (to)) == REG))
3773 {
3774 rtx value;
3775
3776 push_temp_slots ();
3777 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3778 if (to_rtx == 0)
3779 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3780
3781 /* Handle calls that return values in multiple non-contiguous locations.
3782 The Irix 6 ABI has examples of this. */
3783 if (GET_CODE (to_rtx) == PARALLEL)
3784 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3785 else if (GET_MODE (to_rtx) == BLKmode)
3786 emit_block_move (to_rtx, value, expr_size (from));
3787 else
3788 {
3789 #ifdef POINTERS_EXTEND_UNSIGNED
3790 if (POINTER_TYPE_P (TREE_TYPE (to))
3791 && GET_MODE (to_rtx) != GET_MODE (value))
3792 value = convert_memory_address (GET_MODE (to_rtx), value);
3793 #endif
3794 emit_move_insn (to_rtx, value);
3795 }
3796 preserve_temp_slots (to_rtx);
3797 free_temp_slots ();
3798 pop_temp_slots ();
3799 return want_value ? to_rtx : NULL_RTX;
3800 }
3801
3802 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3803 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3804
3805 if (to_rtx == 0)
3806 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3807
3808 /* Don't move directly into a return register. */
3809 if (TREE_CODE (to) == RESULT_DECL
3810 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3811 {
3812 rtx temp;
3813
3814 push_temp_slots ();
3815 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3816
3817 if (GET_CODE (to_rtx) == PARALLEL)
3818 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3819 else
3820 emit_move_insn (to_rtx, temp);
3821
3822 preserve_temp_slots (to_rtx);
3823 free_temp_slots ();
3824 pop_temp_slots ();
3825 return want_value ? to_rtx : NULL_RTX;
3826 }
3827
3828 /* In case we are returning the contents of an object which overlaps
3829 the place the value is being stored, use a safe function when copying
3830 a value through a pointer into a structure value return block. */
3831 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3832 && current_function_returns_struct
3833 && !current_function_returns_pcc_struct)
3834 {
3835 rtx from_rtx, size;
3836
3837 push_temp_slots ();
3838 size = expr_size (from);
3839 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3840
3841 #ifdef TARGET_MEM_FUNCTIONS
3842 emit_library_call (memmove_libfunc, LCT_NORMAL,
3843 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3844 XEXP (from_rtx, 0), Pmode,
3845 convert_to_mode (TYPE_MODE (sizetype),
3846 size, TREE_UNSIGNED (sizetype)),
3847 TYPE_MODE (sizetype));
3848 #else
3849 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3850 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3851 XEXP (to_rtx, 0), Pmode,
3852 convert_to_mode (TYPE_MODE (integer_type_node),
3853 size, TREE_UNSIGNED (integer_type_node)),
3854 TYPE_MODE (integer_type_node));
3855 #endif
3856
3857 preserve_temp_slots (to_rtx);
3858 free_temp_slots ();
3859 pop_temp_slots ();
3860 return want_value ? to_rtx : NULL_RTX;
3861 }
3862
3863 /* Compute FROM and store the value in the rtx we got. */
3864
3865 push_temp_slots ();
3866 result = store_expr (from, to_rtx, want_value);
3867 preserve_temp_slots (result);
3868 free_temp_slots ();
3869 pop_temp_slots ();
3870 return want_value ? result : NULL_RTX;
3871 }
3872
3873 /* Generate code for computing expression EXP,
3874 and storing the value into TARGET.
3875 TARGET may contain a QUEUED rtx.
3876
3877 If WANT_VALUE is nonzero, return a copy of the value
3878 not in TARGET, so that we can be sure to use the proper
3879 value in a containing expression even if TARGET has something
3880 else stored in it. If possible, we copy the value through a pseudo
3881 and return that pseudo. Or, if the value is constant, we try to
3882 return the constant. In some cases, we return a pseudo
3883 copied *from* TARGET.
3884
3885 If the mode is BLKmode then we may return TARGET itself.
3886 It turns out that in BLKmode it doesn't cause a problem,
3887 because C has no operators that could combine two different
3888 assignments into the same BLKmode object with different values
3889 with no sequence point. Will other languages need this to
3890 be more thorough?
3891
3892 If WANT_VALUE is 0, we return NULL, to make sure
3893 to catch quickly any cases where the caller uses the value
3894 and fails to set WANT_VALUE. */
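/* Hypothetical example of why WANT_VALUE matters (an illustration, not
   taken from the sources): in

       int a, b, c;
       void f (void) { a = (b = c); }

   the inner assignment is itself used as a value, so store_expr for
   `b = c' is asked to return an rtx (typically a pseudo or a constant)
   that the outer assignment to A can then consume.  */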
3895
3896 rtx
3897 store_expr (exp, target, want_value)
3898 tree exp;
3899 rtx target;
3900 int want_value;
3901 {
3902 rtx temp;
3903 int dont_return_target = 0;
3904 int dont_store_target = 0;
3905
3906 if (TREE_CODE (exp) == COMPOUND_EXPR)
3907 {
3908 /* Perform first part of compound expression, then assign from second
3909 part. */
3910 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3911 emit_queue ();
3912 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3913 }
3914 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3915 {
3916 /* For conditional expression, get safe form of the target. Then
3917 test the condition, doing the appropriate assignment on either
3918 side. This avoids the creation of unnecessary temporaries.
3919 For non-BLKmode, it is more efficient not to do this. */
3920
3921 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3922
3923 emit_queue ();
3924 target = protect_from_queue (target, 1);
3925
3926 do_pending_stack_adjust ();
3927 NO_DEFER_POP;
3928 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3929 start_cleanup_deferral ();
3930 store_expr (TREE_OPERAND (exp, 1), target, 0);
3931 end_cleanup_deferral ();
3932 emit_queue ();
3933 emit_jump_insn (gen_jump (lab2));
3934 emit_barrier ();
3935 emit_label (lab1);
3936 start_cleanup_deferral ();
3937 store_expr (TREE_OPERAND (exp, 2), target, 0);
3938 end_cleanup_deferral ();
3939 emit_queue ();
3940 emit_label (lab2);
3941 OK_DEFER_POP;
3942
3943 return want_value ? target : NULL_RTX;
3944 }
3945 else if (queued_subexp_p (target))
3946 /* If target contains a postincrement, let's not risk
3947 using it as the place to generate the rhs. */
3948 {
3949 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3950 {
3951 /* Expand EXP into a new pseudo. */
3952 temp = gen_reg_rtx (GET_MODE (target));
3953 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3954 }
3955 else
3956 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3957
3958 /* If target is volatile, ANSI requires accessing the value
3959 *from* the target, if it is accessed. So make that happen.
3960 In no case return the target itself. */
3961 if (! MEM_VOLATILE_P (target) && want_value)
3962 dont_return_target = 1;
3963 }
3964 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3965 && GET_MODE (target) != BLKmode)
3966 /* If target is in memory and caller wants value in a register instead,
3967 arrange that. Pass TARGET as target for expand_expr so that,
3968 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3969 We know expand_expr will not use the target in that case.
3970 Don't do this if TARGET is volatile because we are supposed
3971 to write it and then read it. */
3972 {
3973 temp = expand_expr (exp, target, GET_MODE (target), 0);
3974 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3975 {
3976 /* If TEMP is already in the desired TARGET, only copy it from
3977 memory and don't store it there again. */
3978 if (temp == target
3979 || (rtx_equal_p (temp, target)
3980 && ! side_effects_p (temp) && ! side_effects_p (target)))
3981 dont_store_target = 1;
3982 temp = copy_to_reg (temp);
3983 }
3984 dont_return_target = 1;
3985 }
3986 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3987 /* If this is a scalar in a register that is stored in a wider mode
3988 than the declared mode, compute the result into its declared mode
3989 and then convert to the wider mode. Our value is the computed
3990 expression. */
3991 {
3992 rtx inner_target = 0;
3993
3994 /* If we don't want a value, we can do the conversion inside EXP,
3995 which will often result in some optimizations. Do the conversion
3996 in two steps: first change the signedness, if needed, then
3997 the extend. But don't do this if the type of EXP is a subtype
3998 of something else since then the conversion might involve
3999 more than just converting modes. */
4000 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4001 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4002 {
4003 if (TREE_UNSIGNED (TREE_TYPE (exp))
4004 != SUBREG_PROMOTED_UNSIGNED_P (target))
4005 exp
4006 = convert
4007 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4008 TREE_TYPE (exp)),
4009 exp);
4010
4011 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4012 SUBREG_PROMOTED_UNSIGNED_P (target)),
4013 exp);
4014
4015 inner_target = SUBREG_REG (target);
4016 }
4017
4018 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4019
4020 /* If TEMP is a volatile MEM and we want a result value, make
4021 the access now so it gets done only once. Likewise if
4022 it contains TARGET. */
4023 if (GET_CODE (temp) == MEM && want_value
4024 && (MEM_VOLATILE_P (temp)
4025 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4026 temp = copy_to_reg (temp);
4027
4028 /* If TEMP is a VOIDmode constant, use convert_modes to make
4029 sure that we properly convert it. */
4030 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4031 {
4032 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4033 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4034 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4035 GET_MODE (target), temp,
4036 SUBREG_PROMOTED_UNSIGNED_P (target));
4037 }
4038
4039 convert_move (SUBREG_REG (target), temp,
4040 SUBREG_PROMOTED_UNSIGNED_P (target));
4041
4042 /* If we promoted a constant, change the mode back down to match
4043 target. Otherwise, the caller might get confused by a result whose
4044 mode is larger than expected. */
4045
4046 if (want_value && GET_MODE (temp) != GET_MODE (target))
4047 {
4048 if (GET_MODE (temp) != VOIDmode)
4049 {
4050 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4051 SUBREG_PROMOTED_VAR_P (temp) = 1;
4052 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4053 SUBREG_PROMOTED_UNSIGNED_P (target));
4054 }
4055 else
4056 temp = convert_modes (GET_MODE (target),
4057 GET_MODE (SUBREG_REG (target)),
4058 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4059 }
4060
4061 return want_value ? temp : NULL_RTX;
4062 }
4063 else
4064 {
4065 temp = expand_expr (exp, target, GET_MODE (target), 0);
4066 /* Return TARGET if it's a specified hardware register.
4067 If TARGET is a volatile mem ref, either return TARGET
4068 or return a reg copied *from* TARGET; ANSI requires this.
4069
4070 Otherwise, if TEMP is not TARGET, return TEMP
4071 if it is constant (for efficiency),
4072 or if we really want the correct value. */
4073 if (!(target && GET_CODE (target) == REG
4074 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4075 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4076 && ! rtx_equal_p (temp, target)
4077 && (CONSTANT_P (temp) || want_value))
4078 dont_return_target = 1;
4079 }
4080
4081 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4082 the same as that of TARGET, adjust the constant. This is needed, for
4083 example, in case it is a CONST_DOUBLE and we want only a word-sized
4084 value. */
4085 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4086 && TREE_CODE (exp) != ERROR_MARK
4087 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4088 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4089 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4090
4091 /* If value was not generated in the target, store it there.
4092 Convert the value to TARGET's type first if necessary.
4093 If TEMP and TARGET compare equal according to rtx_equal_p, but
4094 one or both of them are volatile memory refs, we have to distinguish
4095 two cases:
4096 - expand_expr has used TARGET. In this case, we must not generate
4097 another copy. This can be detected by TEMP and TARGET being equal
4098 according to ==.
4099 - expand_expr has not used TARGET - that means that the source just
4100 happens to have the same RTX form. Since temp will have been created
4101 by expand_expr, it will compare unequal according to == .
4102 We must generate a copy in this case, to reach the correct number
4103 of volatile memory references. */
4104
4105 if ((! rtx_equal_p (temp, target)
4106 || (temp != target && (side_effects_p (temp)
4107 || side_effects_p (target))))
4108 && TREE_CODE (exp) != ERROR_MARK
4109 && ! dont_store_target)
4110 {
4111 target = protect_from_queue (target, 1);
4112 if (GET_MODE (temp) != GET_MODE (target)
4113 && GET_MODE (temp) != VOIDmode)
4114 {
4115 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4116 if (dont_return_target)
4117 {
4118 /* In this case, we will return TEMP,
4119 so make sure it has the proper mode.
4120 But don't forget to store the value into TARGET. */
4121 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4122 emit_move_insn (target, temp);
4123 }
4124 else
4125 convert_move (target, temp, unsignedp);
4126 }
4127
4128 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4129 {
4130 /* Handle copying a string constant into an array. The string
4131 constant may be shorter than the array. So copy just the string's
4132 actual length, and clear the rest. First get the size of the data
4133 type of the string, which is actually the size of the target. */
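/* For illustration (hypothetical source, not from the original comment):
   with

       char buf[8] = "abc";

   the STRING_CST supplies 4 bytes (including the terminating NUL), so the
   code below copies those 4 bytes and then clears the remaining 4 bytes
   of the target.  */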
4134 rtx size = expr_size (exp);
4135
4136 if (GET_CODE (size) == CONST_INT
4137 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4138 emit_block_move (target, temp, size);
4139 else
4140 {
4141 /* Compute the size of the data to copy from the string. */
4142 tree copy_size
4143 = size_binop (MIN_EXPR,
4144 make_tree (sizetype, size),
4145 size_int (TREE_STRING_LENGTH (exp)));
4146 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4147 VOIDmode, 0);
4148 rtx label = 0;
4149
4150 /* Copy that much. */
4151 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4152 emit_block_move (target, temp, copy_size_rtx);
4153
4154 /* Figure out how much is left in TARGET that we have to clear.
4155 Do all calculations in ptr_mode. */
4156 if (GET_CODE (copy_size_rtx) == CONST_INT)
4157 {
4158 size = plus_constant (size, -INTVAL (copy_size_rtx));
4159 target = adjust_address (target, BLKmode,
4160 INTVAL (copy_size_rtx));
4161 }
4162 else
4163 {
4164 size = expand_binop (ptr_mode, sub_optab, size,
4165 copy_size_rtx, NULL_RTX, 0,
4166 OPTAB_LIB_WIDEN);
4167
4168 #ifdef POINTERS_EXTEND_UNSIGNED
4169 if (GET_MODE (copy_size_rtx) != Pmode)
4170 copy_size_rtx = convert_memory_address (Pmode,
4171 copy_size_rtx);
4172 #endif
4173
4174 target = offset_address (target, copy_size_rtx,
4175 highest_pow2_factor (copy_size));
4176 label = gen_label_rtx ();
4177 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4178 GET_MODE (size), 0, label);
4179 }
4180
4181 if (size != const0_rtx)
4182 clear_storage (target, size);
4183
4184 if (label)
4185 emit_label (label);
4186 }
4187 }
4188 /* Handle calls that return values in multiple non-contiguous locations.
4189 The Irix 6 ABI has examples of this. */
4190 else if (GET_CODE (target) == PARALLEL)
4191 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4192 else if (GET_MODE (temp) == BLKmode)
4193 emit_block_move (target, temp, expr_size (exp));
4194 else
4195 emit_move_insn (target, temp);
4196 }
4197
4198 /* If we don't want a value, return NULL_RTX. */
4199 if (! want_value)
4200 return NULL_RTX;
4201
4202 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4203 ??? The latter test doesn't seem to make sense. */
4204 else if (dont_return_target && GET_CODE (temp) != MEM)
4205 return temp;
4206
4207 /* Return TARGET itself if it is a hard register. */
4208 else if (want_value && GET_MODE (target) != BLKmode
4209 && ! (GET_CODE (target) == REG
4210 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4211 return copy_to_reg (target);
4212
4213 else
4214 return target;
4215 }
4216 \f
4217 /* Return 1 if EXP just contains zeros. */
4218
4219 static int
4220 is_zeros_p (exp)
4221 tree exp;
4222 {
4223 tree elt;
4224
4225 switch (TREE_CODE (exp))
4226 {
4227 case CONVERT_EXPR:
4228 case NOP_EXPR:
4229 case NON_LVALUE_EXPR:
4230 case VIEW_CONVERT_EXPR:
4231 return is_zeros_p (TREE_OPERAND (exp, 0));
4232
4233 case INTEGER_CST:
4234 return integer_zerop (exp);
4235
4236 case COMPLEX_CST:
4237 return
4238 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4239
4240 case REAL_CST:
4241 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4242
4243 case VECTOR_CST:
4244 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4245 elt = TREE_CHAIN (elt))
4246 if (!is_zeros_p (TREE_VALUE (elt)))
4247 return 0;
4248
4249 return 1;
4250
4251 case CONSTRUCTOR:
4252 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4253 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4254 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4255 if (! is_zeros_p (TREE_VALUE (elt)))
4256 return 0;
4257
4258 return 1;
4259
4260 default:
4261 return 0;
4262 }
4263 }
4264
4265 /* Return 1 if EXP contains mostly (3/4) zeros. */
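/* Illustrative example (an assumption about typical input): for

       int v[8] = { 0, 0, 5, 0, 0, 0, 0, 9 };

   six of the eight elements are zero, so 4 * 6 >= 3 * 8 holds and this
   function returns 1; store_constructor will then prefer to clear the
   whole object first and store only the nonzero elements.  */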
4266
4267 static int
4268 mostly_zeros_p (exp)
4269 tree exp;
4270 {
4271 if (TREE_CODE (exp) == CONSTRUCTOR)
4272 {
4273 int elts = 0, zeros = 0;
4274 tree elt = CONSTRUCTOR_ELTS (exp);
4275 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4276 {
4277 /* If there are no ranges of true bits, it is all zero. */
4278 return elt == NULL_TREE;
4279 }
4280 for (; elt; elt = TREE_CHAIN (elt))
4281 {
4282 /* We do not handle the case where the index is a RANGE_EXPR,
4283 so the statistic will be somewhat inaccurate.
4284 We do make a more accurate count in store_constructor itself,
4285 so since this function is only used for nested array elements,
4286 this should be close enough. */
4287 if (mostly_zeros_p (TREE_VALUE (elt)))
4288 zeros++;
4289 elts++;
4290 }
4291
4292 return 4 * zeros >= 3 * elts;
4293 }
4294
4295 return is_zeros_p (exp);
4296 }
4297 \f
4298 /* Helper function for store_constructor.
4299 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4300 TYPE is the type of the CONSTRUCTOR, not the element type.
4301 CLEARED is as for store_constructor.
4302 ALIAS_SET is the alias set to use for any stores.
4303
4304 This provides a recursive shortcut back to store_constructor when it isn't
4305 necessary to go through store_field. This is so that we can pass through
4306 the cleared field to let store_constructor know that we may not have to
4307 clear a substructure if the outer structure has already been cleared. */
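/* For example (illustrative only), in

       struct inner { int a, b; };
       struct outer { struct inner in; int c; } x = { { 1, 2 }, 3 };

   the value stored for field IN is itself a CONSTRUCTOR, so this helper
   recurses into store_constructor directly rather than expanding the
   nested constructor through store_field, and CLEARED tells the recursion
   whether X has already been zeroed.  */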
4308
4309 static void
4310 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4311 alias_set)
4312 rtx target;
4313 unsigned HOST_WIDE_INT bitsize;
4314 HOST_WIDE_INT bitpos;
4315 enum machine_mode mode;
4316 tree exp, type;
4317 int cleared;
4318 int alias_set;
4319 {
4320 if (TREE_CODE (exp) == CONSTRUCTOR
4321 && bitpos % BITS_PER_UNIT == 0
4322 /* If we have a non-zero bitpos for a register target, then we just
4323 let store_field do the bitfield handling. This is unlikely to
4324 generate unnecessary clear instructions anyway. */
4325 && (bitpos == 0 || GET_CODE (target) == MEM))
4326 {
4327 if (GET_CODE (target) == MEM)
4328 target
4329 = adjust_address (target,
4330 GET_MODE (target) == BLKmode
4331 || 0 != (bitpos
4332 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4333 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4334
4335
4336 /* Update the alias set, if required. */
4337 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4338 && MEM_ALIAS_SET (target) != 0)
4339 {
4340 target = copy_rtx (target);
4341 set_mem_alias_set (target, alias_set);
4342 }
4343
4344 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4345 }
4346 else
4347 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4348 alias_set);
4349 }
4350
4351 /* Store the value of constructor EXP into the rtx TARGET.
4352 TARGET is either a REG or a MEM; we know it cannot conflict, since
4353 safe_from_p has been called.
4354 CLEARED is true if TARGET is known to have been zero'd.
4355 SIZE is the number of bytes of TARGET we are allowed to modify: this
4356 may not be the same as the size of EXP if we are assigning to a field
4357 which has been packed to exclude padding bits. */
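/* Illustrative case (hypothetical source): for

       struct s { int a, b, c; };
       void f (void) { struct s x = { 1 }; }

   the constructor has fewer elements than the structure has fields, so
   the code below clears all of X first and then stores only field A.  */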
4358
4359 static void
4360 store_constructor (exp, target, cleared, size)
4361 tree exp;
4362 rtx target;
4363 int cleared;
4364 HOST_WIDE_INT size;
4365 {
4366 tree type = TREE_TYPE (exp);
4367 #ifdef WORD_REGISTER_OPERATIONS
4368 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4369 #endif
4370
4371 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4372 || TREE_CODE (type) == QUAL_UNION_TYPE)
4373 {
4374 tree elt;
4375
4376 /* We either clear the aggregate or indicate the value is dead. */
4377 if ((TREE_CODE (type) == UNION_TYPE
4378 || TREE_CODE (type) == QUAL_UNION_TYPE)
4379 && ! cleared
4380 && ! CONSTRUCTOR_ELTS (exp))
4381 /* If the constructor is empty, clear the union. */
4382 {
4383 clear_storage (target, expr_size (exp));
4384 cleared = 1;
4385 }
4386
4387 /* If we are building a static constructor into a register,
4388 set the initial value as zero so we can fold the value into
4389 a constant. But if more than one register is involved,
4390 this probably loses. */
4391 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4392 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4393 {
4394 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4395 cleared = 1;
4396 }
4397
4398 /* If the constructor has fewer fields than the structure
4399 or if we are initializing the structure to mostly zeros,
4400 clear the whole structure first. Don't do this if TARGET is a
4401 register whose mode size isn't equal to SIZE since clear_storage
4402 can't handle this case. */
4403 else if (! cleared && size > 0
4404 && ((list_length (CONSTRUCTOR_ELTS (exp))
4405 != fields_length (type))
4406 || mostly_zeros_p (exp))
4407 && (GET_CODE (target) != REG
4408 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4409 == size)))
4410 {
4411 clear_storage (target, GEN_INT (size));
4412 cleared = 1;
4413 }
4414
4415 if (! cleared)
4416 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4417
4418 /* Store each element of the constructor into
4419 the corresponding field of TARGET. */
4420
4421 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4422 {
4423 tree field = TREE_PURPOSE (elt);
4424 tree value = TREE_VALUE (elt);
4425 enum machine_mode mode;
4426 HOST_WIDE_INT bitsize;
4427 HOST_WIDE_INT bitpos = 0;
4428 int unsignedp;
4429 tree offset;
4430 rtx to_rtx = target;
4431
4432 /* Just ignore missing fields.
4433 We cleared the whole structure, above,
4434 if any fields are missing. */
4435 if (field == 0)
4436 continue;
4437
4438 if (cleared && is_zeros_p (value))
4439 continue;
4440
4441 if (host_integerp (DECL_SIZE (field), 1))
4442 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4443 else
4444 bitsize = -1;
4445
4446 unsignedp = TREE_UNSIGNED (field);
4447 mode = DECL_MODE (field);
4448 if (DECL_BIT_FIELD (field))
4449 mode = VOIDmode;
4450
4451 offset = DECL_FIELD_OFFSET (field);
4452 if (host_integerp (offset, 0)
4453 && host_integerp (bit_position (field), 0))
4454 {
4455 bitpos = int_bit_position (field);
4456 offset = 0;
4457 }
4458 else
4459 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4460
4461 if (offset)
4462 {
4463 rtx offset_rtx;
4464
4465 if (contains_placeholder_p (offset))
4466 offset = build (WITH_RECORD_EXPR, sizetype,
4467 offset, make_tree (TREE_TYPE (exp), target));
4468
4469 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4470 if (GET_CODE (to_rtx) != MEM)
4471 abort ();
4472
4473 if (GET_MODE (offset_rtx) != ptr_mode)
4474 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4475
4476 #ifdef POINTERS_EXTEND_UNSIGNED
4477 if (GET_MODE (offset_rtx) != Pmode)
4478 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4479 #endif
4480
4481 to_rtx = offset_address (to_rtx, offset_rtx,
4482 highest_pow2_factor (offset));
4483 }
4484
4485 if (TREE_READONLY (field))
4486 {
4487 if (GET_CODE (to_rtx) == MEM)
4488 to_rtx = copy_rtx (to_rtx);
4489
4490 RTX_UNCHANGING_P (to_rtx) = 1;
4491 }
4492
4493 #ifdef WORD_REGISTER_OPERATIONS
4494 /* If this initializes a field that is smaller than a word, at the
4495 start of a word, try to widen it to a full word.
4496 This special case allows us to output C++ member function
4497 initializations in a form that the optimizers can understand. */
4498 if (GET_CODE (target) == REG
4499 && bitsize < BITS_PER_WORD
4500 && bitpos % BITS_PER_WORD == 0
4501 && GET_MODE_CLASS (mode) == MODE_INT
4502 && TREE_CODE (value) == INTEGER_CST
4503 && exp_size >= 0
4504 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4505 {
4506 tree type = TREE_TYPE (value);
4507
4508 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4509 {
4510 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4511 value = convert (type, value);
4512 }
4513
4514 if (BYTES_BIG_ENDIAN)
4515 value
4516 = fold (build (LSHIFT_EXPR, type, value,
4517 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4518 bitsize = BITS_PER_WORD;
4519 mode = word_mode;
4520 }
4521 #endif
4522
4523 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4524 && DECL_NONADDRESSABLE_P (field))
4525 {
4526 to_rtx = copy_rtx (to_rtx);
4527 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4528 }
4529
4530 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4531 value, type, cleared,
4532 get_alias_set (TREE_TYPE (field)));
4533 }
4534 }
4535 else if (TREE_CODE (type) == ARRAY_TYPE
4536 || TREE_CODE (type) == VECTOR_TYPE)
4537 {
4538 tree elt;
4539 int i;
4540 int need_to_clear;
4541 tree domain = TYPE_DOMAIN (type);
4542 tree elttype = TREE_TYPE (type);
4543 int const_bounds_p;
4544 HOST_WIDE_INT minelt = 0;
4545 HOST_WIDE_INT maxelt = 0;
4546
4547 /* Vectors are like arrays, but the domain is stored via an array
4548 type indirectly. */
4549 if (TREE_CODE (type) == VECTOR_TYPE)
4550 {
4551 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4552 the same field as TYPE_DOMAIN, we are not guaranteed that
4553 it always will. */
4554 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4555 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4556 }
4557
4558 const_bounds_p = (TYPE_MIN_VALUE (domain)
4559 && TYPE_MAX_VALUE (domain)
4560 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4561 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4562
4563 /* If we have constant bounds for the range of the type, get them. */
4564 if (const_bounds_p)
4565 {
4566 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4567 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4568 }
4569
4570 /* If the constructor has fewer elements than the array,
4571 clear the whole array first. Similarly if this is
4572 a static constructor of a non-BLKmode object. */
4573 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4574 need_to_clear = 1;
4575 else
4576 {
4577 HOST_WIDE_INT count = 0, zero_count = 0;
4578 need_to_clear = ! const_bounds_p;
4579
4580 /* This loop is a more accurate version of the loop in
4581 mostly_zeros_p (it handles RANGE_EXPR in an index).
4582 It is also needed to check for missing elements. */
4583 for (elt = CONSTRUCTOR_ELTS (exp);
4584 elt != NULL_TREE && ! need_to_clear;
4585 elt = TREE_CHAIN (elt))
4586 {
4587 tree index = TREE_PURPOSE (elt);
4588 HOST_WIDE_INT this_node_count;
4589
4590 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4591 {
4592 tree lo_index = TREE_OPERAND (index, 0);
4593 tree hi_index = TREE_OPERAND (index, 1);
4594
4595 if (! host_integerp (lo_index, 1)
4596 || ! host_integerp (hi_index, 1))
4597 {
4598 need_to_clear = 1;
4599 break;
4600 }
4601
4602 this_node_count = (tree_low_cst (hi_index, 1)
4603 - tree_low_cst (lo_index, 1) + 1);
4604 }
4605 else
4606 this_node_count = 1;
4607
4608 count += this_node_count;
4609 if (mostly_zeros_p (TREE_VALUE (elt)))
4610 zero_count += this_node_count;
4611 }
4612
4613 /* Clear the entire array first if there are any missing elements,
4614 or if the incidence of zero elements is >= 75%. */
4615 if (! need_to_clear
4616 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4617 need_to_clear = 1;
4618 }
4619
4620 if (need_to_clear && size > 0)
4621 {
4622 if (! cleared)
4623 {
4624 if (REG_P (target))
4625 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4626 else
4627 clear_storage (target, GEN_INT (size));
4628 }
4629 cleared = 1;
4630 }
4631 else if (REG_P (target))
4632 /* Inform later passes that the old value is dead. */
4633 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4634
4635 /* Store each element of the constructor into
4636 the corresponding element of TARGET, determined
4637 by counting the elements. */
4638 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4639 elt;
4640 elt = TREE_CHAIN (elt), i++)
4641 {
4642 enum machine_mode mode;
4643 HOST_WIDE_INT bitsize;
4644 HOST_WIDE_INT bitpos;
4645 int unsignedp;
4646 tree value = TREE_VALUE (elt);
4647 tree index = TREE_PURPOSE (elt);
4648 rtx xtarget = target;
4649
4650 if (cleared && is_zeros_p (value))
4651 continue;
4652
4653 unsignedp = TREE_UNSIGNED (elttype);
4654 mode = TYPE_MODE (elttype);
4655 if (mode == BLKmode)
4656 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4657 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4658 : -1);
4659 else
4660 bitsize = GET_MODE_BITSIZE (mode);
4661
4662 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4663 {
4664 tree lo_index = TREE_OPERAND (index, 0);
4665 tree hi_index = TREE_OPERAND (index, 1);
4666 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4667 struct nesting *loop;
4668 HOST_WIDE_INT lo, hi, count;
4669 tree position;
4670
4671 /* If the range is constant and "small", unroll the loop. */
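/* A RANGE_EXPR index typically comes from a GNU C designated range
   initializer; this example is an assumption for illustration:

       int v[16] = { [2 ... 5] = 7 };

   For a small constant range like this the assignments are unrolled
   below; otherwise the else branch emits a runtime loop over INDEX.  */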
4672 if (const_bounds_p
4673 && host_integerp (lo_index, 0)
4674 && host_integerp (hi_index, 0)
4675 && (lo = tree_low_cst (lo_index, 0),
4676 hi = tree_low_cst (hi_index, 0),
4677 count = hi - lo + 1,
4678 (GET_CODE (target) != MEM
4679 || count <= 2
4680 || (host_integerp (TYPE_SIZE (elttype), 1)
4681 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4682 <= 40 * 8)))))
4683 {
4684 lo -= minelt; hi -= minelt;
4685 for (; lo <= hi; lo++)
4686 {
4687 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4688
4689 if (GET_CODE (target) == MEM
4690 && !MEM_KEEP_ALIAS_SET_P (target)
4691 && TREE_CODE (type) == ARRAY_TYPE
4692 && TYPE_NONALIASED_COMPONENT (type))
4693 {
4694 target = copy_rtx (target);
4695 MEM_KEEP_ALIAS_SET_P (target) = 1;
4696 }
4697
4698 store_constructor_field
4699 (target, bitsize, bitpos, mode, value, type, cleared,
4700 get_alias_set (elttype));
4701 }
4702 }
4703 else
4704 {
4705 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4706 loop_top = gen_label_rtx ();
4707 loop_end = gen_label_rtx ();
4708
4709 unsignedp = TREE_UNSIGNED (domain);
4710
4711 index = build_decl (VAR_DECL, NULL_TREE, domain);
4712
4713 index_r
4714 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4715 &unsignedp, 0));
4716 SET_DECL_RTL (index, index_r);
4717 if (TREE_CODE (value) == SAVE_EXPR
4718 && SAVE_EXPR_RTL (value) == 0)
4719 {
4720 /* Make sure value gets expanded once before the
4721 loop. */
4722 expand_expr (value, const0_rtx, VOIDmode, 0);
4723 emit_queue ();
4724 }
4725 store_expr (lo_index, index_r, 0);
4726 loop = expand_start_loop (0);
4727
4728 /* Assign value to element index. */
4729 position
4730 = convert (ssizetype,
4731 fold (build (MINUS_EXPR, TREE_TYPE (index),
4732 index, TYPE_MIN_VALUE (domain))));
4733 position = size_binop (MULT_EXPR, position,
4734 convert (ssizetype,
4735 TYPE_SIZE_UNIT (elttype)));
4736
4737 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4738 xtarget = offset_address (target, pos_rtx,
4739 highest_pow2_factor (position));
4740 xtarget = adjust_address (xtarget, mode, 0);
4741 if (TREE_CODE (value) == CONSTRUCTOR)
4742 store_constructor (value, xtarget, cleared,
4743 bitsize / BITS_PER_UNIT);
4744 else
4745 store_expr (value, xtarget, 0);
4746
4747 expand_exit_loop_if_false (loop,
4748 build (LT_EXPR, integer_type_node,
4749 index, hi_index));
4750
4751 expand_increment (build (PREINCREMENT_EXPR,
4752 TREE_TYPE (index),
4753 index, integer_one_node), 0, 0);
4754 expand_end_loop ();
4755 emit_label (loop_end);
4756 }
4757 }
4758 else if ((index != 0 && ! host_integerp (index, 0))
4759 || ! host_integerp (TYPE_SIZE (elttype), 1))
4760 {
4761 tree position;
4762
4763 if (index == 0)
4764 index = ssize_int (1);
4765
4766 if (minelt)
4767 index = convert (ssizetype,
4768 fold (build (MINUS_EXPR, TREE_TYPE (index), index,
4769 TYPE_MIN_VALUE (domain))));
4770
4771 position = size_binop (MULT_EXPR, index,
4772 convert (ssizetype,
4773 TYPE_SIZE_UNIT (elttype)));
4774 xtarget = offset_address (target,
4775 expand_expr (position, 0, VOIDmode, 0),
4776 highest_pow2_factor (position));
4777 xtarget = adjust_address (xtarget, mode, 0);
4778 store_expr (value, xtarget, 0);
4779 }
4780 else
4781 {
4782 if (index != 0)
4783 bitpos = ((tree_low_cst (index, 0) - minelt)
4784 * tree_low_cst (TYPE_SIZE (elttype), 1));
4785 else
4786 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4787
4788 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4789 && TREE_CODE (type) == ARRAY_TYPE
4790 && TYPE_NONALIASED_COMPONENT (type))
4791 {
4792 target = copy_rtx (target);
4793 MEM_KEEP_ALIAS_SET_P (target) = 1;
4794 }
4795
4796 store_constructor_field (target, bitsize, bitpos, mode, value,
4797 type, cleared, get_alias_set (elttype));
4798
4799 }
4800 }
4801 }
4802
4803 /* Set constructor assignments. */
4804 else if (TREE_CODE (type) == SET_TYPE)
4805 {
4806 tree elt = CONSTRUCTOR_ELTS (exp);
4807 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4808 tree domain = TYPE_DOMAIN (type);
4809 tree domain_min, domain_max, bitlength;
4810
4811 /* The default implementation strategy is to extract the constant
4812 parts of the constructor, use that to initialize the target,
4813 and then "or" in whatever non-constant ranges we need in addition.
4814
4815 If a large set is all zero or all ones, it is
4816 probably better to set it using memset (if available) or bzero.
4817 Also, if a large set has just a single range, it may also be
4818 better to first clear the whole set (using bzero/memset) and then
4819 set the bits we want. */
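/* SET_TYPE constructors come from front ends that have set types (GNU
   Pascal and CHILL, for instance; this note is added for illustration).
   For a 64-bit set whose constructor contains the single element 1 and
   the constant range 8..15, the code below assembles the constant words
   directly and emits stores for them; ranges that remain are handled by
   the loop further down, using memset when the bounds are byte-aligned
   constants or the __setbits library call otherwise.  */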
4820
4821 /* Check for all zeros. */
4822 if (elt == NULL_TREE && size > 0)
4823 {
4824 if (!cleared)
4825 clear_storage (target, GEN_INT (size));
4826 return;
4827 }
4828
4829 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4830 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4831 bitlength = size_binop (PLUS_EXPR,
4832 size_diffop (domain_max, domain_min),
4833 ssize_int (1));
4834
4835 nbits = tree_low_cst (bitlength, 1);
4836
4837 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4838 are "complicated" (more than one range), initialize (the
4839 constant parts) by copying from a constant. */
4840 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4841 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4842 {
4843 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4844 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4845 char *bit_buffer = (char *) alloca (nbits);
4846 HOST_WIDE_INT word = 0;
4847 unsigned int bit_pos = 0;
4848 unsigned int ibit = 0;
4849 unsigned int offset = 0; /* In bytes from beginning of set. */
4850
4851 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4852 for (;;)
4853 {
4854 if (bit_buffer[ibit])
4855 {
4856 if (BYTES_BIG_ENDIAN)
4857 word |= (1 << (set_word_size - 1 - bit_pos));
4858 else
4859 word |= 1 << bit_pos;
4860 }
4861
4862 bit_pos++; ibit++;
4863 if (bit_pos >= set_word_size || ibit == nbits)
4864 {
4865 if (word != 0 || ! cleared)
4866 {
4867 rtx datum = GEN_INT (word);
4868 rtx to_rtx;
4869
4870 /* The assumption here is that it is safe to use
4871 XEXP if the set is multi-word, but not if
4872 it's single-word. */
4873 if (GET_CODE (target) == MEM)
4874 to_rtx = adjust_address (target, mode, offset);
4875 else if (offset == 0)
4876 to_rtx = target;
4877 else
4878 abort ();
4879 emit_move_insn (to_rtx, datum);
4880 }
4881
4882 if (ibit == nbits)
4883 break;
4884 word = 0;
4885 bit_pos = 0;
4886 offset += set_word_size / BITS_PER_UNIT;
4887 }
4888 }
4889 }
4890 else if (!cleared)
4891 /* Don't bother clearing storage if the set is all ones. */
4892 if (TREE_CHAIN (elt) != NULL_TREE
4893 || (TREE_PURPOSE (elt) == NULL_TREE
4894 ? nbits != 1
4895 : ( ! host_integerp (TREE_VALUE (elt), 0)
4896 || ! host_integerp (TREE_PURPOSE (elt), 0)
4897 || (tree_low_cst (TREE_VALUE (elt), 0)
4898 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4899 != (HOST_WIDE_INT) nbits))))
4900 clear_storage (target, expr_size (exp));
4901
4902 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4903 {
4904 /* Start of range of element or NULL. */
4905 tree startbit = TREE_PURPOSE (elt);
4906 /* End of range of element, or element value. */
4907 tree endbit = TREE_VALUE (elt);
4908 #ifdef TARGET_MEM_FUNCTIONS
4909 HOST_WIDE_INT startb, endb;
4910 #endif
4911 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4912
4913 bitlength_rtx = expand_expr (bitlength,
4914 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4915
4916 /* Handle non-range tuple element like [ expr ]. */
4917 if (startbit == NULL_TREE)
4918 {
4919 startbit = save_expr (endbit);
4920 endbit = startbit;
4921 }
4922
4923 startbit = convert (sizetype, startbit);
4924 endbit = convert (sizetype, endbit);
4925 if (! integer_zerop (domain_min))
4926 {
4927 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4928 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4929 }
4930 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4931 EXPAND_CONST_ADDRESS);
4932 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4933 EXPAND_CONST_ADDRESS);
4934
4935 if (REG_P (target))
4936 {
4937 targetx
4938 = assign_temp
4939 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4940 TYPE_QUAL_CONST)),
4941 0, 1, 1);
4942 emit_move_insn (targetx, target);
4943 }
4944
4945 else if (GET_CODE (target) == MEM)
4946 targetx = target;
4947 else
4948 abort ();
4949
4950 #ifdef TARGET_MEM_FUNCTIONS
4951 /* Optimization: If startbit and endbit are
4952 constants divisible by BITS_PER_UNIT,
4953 call memset instead. */
4954 if (TREE_CODE (startbit) == INTEGER_CST
4955 && TREE_CODE (endbit) == INTEGER_CST
4956 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4957 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4958 {
4959 emit_library_call (memset_libfunc, LCT_NORMAL,
4960 VOIDmode, 3,
4961 plus_constant (XEXP (targetx, 0),
4962 startb / BITS_PER_UNIT),
4963 Pmode,
4964 constm1_rtx, TYPE_MODE (integer_type_node),
4965 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4966 TYPE_MODE (sizetype));
4967 }
4968 else
4969 #endif
4970 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4971 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4972 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4973 startbit_rtx, TYPE_MODE (sizetype),
4974 endbit_rtx, TYPE_MODE (sizetype));
4975
4976 if (REG_P (target))
4977 emit_move_insn (target, targetx);
4978 }
4979 }
4980
4981 else
4982 abort ();
4983 }
4984
4985 /* Store the value of EXP (an expression tree)
4986 into a subfield of TARGET which has mode MODE and occupies
4987 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4988 If MODE is VOIDmode, it means that we are storing into a bit-field.
4989
4990 If VALUE_MODE is VOIDmode, return nothing in particular.
4991 UNSIGNEDP is not used in this case.
4992
4993 Otherwise, return an rtx for the value stored. This rtx
4994 has mode VALUE_MODE if that is convenient to do.
4995 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4996
4997 TYPE is the type of the underlying object.
4998
4999 ALIAS_SET is the alias set for the destination. This value will
5000 (in general) be different from that for TARGET, since TARGET is a
5001 reference to the containing structure. */
5002
5003 static rtx
5004 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5005 alias_set)
5006 rtx target;
5007 HOST_WIDE_INT bitsize;
5008 HOST_WIDE_INT bitpos;
5009 enum machine_mode mode;
5010 tree exp;
5011 enum machine_mode value_mode;
5012 int unsignedp;
5013 tree type;
5014 int alias_set;
5015 {
5016 HOST_WIDE_INT width_mask = 0;
5017
5018 if (TREE_CODE (exp) == ERROR_MARK)
5019 return const0_rtx;
5020
5021 /* If we have nothing to store, do nothing unless the expression has
5022 side-effects. */
5023 if (bitsize == 0)
5024 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5025 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5026 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5027
5028 /* If we are storing into an unaligned field of an aligned union that is
5029 in a register, we may have the mode of TARGET being an integer mode but
5030 MODE == BLKmode. In that case, get an aligned object whose size and
5031 alignment are the same as TARGET and store TARGET into it (we can avoid
5032 the store if the field being stored is the entire width of TARGET). Then
5033 call ourselves recursively to store the field into a BLKmode version of
5034 that object. Finally, load from the object into TARGET. This is not
5035 very efficient in general, but should only be slightly more expensive
5036 than the otherwise-required unaligned accesses. Perhaps this can be
5037 cleaned up later. */
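/* Sketch of the case handled here (an assumption about how it arises,
   not from the original comment): with

       union u { int i; struct two_chars { char a, b; } s; };

   a store of a struct two_chars value into the S member of a union U
   object that the register allocator keeps in an integer register has
   MODE == BLKmode while TARGET has an integer mode, so the store goes
   through the stack temporary described above.  */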
5038
5039 if (mode == BLKmode
5040 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5041 {
5042 rtx object
5043 = assign_temp
5044 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5045 0, 1, 1);
5046 rtx blk_object = adjust_address (object, BLKmode, 0);
5047
5048 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5049 emit_move_insn (object, target);
5050
5051 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5052 alias_set);
5053
5054 emit_move_insn (target, object);
5055
5056 /* We want to return the BLKmode version of the data. */
5057 return blk_object;
5058 }
5059
5060 if (GET_CODE (target) == CONCAT)
5061 {
5062 /* We're storing into a struct containing a single __complex. */
5063
5064 if (bitpos != 0)
5065 abort ();
5066 return store_expr (exp, target, 0);
5067 }
5068
5069 /* If the structure is in a register or if the component
5070 is a bit field, we cannot use addressing to access it.
5071 Use bit-field techniques or SUBREG to store in it. */
5072
5073 if (mode == VOIDmode
5074 || (mode != BLKmode && ! direct_store[(int) mode]
5075 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5076 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5077 || GET_CODE (target) == REG
5078 || GET_CODE (target) == SUBREG
5079 /* If the field isn't aligned enough to store as an ordinary memref,
5080 store it as a bit field. */
5081 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5082 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5083 || bitpos % GET_MODE_ALIGNMENT (mode)))
5084 /* If the RHS and field are a constant size and the size of the
5085 RHS isn't the same size as the bitfield, we must use bitfield
5086 operations. */
5087 || (bitsize >= 0
5088 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5089 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5090 {
5091 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5092
5093 /* If BITSIZE is narrower than the size of the type of EXP
5094 we will be narrowing TEMP. Normally, what's wanted are the
5095 low-order bits. However, if EXP's type is a record and this is
5096 a big-endian machine, we want the upper BITSIZE bits. */
5097 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5098 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5099 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5100 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5101 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5102 - bitsize),
5103 temp, 1);
5104
5105 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5106 MODE. */
5107 if (mode != VOIDmode && mode != BLKmode
5108 && mode != TYPE_MODE (TREE_TYPE (exp)))
5109 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5110
5111 /* If the modes of TARGET and TEMP are both BLKmode, both
5112 must be in memory and BITPOS must be aligned on a byte
5113 boundary. If so, we simply do a block copy. */
5114 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5115 {
5116 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5117 || bitpos % BITS_PER_UNIT != 0)
5118 abort ();
5119
5120 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5121 emit_block_move (target, temp,
5122 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5123 / BITS_PER_UNIT));
5124
5125 return value_mode == VOIDmode ? const0_rtx : target;
5126 }
5127
5128 /* Store the value in the bitfield. */
5129 store_bit_field (target, bitsize, bitpos, mode, temp,
5130 int_size_in_bytes (type));
5131
5132 if (value_mode != VOIDmode)
5133 {
5134 /* The caller wants an rtx for the value.
5135 If possible, avoid refetching from the bitfield itself. */
5136 if (width_mask != 0
5137 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5138 {
5139 tree count;
5140 enum machine_mode tmode;
5141
5142 tmode = GET_MODE (temp);
5143 if (tmode == VOIDmode)
5144 tmode = value_mode;
5145
5146 if (unsignedp)
5147 return expand_and (tmode, temp,
5148 GEN_INT (trunc_int_for_mode (width_mask,
5149 tmode)),
5150 NULL_RTX);
5151
5152 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5153 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5154 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5155 }
5156
5157 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5158 NULL_RTX, value_mode, VOIDmode,
5159 int_size_in_bytes (type));
5160 }
5161 return const0_rtx;
5162 }
5163 else
5164 {
5165 rtx addr = XEXP (target, 0);
5166 rtx to_rtx = target;
5167
5168 /* If a value is wanted, it must be the lhs;
5169 so make the address stable for multiple use. */
5170
5171 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5172 && ! CONSTANT_ADDRESS_P (addr)
5173 /* A frame-pointer reference is already stable. */
5174 && ! (GET_CODE (addr) == PLUS
5175 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5176 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5177 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5178 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5179
5180 /* Now build a reference to just the desired component. */
5181
5182 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5183
5184 if (to_rtx == target)
5185 to_rtx = copy_rtx (to_rtx);
5186
5187 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5188 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5189 set_mem_alias_set (to_rtx, alias_set);
5190
5191 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5192 }
5193 }
5194 \f
5195 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5196 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5197 codes and find the ultimate containing object, which we return.
5198
5199 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5200 bit position, and *PUNSIGNEDP to the signedness of the field.
5201 If the position of the field is variable, we store a tree
5202 giving the variable offset (in units) in *POFFSET.
5203 This offset is in addition to the bit position.
5204 If the position is not variable, we store 0 in *POFFSET.
5205
5206 If any of the extraction expressions is volatile,
5207 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5208
5209 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5210 is a mode that can be used to access the field. In that case, *PBITSIZE
5211 is redundant.
5212
5213 If the field describes a variable-sized object, *PMODE is set to
5214 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5215 this case, but the address of the object can be found. */
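/* Worked example (illustrative, not part of the original comment): for

       struct s { int pad; struct { char c[4]; } inner; } *p;
       ... p->inner.c[2] ...

   this function peels the ARRAY_REF and COMPONENT_REF nodes and returns
   the INDIRECT_REF `*p' as the containing object, with *PBITSIZE = 8,
   *PBITPOS set to the constant bit offset of c[2] within the structure,
   and *POFFSET = 0 because the index is constant.  */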
5216
5217 tree
5218 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5219 punsignedp, pvolatilep)
5220 tree exp;
5221 HOST_WIDE_INT *pbitsize;
5222 HOST_WIDE_INT *pbitpos;
5223 tree *poffset;
5224 enum machine_mode *pmode;
5225 int *punsignedp;
5226 int *pvolatilep;
5227 {
5228 tree size_tree = 0;
5229 enum machine_mode mode = VOIDmode;
5230 tree offset = size_zero_node;
5231 tree bit_offset = bitsize_zero_node;
5232 tree placeholder_ptr = 0;
5233 tree tem;
5234
5235 /* First get the mode, signedness, and size. We do this from just the
5236 outermost expression. */
5237 if (TREE_CODE (exp) == COMPONENT_REF)
5238 {
5239 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5240 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5241 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5242
5243 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5244 }
5245 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5246 {
5247 size_tree = TREE_OPERAND (exp, 1);
5248 *punsignedp = TREE_UNSIGNED (exp);
5249 }
5250 else
5251 {
5252 mode = TYPE_MODE (TREE_TYPE (exp));
5253 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5254
5255 if (mode == BLKmode)
5256 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5257 else
5258 *pbitsize = GET_MODE_BITSIZE (mode);
5259 }
5260
5261 if (size_tree != 0)
5262 {
5263 if (! host_integerp (size_tree, 1))
5264 mode = BLKmode, *pbitsize = -1;
5265 else
5266 *pbitsize = tree_low_cst (size_tree, 1);
5267 }
5268
5269 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5270 and find the ultimate containing object. */
5271 while (1)
5272 {
5273 if (TREE_CODE (exp) == BIT_FIELD_REF)
5274 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5275 else if (TREE_CODE (exp) == COMPONENT_REF)
5276 {
5277 tree field = TREE_OPERAND (exp, 1);
5278 tree this_offset = DECL_FIELD_OFFSET (field);
5279
5280 /* If this field hasn't been filled in yet, don't go
5281 past it. This should only happen when folding expressions
5282 made during type construction. */
5283 if (this_offset == 0)
5284 break;
5285 else if (! TREE_CONSTANT (this_offset)
5286 && contains_placeholder_p (this_offset))
5287 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5288
5289 offset = size_binop (PLUS_EXPR, offset, this_offset);
5290 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5291 DECL_FIELD_BIT_OFFSET (field));
5292
5293 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5294 }
5295
5296 else if (TREE_CODE (exp) == ARRAY_REF
5297 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5298 {
5299 tree index = TREE_OPERAND (exp, 1);
5300 tree array = TREE_OPERAND (exp, 0);
5301 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5302 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5303 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5304
5305 /* We assume all arrays have sizes that are a multiple of a byte.
5306 First subtract the lower bound, if any, in the type of the
5307 index, then convert to sizetype and multiply by the size of the
5308 array element. */
5309 if (low_bound != 0 && ! integer_zerop (low_bound))
5310 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5311 index, low_bound));
5312
5313 /* If the index has a self-referential type, pass it to a
5314 WITH_RECORD_EXPR; if the component size is self-referential, pass
5315 our component to one. */
5316 if (! TREE_CONSTANT (index)
5317 && contains_placeholder_p (index))
5318 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5319 if (! TREE_CONSTANT (unit_size)
5320 && contains_placeholder_p (unit_size))
5321 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5322
5323 offset = size_binop (PLUS_EXPR, offset,
5324 size_binop (MULT_EXPR,
5325 convert (sizetype, index),
5326 unit_size));
5327 }
5328
5329 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5330 {
5331 tree new = find_placeholder (exp, &placeholder_ptr);
5332
5333 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5334 We might have been called from tree optimization where we
5335 haven't set up an object yet. */
5336 if (new == 0)
5337 break;
5338 else
5339 exp = new;
5340
5341 continue;
5342 }
5343 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5344 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5345 && ! ((TREE_CODE (exp) == NOP_EXPR
5346 || TREE_CODE (exp) == CONVERT_EXPR)
5347 && (TYPE_MODE (TREE_TYPE (exp))
5348 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5349 break;
5350
5351 /* If any reference in the chain is volatile, the effect is volatile. */
5352 if (TREE_THIS_VOLATILE (exp))
5353 *pvolatilep = 1;
5354
5355 exp = TREE_OPERAND (exp, 0);
5356 }
5357
5358 /* If OFFSET is constant, see if we can return the whole thing as a
5359 constant bit position. Otherwise, split it up. */
5360 if (host_integerp (offset, 0)
5361 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5362 bitsize_unit_node))
5363 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5364 && host_integerp (tem, 0))
5365 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5366 else
5367 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5368
5369 *pmode = mode;
5370 return exp;
5371 }
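/* Illustrative sketch, not part of the original file: for an ARRAY_REF
   such as a[i] whose domain has lower bound 1 and whose element type is
   4 bytes wide, the loop above accumulates roughly

     offset     = (i - 1) * 4        (bytes, as a sizetype tree)
     bit_offset = 0

   while a COMPONENT_REF adds the field's DECL_FIELD_OFFSET to OFFSET and
   its DECL_FIELD_BIT_OFFSET to BIT_OFFSET.  When everything is constant,
   the code above then folds the result into a single bit position as
   OFFSET * BITS_PER_UNIT + BIT_OFFSET.  */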
5372
5373 /* Return 1 if T is an expression that get_inner_reference handles. */
5374
5375 int
5376 handled_component_p (t)
5377 tree t;
5378 {
5379 switch (TREE_CODE (t))
5380 {
5381 case BIT_FIELD_REF:
5382 case COMPONENT_REF:
5383 case ARRAY_REF:
5384 case ARRAY_RANGE_REF:
5385 case NON_LVALUE_EXPR:
5386 case VIEW_CONVERT_EXPR:
5387 return 1;
5388
5389 case NOP_EXPR:
5390 case CONVERT_EXPR:
5391 return (TYPE_MODE (TREE_TYPE (t))
5392 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5393
5394 default:
5395 return 0;
5396 }
5397 }
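/* Illustrative sketch, not part of the original file: a caller that only
   needs the base object of a reference can peel handled components with

     while (handled_component_p (t))
       t = TREE_OPERAND (t, 0);

   get_inner_reference above performs the same walk while also
   accumulating the bit position, offset, mode and volatility.  */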
5398 \f
5399 /* Given an rtx VALUE that may contain additions and multiplications, return
5400 an equivalent value that just refers to a register, memory, or constant.
5401 This is done by generating instructions to perform the arithmetic and
5402 returning a pseudo-register containing the value.
5403
5404 The returned value may be a REG, SUBREG, MEM or constant. */
5405
5406 rtx
5407 force_operand (value, target)
5408 rtx value, target;
5409 {
5410 optab binoptab = 0;
5411 /* Use a temporary to force order of execution of calls to
5412 `force_operand'. */
5413 rtx tmp;
5414 rtx op2;
5415 /* Use subtarget as the target for operand 0 of a binary operation. */
5416 rtx subtarget = get_subtarget (target);
5417
5418 /* Check for a PIC address load. */
5419 if ((GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5420 && XEXP (value, 0) == pic_offset_table_rtx
5421 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5422 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5423 || GET_CODE (XEXP (value, 1)) == CONST))
5424 {
5425 if (!subtarget)
5426 subtarget = gen_reg_rtx (GET_MODE (value));
5427 emit_move_insn (subtarget, value);
5428 return subtarget;
5429 }
5430
5431 if (GET_CODE (value) == PLUS)
5432 binoptab = add_optab;
5433 else if (GET_CODE (value) == MINUS)
5434 binoptab = sub_optab;
5435 else if (GET_CODE (value) == MULT)
5436 {
5437 op2 = XEXP (value, 1);
5438 if (!CONSTANT_P (op2)
5439 && !(GET_CODE (op2) == REG && op2 != subtarget))
5440 subtarget = 0;
5441 tmp = force_operand (XEXP (value, 0), subtarget);
5442 return expand_mult (GET_MODE (value), tmp,
5443 force_operand (op2, NULL_RTX),
5444 target, 1);
5445 }
5446
5447 if (binoptab)
5448 {
5449 op2 = XEXP (value, 1);
5450 if (!CONSTANT_P (op2)
5451 && !(GET_CODE (op2) == REG && op2 != subtarget))
5452 subtarget = 0;
5453 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5454 {
5455 binoptab = add_optab;
5456 op2 = negate_rtx (GET_MODE (value), op2);
5457 }
5458
5459 /* Check for an addition with OP2 a constant integer and our first
5460 operand a PLUS of a virtual register and something else. In that
5461 case, we want to emit the sum of the virtual register and the
5462 constant first and then add the other value. This allows virtual
5463 register instantiation to simply modify the constant rather than
5464 creating another one around this addition. */
5465 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5466 && GET_CODE (XEXP (value, 0)) == PLUS
5467 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5468 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5469 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5470 {
5471 rtx temp = expand_binop (GET_MODE (value), binoptab,
5472 XEXP (XEXP (value, 0), 0), op2,
5473 subtarget, 0, OPTAB_LIB_WIDEN);
5474 return expand_binop (GET_MODE (value), binoptab, temp,
5475 force_operand (XEXP (XEXP (value, 0), 1), 0),
5476 target, 0, OPTAB_LIB_WIDEN);
5477 }
5478
5479 tmp = force_operand (XEXP (value, 0), subtarget);
5480 return expand_binop (GET_MODE (value), binoptab, tmp,
5481 force_operand (op2, NULL_RTX),
5482 target, 0, OPTAB_LIB_WIDEN);
5483 /* We give UNSIGNEDP = 0 to expand_binop
5484 because the only operations we are expanding here are signed ones. */
5485 }
5486
5487 #ifdef INSN_SCHEDULING
5488 /* On machines that have insn scheduling, we want all memory references to be
5489 explicit, so we need to deal with such paradoxical SUBREGs. */
5490 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5491 && (GET_MODE_SIZE (GET_MODE (value))
5492 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5493 value
5494 = simplify_gen_subreg (GET_MODE (value),
5495 force_reg (GET_MODE (SUBREG_REG (value)),
5496 force_operand (SUBREG_REG (value),
5497 NULL_RTX)),
5498 GET_MODE (SUBREG_REG (value)),
5499 SUBREG_BYTE (value));
5500 #endif
5501
5502 return value;
5503 }
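/* Illustrative sketch, not part of the original file: a typical use of
   force_operand is to legitimize address arithmetic before forming a
   memory reference, e.g.

     rtx addr = gen_rtx_PLUS (Pmode, base_reg, index_reg);
     addr = force_operand (addr, NULL_RTX);

   where BASE_REG and INDEX_REG are hypothetical pseudos; the result is
   a REG, SUBREG, MEM or constant as documented above.  */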
5504 \f
5505 /* Subroutine of expand_expr: return nonzero iff there is no way that
5506 EXP can reference X, which is being modified. TOP_P is nonzero if this
5507 call is going to be used to determine whether we need a temporary
5508 for EXP, as opposed to a recursive call to this function.
5509
5510 It is always safe for this routine to return zero since it merely
5511 searches for optimization opportunities. */
5512
5513 int
5514 safe_from_p (x, exp, top_p)
5515 rtx x;
5516 tree exp;
5517 int top_p;
5518 {
5519 rtx exp_rtl = 0;
5520 int i, nops;
5521 static tree save_expr_list;
5522
5523 if (x == 0
5524 /* If EXP has varying size, we MUST use a target since we currently
5525 have no way of allocating temporaries of variable size
5526 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5527 So we assume here that something at a higher level has prevented a
5528 clash. This is somewhat bogus, but the best we can do. Only
5529 do this when X is BLKmode and when we are at the top level. */
5530 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5531 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5532 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5533 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5534 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5535 != INTEGER_CST)
5536 && GET_MODE (x) == BLKmode)
5537 /* If X is in the outgoing argument area, it is always safe. */
5538 || (GET_CODE (x) == MEM
5539 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5540 || (GET_CODE (XEXP (x, 0)) == PLUS
5541 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5542 return 1;
5543
5544 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5545 find the underlying pseudo. */
5546 if (GET_CODE (x) == SUBREG)
5547 {
5548 x = SUBREG_REG (x);
5549 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5550 return 0;
5551 }
5552
5553 /* A SAVE_EXPR might appear many times in the expression passed to the
5554 top-level safe_from_p call, and if it has a complex subexpression,
5555 examining it multiple times could result in a combinatorial explosion.
5556 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5557 with optimization took about 28 minutes to compile -- even though it was
5558 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5559 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5560 we have processed. Note that the only test of top_p was above. */
5561
5562 if (top_p)
5563 {
5564 int rtn;
5565 tree t;
5566
5567 save_expr_list = 0;
5568
5569 rtn = safe_from_p (x, exp, 0);
5570
5571 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5572 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5573
5574 return rtn;
5575 }
5576
5577 /* Now look at our tree code and possibly recurse. */
5578 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5579 {
5580 case 'd':
5581 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5582 break;
5583
5584 case 'c':
5585 return 1;
5586
5587 case 'x':
5588 if (TREE_CODE (exp) == TREE_LIST)
5589 return ((TREE_VALUE (exp) == 0
5590 || safe_from_p (x, TREE_VALUE (exp), 0))
5591 && (TREE_CHAIN (exp) == 0
5592 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5593 else if (TREE_CODE (exp) == ERROR_MARK)
5594 return 1; /* An already-visited SAVE_EXPR? */
5595 else
5596 return 0;
5597
5598 case '1':
5599 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5600
5601 case '2':
5602 case '<':
5603 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5604 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5605
5606 case 'e':
5607 case 'r':
5608 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5609 the expression. If it is set, we conflict iff we are that rtx or
5610 both are in memory. Otherwise, we check all operands of the
5611 expression recursively. */
5612
5613 switch (TREE_CODE (exp))
5614 {
5615 case ADDR_EXPR:
5616 /* If the operand is static or we are static, we can't conflict.
5617 Likewise if we don't conflict with the operand at all. */
5618 if (staticp (TREE_OPERAND (exp, 0))
5619 || TREE_STATIC (exp)
5620 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5621 return 1;
5622
5623 /* Otherwise, the only way this can conflict is if we are taking
5624 the address of a DECL whose address is part of X, which is
5625 very rare. */
5626 exp = TREE_OPERAND (exp, 0);
5627 if (DECL_P (exp))
5628 {
5629 if (!DECL_RTL_SET_P (exp)
5630 || GET_CODE (DECL_RTL (exp)) != MEM)
5631 return 0;
5632 else
5633 exp_rtl = XEXP (DECL_RTL (exp), 0);
5634 }
5635 break;
5636
5637 case INDIRECT_REF:
5638 if (GET_CODE (x) == MEM
5639 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5640 get_alias_set (exp)))
5641 return 0;
5642 break;
5643
5644 case CALL_EXPR:
5645 /* Assume that the call will clobber all hard registers and
5646 all of memory. */
5647 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5648 || GET_CODE (x) == MEM)
5649 return 0;
5650 break;
5651
5652 case RTL_EXPR:
5653 /* If a sequence exists, we would have to scan every instruction
5654 in the sequence to see if it was safe. This is probably not
5655 worthwhile. */
5656 if (RTL_EXPR_SEQUENCE (exp))
5657 return 0;
5658
5659 exp_rtl = RTL_EXPR_RTL (exp);
5660 break;
5661
5662 case WITH_CLEANUP_EXPR:
5663 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5664 break;
5665
5666 case CLEANUP_POINT_EXPR:
5667 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5668
5669 case SAVE_EXPR:
5670 exp_rtl = SAVE_EXPR_RTL (exp);
5671 if (exp_rtl)
5672 break;
5673
5674 /* If we've already scanned this, don't do it again. Otherwise,
5675 mark it as scanned and record it so the flag can be cleared
5676 when we are done. */
5677 if (TREE_PRIVATE (exp))
5678 return 1;
5679
5680 TREE_PRIVATE (exp) = 1;
5681 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5682 {
5683 TREE_PRIVATE (exp) = 0;
5684 return 0;
5685 }
5686
5687 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5688 return 1;
5689
5690 case BIND_EXPR:
5691 /* The only operand we look at is operand 1. The rest aren't
5692 part of the expression. */
5693 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5694
5695 case METHOD_CALL_EXPR:
5696 /* This takes an rtx argument, but shouldn't appear here. */
5697 abort ();
5698
5699 default:
5700 break;
5701 }
5702
5703 /* If we have an rtx, we do not need to scan our operands. */
5704 if (exp_rtl)
5705 break;
5706
5707 nops = first_rtl_op (TREE_CODE (exp));
5708 for (i = 0; i < nops; i++)
5709 if (TREE_OPERAND (exp, i) != 0
5710 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5711 return 0;
5712
5713 /* If this is a language-specific tree code, it may require
5714 special handling. */
5715 if ((unsigned int) TREE_CODE (exp)
5716 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5717 && !(*lang_hooks.safe_from_p) (x, exp))
5718 return 0;
5719 }
5720
5721 /* If we have an rtl, find any enclosed object. Then see if we conflict
5722 with it. */
5723 if (exp_rtl)
5724 {
5725 if (GET_CODE (exp_rtl) == SUBREG)
5726 {
5727 exp_rtl = SUBREG_REG (exp_rtl);
5728 if (GET_CODE (exp_rtl) == REG
5729 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5730 return 0;
5731 }
5732
5733 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5734 are memory and they conflict. */
5735 return ! (rtx_equal_p (x, exp_rtl)
5736 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5737 && true_dependence (exp_rtl, VOIDmode, x,
5738 rtx_addr_varies_p)));
5739 }
5740
5741 /* If we reach here, it is safe. */
5742 return 1;
5743 }
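/* Illustrative sketch, not part of the original file: callers use
   safe_from_p to decide whether an existing TARGET may be stored into
   while EXP is still being evaluated, as the CONSTRUCTOR case of
   expand_expr below does (simplified):

     if (target == 0 || ! safe_from_p (target, exp, 1))
       target = assign_temp (type, 0, TREE_ADDRESSABLE (exp), 1);

   Returning 0 merely forces a temporary, so a conservative answer is
   always correct.  */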
5744
5745 /* Subroutine of expand_expr: return rtx if EXP is a
5746 variable or parameter; else return 0. */
5747
5748 static rtx
5749 var_rtx (exp)
5750 tree exp;
5751 {
5752 STRIP_NOPS (exp);
5753 switch (TREE_CODE (exp))
5754 {
5755 case PARM_DECL:
5756 case VAR_DECL:
5757 return DECL_RTL (exp);
5758 default:
5759 return 0;
5760 }
5761 }
5762
5763 #ifdef MAX_INTEGER_COMPUTATION_MODE
5764
5765 void
5766 check_max_integer_computation_mode (exp)
5767 tree exp;
5768 {
5769 enum tree_code code;
5770 enum machine_mode mode;
5771
5772 /* Strip any NOPs that don't change the mode. */
5773 STRIP_NOPS (exp);
5774 code = TREE_CODE (exp);
5775
5776 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5777 if (code == NOP_EXPR
5778 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5779 return;
5780
5781 /* First check the type of the overall operation. We need only look at
5782 unary, binary and relational operations. */
5783 if (TREE_CODE_CLASS (code) == '1'
5784 || TREE_CODE_CLASS (code) == '2'
5785 || TREE_CODE_CLASS (code) == '<')
5786 {
5787 mode = TYPE_MODE (TREE_TYPE (exp));
5788 if (GET_MODE_CLASS (mode) == MODE_INT
5789 && mode > MAX_INTEGER_COMPUTATION_MODE)
5790 internal_error ("unsupported wide integer operation");
5791 }
5792
5793 /* Check operand of a unary op. */
5794 if (TREE_CODE_CLASS (code) == '1')
5795 {
5796 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5797 if (GET_MODE_CLASS (mode) == MODE_INT
5798 && mode > MAX_INTEGER_COMPUTATION_MODE)
5799 internal_error ("unsupported wide integer operation");
5800 }
5801
5802 /* Check operands of a binary/comparison op. */
5803 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5804 {
5805 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5806 if (GET_MODE_CLASS (mode) == MODE_INT
5807 && mode > MAX_INTEGER_COMPUTATION_MODE)
5808 internal_error ("unsupported wide integer operation");
5809
5810 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5811 if (GET_MODE_CLASS (mode) == MODE_INT
5812 && mode > MAX_INTEGER_COMPUTATION_MODE)
5813 internal_error ("unsupported wide integer operation");
5814 }
5815 }
5816 #endif
5817 \f
5818 /* Return the highest power of two that EXP is known to be a multiple of.
5819 This is used in updating alignment of MEMs in array references. */
5820
5821 static HOST_WIDE_INT
5822 highest_pow2_factor (exp)
5823 tree exp;
5824 {
5825 HOST_WIDE_INT c0, c1;
5826
5827 switch (TREE_CODE (exp))
5828 {
5829 case INTEGER_CST:
5830 /* We can find the lowest bit that's a one. If the low
5831 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5832 We need to handle this case since we can find it in a COND_EXPR,
5833 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5834 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5835 later ICE. */
5836 if (TREE_CONSTANT_OVERFLOW (exp))
5837 return BIGGEST_ALIGNMENT;
5838 else
5839 {
5840 /* Note: tree_low_cst is intentionally not used here;
5841 we don't care about the upper bits. */
5842 c0 = TREE_INT_CST_LOW (exp);
5843 c0 &= -c0;
5844 return c0 ? c0 : BIGGEST_ALIGNMENT;
5845 }
5846 break;
5847
5848 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5849 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5850 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5851 return MIN (c0, c1);
5852
5853 case MULT_EXPR:
5854 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5855 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5856 return c0 * c1;
5857
5858 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5859 case CEIL_DIV_EXPR:
5860 if (integer_pow2p (TREE_OPERAND (exp, 1))
5861 && host_integerp (TREE_OPERAND (exp, 1), 1))
5862 {
5863 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5864 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5865 return MAX (1, c0 / c1);
5866 }
5867 break;
5868
5869 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5870 case SAVE_EXPR: case WITH_RECORD_EXPR:
5871 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5872
5873 case COMPOUND_EXPR:
5874 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5875
5876 case COND_EXPR:
5877 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5878 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5879 return MIN (c0, c1);
5880
5881 default:
5882 break;
5883 }
5884
5885 return 1;
5886 }
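/* Illustrative sketch, not part of the original file: in the INTEGER_CST
   case above, "c0 &= -c0" isolates the lowest set bit of the constant,
   which is the largest power of two dividing it.  For example, c0 == 12
   (binary 1100) gives c0 & -c0 == 4, so highest_pow2_factor of the
   constant 12 is 4; a constant whose low HOST_BITS_PER_WIDE_INT bits are
   all zero yields BIGGEST_ALIGNMENT instead, as described above.  */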
5887 \f
5888 /* Return an object on the placeholder list that matches EXP, a
5889 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5890 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5891 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5892 is the address of a pointer that initially gives the starting position in
5893 the placeholder list (zero means start of the list); on return it is set
5894 to point to the placeholder list entry at which the object was found. */
5895
5896 tree
5897 find_placeholder (exp, plist)
5898 tree exp;
5899 tree *plist;
5900 {
5901 tree type = TREE_TYPE (exp);
5902 tree placeholder_expr;
5903
5904 for (placeholder_expr
5905 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5906 placeholder_expr != 0;
5907 placeholder_expr = TREE_CHAIN (placeholder_expr))
5908 {
5909 tree need_type = TYPE_MAIN_VARIANT (type);
5910 tree elt;
5911
5912 /* Find the outermost reference that is of the type we want. If none,
5913 see if any object has a type that is a pointer to the type we
5914 want. */
5915 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5916 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5917 || TREE_CODE (elt) == COND_EXPR)
5918 ? TREE_OPERAND (elt, 1)
5919 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5920 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5921 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5922 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5923 ? TREE_OPERAND (elt, 0) : 0))
5924 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5925 {
5926 if (plist)
5927 *plist = placeholder_expr;
5928 return elt;
5929 }
5930
5931 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5932 elt
5933 = ((TREE_CODE (elt) == COMPOUND_EXPR
5934 || TREE_CODE (elt) == COND_EXPR)
5935 ? TREE_OPERAND (elt, 1)
5936 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5937 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5938 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5939 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5940 ? TREE_OPERAND (elt, 0) : 0))
5941 if (POINTER_TYPE_P (TREE_TYPE (elt))
5942 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5943 == need_type))
5944 {
5945 if (plist)
5946 *plist = placeholder_expr;
5947 return build1 (INDIRECT_REF, need_type, elt);
5948 }
5949 }
5950
5951 return 0;
5952 }
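/* Illustrative sketch, not part of the original file: the list searched
   above is maintained by the WITH_RECORD_EXPR case of expand_expr below,
   roughly

     placeholder_list = tree_cons (object, NULL_TREE, placeholder_list);
     result = expand_expr (inner, ...);
     placeholder_list = TREE_CHAIN (placeholder_list);

   so find_placeholder sees the innermost enclosing object first.  */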
5953 \f
5954 /* expand_expr: generate code for computing expression EXP.
5955 An rtx for the computed value is returned. The value is never null.
5956 In the case of a void EXP, const0_rtx is returned.
5957
5958 The value may be stored in TARGET if TARGET is nonzero.
5959 TARGET is just a suggestion; callers must assume that
5960 the rtx returned may not be the same as TARGET.
5961
5962 If TARGET is CONST0_RTX, it means that the value will be ignored.
5963
5964 If TMODE is not VOIDmode, it suggests generating the
5965 result in mode TMODE. But this is done only when convenient.
5966 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5967 TMODE is just a suggestion; callers must assume that
5968 the rtx returned may not have mode TMODE.
5969
5970 Note that TARGET may have neither TMODE nor MODE. In that case, it
5971 probably will not be used.
5972
5973 If MODIFIER is EXPAND_SUM then when EXP is an addition
5974 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5975 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5976 products as above, or REG or MEM, or constant.
5977 Ordinarily in such cases we would output mul or add instructions
5978 and then return a pseudo reg containing the sum.
5979
5980 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5981 it also marks a label as absolutely required (it can't be dead).
5982 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5983 This is used for outputting expressions used in initializers.
5984
5985 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5986 with a constant address even if that address is not normally legitimate.
5987 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5988
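/* Illustrative sketch, not part of the original file: the INDIRECT_REF
   case below uses EXPAND_SUM so that address arithmetic can stay in
   symbolic form until memory_address legitimizes it:

     op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
     op0 = memory_address (mode, op0);

   Most other callers simply pass EXPAND_NORMAL (0) and a TARGET hint.  */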
5989 rtx
5990 expand_expr (exp, target, tmode, modifier)
5991 tree exp;
5992 rtx target;
5993 enum machine_mode tmode;
5994 enum expand_modifier modifier;
5995 {
5996 rtx op0, op1, temp;
5997 tree type = TREE_TYPE (exp);
5998 int unsignedp = TREE_UNSIGNED (type);
5999 enum machine_mode mode;
6000 enum tree_code code = TREE_CODE (exp);
6001 optab this_optab;
6002 rtx subtarget, original_target;
6003 int ignore;
6004 tree context;
6005
6006 /* Handle ERROR_MARK before anybody tries to access its type. */
6007 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6008 {
6009 op0 = CONST0_RTX (tmode);
6010 if (op0 != 0)
6011 return op0;
6012 return const0_rtx;
6013 }
6014
6015 mode = TYPE_MODE (type);
6016 /* Use subtarget as the target for operand 0 of a binary operation. */
6017 subtarget = get_subtarget (target);
6018 original_target = target;
6019 ignore = (target == const0_rtx
6020 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6021 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6022 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6023 && TREE_CODE (type) == VOID_TYPE));
6024
6025 /* If we are going to ignore this result, we need only do something
6026 if there is a side-effect somewhere in the expression. If there
6027 is, short-circuit the most common cases here. Note that we must
6028 not call expand_expr with anything but const0_rtx in case this
6029 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6030
6031 if (ignore)
6032 {
6033 if (! TREE_SIDE_EFFECTS (exp))
6034 return const0_rtx;
6035
6036 /* Ensure we reference a volatile object even if value is ignored, but
6037 don't do this if all we are doing is taking its address. */
6038 if (TREE_THIS_VOLATILE (exp)
6039 && TREE_CODE (exp) != FUNCTION_DECL
6040 && mode != VOIDmode && mode != BLKmode
6041 && modifier != EXPAND_CONST_ADDRESS)
6042 {
6043 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6044 if (GET_CODE (temp) == MEM)
6045 temp = copy_to_reg (temp);
6046 return const0_rtx;
6047 }
6048
6049 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6050 || code == INDIRECT_REF || code == BUFFER_REF)
6051 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6052 modifier);
6053
6054 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6055 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6056 {
6057 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6058 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6059 return const0_rtx;
6060 }
6061 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6062 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6063 /* If the second operand has no side effects, just evaluate
6064 the first. */
6065 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6066 modifier);
6067 else if (code == BIT_FIELD_REF)
6068 {
6069 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6070 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6071 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6072 return const0_rtx;
6073 }
6074
6075 target = 0;
6076 }
6077
6078 #ifdef MAX_INTEGER_COMPUTATION_MODE
6079 /* Only check stuff here if the mode we want is different from the mode
6080 of the expression; if it's the same, check_max_integer_computation_mode
6081 will handle it. Do we really need to check this stuff at all? */
6082
6083 if (target
6084 && GET_MODE (target) != mode
6085 && TREE_CODE (exp) != INTEGER_CST
6086 && TREE_CODE (exp) != PARM_DECL
6087 && TREE_CODE (exp) != ARRAY_REF
6088 && TREE_CODE (exp) != ARRAY_RANGE_REF
6089 && TREE_CODE (exp) != COMPONENT_REF
6090 && TREE_CODE (exp) != BIT_FIELD_REF
6091 && TREE_CODE (exp) != INDIRECT_REF
6092 && TREE_CODE (exp) != CALL_EXPR
6093 && TREE_CODE (exp) != VAR_DECL
6094 && TREE_CODE (exp) != RTL_EXPR)
6095 {
6096 enum machine_mode mode = GET_MODE (target);
6097
6098 if (GET_MODE_CLASS (mode) == MODE_INT
6099 && mode > MAX_INTEGER_COMPUTATION_MODE)
6100 internal_error ("unsupported wide integer operation");
6101 }
6102
6103 if (tmode != mode
6104 && TREE_CODE (exp) != INTEGER_CST
6105 && TREE_CODE (exp) != PARM_DECL
6106 && TREE_CODE (exp) != ARRAY_REF
6107 && TREE_CODE (exp) != ARRAY_RANGE_REF
6108 && TREE_CODE (exp) != COMPONENT_REF
6109 && TREE_CODE (exp) != BIT_FIELD_REF
6110 && TREE_CODE (exp) != INDIRECT_REF
6111 && TREE_CODE (exp) != VAR_DECL
6112 && TREE_CODE (exp) != CALL_EXPR
6113 && TREE_CODE (exp) != RTL_EXPR
6114 && GET_MODE_CLASS (tmode) == MODE_INT
6115 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6116 internal_error ("unsupported wide integer operation");
6117
6118 check_max_integer_computation_mode (exp);
6119 #endif
6120
6121 /* If we will do cse, generate all results into pseudo registers
6122 since 1) that allows cse to find more things
6123 and 2) otherwise cse could produce an insn the machine
6124 cannot support. An exception is a CONSTRUCTOR into a multi-word
6125 MEM: that is much more likely to be efficient stored directly into the MEM. */
6126
6127 if (! cse_not_expected && mode != BLKmode && target
6128 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6129 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6130 target = subtarget;
6131
6132 switch (code)
6133 {
6134 case LABEL_DECL:
6135 {
6136 tree function = decl_function_context (exp);
6137 /* Handle using a label in a containing function. */
6138 if (function != current_function_decl
6139 && function != inline_function_decl && function != 0)
6140 {
6141 struct function *p = find_function_data (function);
6142 p->expr->x_forced_labels
6143 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6144 p->expr->x_forced_labels);
6145 }
6146 else
6147 {
6148 if (modifier == EXPAND_INITIALIZER)
6149 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6150 label_rtx (exp),
6151 forced_labels);
6152 }
6153
6154 temp = gen_rtx_MEM (FUNCTION_MODE,
6155 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6156 if (function != current_function_decl
6157 && function != inline_function_decl && function != 0)
6158 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6159 return temp;
6160 }
6161
6162 case PARM_DECL:
6163 if (DECL_RTL (exp) == 0)
6164 {
6165 error_with_decl (exp, "prior parameter's size depends on `%s'");
6166 return CONST0_RTX (mode);
6167 }
6168
6169 /* ... fall through ... */
6170
6171 case VAR_DECL:
6172 /* If a static var's type was incomplete when the decl was written,
6173 but the type is complete now, lay out the decl now. */
6174 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6175 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6176 {
6177 rtx value = DECL_RTL_IF_SET (exp);
6178
6179 layout_decl (exp, 0);
6180
6181 /* If the RTL was already set, update its mode and memory
6182 attributes. */
6183 if (value != 0)
6184 {
6185 PUT_MODE (value, DECL_MODE (exp));
6186 SET_DECL_RTL (exp, 0);
6187 set_mem_attributes (value, exp, 1);
6188 SET_DECL_RTL (exp, value);
6189 }
6190 }
6191
6192 /* ... fall through ... */
6193
6194 case FUNCTION_DECL:
6195 case RESULT_DECL:
6196 if (DECL_RTL (exp) == 0)
6197 abort ();
6198
6199 /* Ensure the variable is marked as used even if it doesn't go through
6200 a parser. If it hasn't been used yet, write out an external
6201 definition. */
6202 if (! TREE_USED (exp))
6203 {
6204 assemble_external (exp);
6205 TREE_USED (exp) = 1;
6206 }
6207
6208 /* Show we haven't gotten RTL for this yet. */
6209 temp = 0;
6210
6211 /* Handle variables inherited from containing functions. */
6212 context = decl_function_context (exp);
6213
6214 /* We treat inline_function_decl as an alias for the current function
6215 because that is the inline function whose vars, types, etc.
6216 are being merged into the current function.
6217 See expand_inline_function. */
6218
6219 if (context != 0 && context != current_function_decl
6220 && context != inline_function_decl
6221 /* If var is static, we don't need a static chain to access it. */
6222 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6223 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6224 {
6225 rtx addr;
6226
6227 /* Mark as non-local and addressable. */
6228 DECL_NONLOCAL (exp) = 1;
6229 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6230 abort ();
6231 mark_addressable (exp);
6232 if (GET_CODE (DECL_RTL (exp)) != MEM)
6233 abort ();
6234 addr = XEXP (DECL_RTL (exp), 0);
6235 if (GET_CODE (addr) == MEM)
6236 addr
6237 = replace_equiv_address (addr,
6238 fix_lexical_addr (XEXP (addr, 0), exp));
6239 else
6240 addr = fix_lexical_addr (addr, exp);
6241
6242 temp = replace_equiv_address (DECL_RTL (exp), addr);
6243 }
6244
6245 /* This is the case of an array whose size is to be determined
6246 from its initializer, while the initializer is still being parsed.
6247 See expand_decl. */
6248
6249 else if (GET_CODE (DECL_RTL (exp)) == MEM
6250 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6251 temp = validize_mem (DECL_RTL (exp));
6252
6253 /* If DECL_RTL is memory, we are in the normal case and either
6254 the address is not valid or it is not a register and -fforce-addr
6255 is specified, get the address into a register. */
6256
6257 else if (GET_CODE (DECL_RTL (exp)) == MEM
6258 && modifier != EXPAND_CONST_ADDRESS
6259 && modifier != EXPAND_SUM
6260 && modifier != EXPAND_INITIALIZER
6261 && (! memory_address_p (DECL_MODE (exp),
6262 XEXP (DECL_RTL (exp), 0))
6263 || (flag_force_addr
6264 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6265 temp = replace_equiv_address (DECL_RTL (exp),
6266 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6267
6268 /* If we got something, return it. But first, set the alignment
6269 if the address is a register. */
6270 if (temp != 0)
6271 {
6272 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6273 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6274
6275 return temp;
6276 }
6277
6278 /* If the mode of DECL_RTL does not match that of the decl, it
6279 must be a promoted value. We return a SUBREG of the wanted mode,
6280 but mark it so that we know that it was already extended. */
6281
6282 if (GET_CODE (DECL_RTL (exp)) == REG
6283 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6284 {
6285 /* Get the signedness used for this variable. Ensure we get the
6286 same mode we got when the variable was declared. */
6287 if (GET_MODE (DECL_RTL (exp))
6288 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6289 abort ();
6290
6291 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6292 SUBREG_PROMOTED_VAR_P (temp) = 1;
6293 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6294 return temp;
6295 }
6296
6297 return DECL_RTL (exp);
6298
6299 case INTEGER_CST:
6300 return immed_double_const (TREE_INT_CST_LOW (exp),
6301 TREE_INT_CST_HIGH (exp), mode);
6302
6303 case CONST_DECL:
6304 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6305
6306 case REAL_CST:
6307 /* If optimized, generate immediate CONST_DOUBLE
6308 which will be turned into memory by reload if necessary.
6309
6310 We used to force a register so that loop.c could see it. But
6311 this does not allow gen_* patterns to perform optimizations with
6312 the constants. It also produces two insns in cases like "x = 1.0;".
6313 On most machines, floating-point constants are not permitted in
6314 many insns, so we'd end up copying it to a register in any case.
6315
6316 Now, we do the copying in expand_binop, if appropriate. */
6317 return immed_real_const (exp);
6318
6319 case COMPLEX_CST:
6320 case STRING_CST:
6321 if (! TREE_CST_RTL (exp))
6322 output_constant_def (exp, 1);
6323
6324 /* TREE_CST_RTL probably contains a constant address.
6325 On RISC machines where a constant address isn't valid,
6326 make some insns to get that address into a register. */
6327 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6328 && modifier != EXPAND_CONST_ADDRESS
6329 && modifier != EXPAND_INITIALIZER
6330 && modifier != EXPAND_SUM
6331 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6332 || (flag_force_addr
6333 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6334 return replace_equiv_address (TREE_CST_RTL (exp),
6335 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6336 return TREE_CST_RTL (exp);
6337
6338 case EXPR_WITH_FILE_LOCATION:
6339 {
6340 rtx to_return;
6341 const char *saved_input_filename = input_filename;
6342 int saved_lineno = lineno;
6343 input_filename = EXPR_WFL_FILENAME (exp);
6344 lineno = EXPR_WFL_LINENO (exp);
6345 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6346 emit_line_note (input_filename, lineno);
6347 /* Possibly avoid switching back and forth here. */
6348 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6349 input_filename = saved_input_filename;
6350 lineno = saved_lineno;
6351 return to_return;
6352 }
6353
6354 case SAVE_EXPR:
6355 context = decl_function_context (exp);
6356
6357 /* If this SAVE_EXPR was at global context, assume we are an
6358 initialization function and move it into our context. */
6359 if (context == 0)
6360 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6361
6362 /* We treat inline_function_decl as an alias for the current function
6363 because that is the inline function whose vars, types, etc.
6364 are being merged into the current function.
6365 See expand_inline_function. */
6366 if (context == current_function_decl || context == inline_function_decl)
6367 context = 0;
6368
6369 /* If this is non-local, handle it. */
6370 if (context)
6371 {
6372 /* The following call just exists to abort if the context is
6373 not of a containing function. */
6374 find_function_data (context);
6375
6376 temp = SAVE_EXPR_RTL (exp);
6377 if (temp && GET_CODE (temp) == REG)
6378 {
6379 put_var_into_stack (exp);
6380 temp = SAVE_EXPR_RTL (exp);
6381 }
6382 if (temp == 0 || GET_CODE (temp) != MEM)
6383 abort ();
6384 return
6385 replace_equiv_address (temp,
6386 fix_lexical_addr (XEXP (temp, 0), exp));
6387 }
6388 if (SAVE_EXPR_RTL (exp) == 0)
6389 {
6390 if (mode == VOIDmode)
6391 temp = const0_rtx;
6392 else
6393 temp = assign_temp (build_qualified_type (type,
6394 (TYPE_QUALS (type)
6395 | TYPE_QUAL_CONST)),
6396 3, 0, 0);
6397
6398 SAVE_EXPR_RTL (exp) = temp;
6399 if (!optimize && GET_CODE (temp) == REG)
6400 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6401 save_expr_regs);
6402
6403 /* If the mode of TEMP does not match that of the expression, it
6404 must be a promoted value. We pass store_expr a SUBREG of the
6405 wanted mode but mark it so that we know that it was already
6406 extended. Note that `unsignedp' was modified above in
6407 this case. */
6408
6409 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6410 {
6411 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6412 SUBREG_PROMOTED_VAR_P (temp) = 1;
6413 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6414 }
6415
6416 if (temp == const0_rtx)
6417 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6418 else
6419 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6420
6421 TREE_USED (exp) = 1;
6422 }
6423
6424 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6425 must be a promoted value. We return a SUBREG of the wanted mode,
6426 but mark it so that we know that it was already extended. */
6427
6428 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6429 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6430 {
6431 /* Compute the signedness and make the proper SUBREG. */
6432 promote_mode (type, mode, &unsignedp, 0);
6433 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6434 SUBREG_PROMOTED_VAR_P (temp) = 1;
6435 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6436 return temp;
6437 }
6438
6439 return SAVE_EXPR_RTL (exp);
6440
6441 case UNSAVE_EXPR:
6442 {
6443 rtx temp;
6444 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6445 TREE_OPERAND (exp, 0)
6446 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6447 return temp;
6448 }
6449
6450 case PLACEHOLDER_EXPR:
6451 {
6452 tree old_list = placeholder_list;
6453 tree placeholder_expr = 0;
6454
6455 exp = find_placeholder (exp, &placeholder_expr);
6456 if (exp == 0)
6457 abort ();
6458
6459 placeholder_list = TREE_CHAIN (placeholder_expr);
6460 temp = expand_expr (exp, original_target, tmode, modifier);
6461 placeholder_list = old_list;
6462 return temp;
6463 }
6464
6465 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6466 abort ();
6467
6468 case WITH_RECORD_EXPR:
6469 /* Put the object on the placeholder list, expand our first operand,
6470 and pop the list. */
6471 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6472 placeholder_list);
6473 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6474 modifier);
6475 placeholder_list = TREE_CHAIN (placeholder_list);
6476 return target;
6477
6478 case GOTO_EXPR:
6479 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6480 expand_goto (TREE_OPERAND (exp, 0));
6481 else
6482 expand_computed_goto (TREE_OPERAND (exp, 0));
6483 return const0_rtx;
6484
6485 case EXIT_EXPR:
6486 expand_exit_loop_if_false (NULL,
6487 invert_truthvalue (TREE_OPERAND (exp, 0)));
6488 return const0_rtx;
6489
6490 case LABELED_BLOCK_EXPR:
6491 if (LABELED_BLOCK_BODY (exp))
6492 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6493 /* Should perhaps use expand_label, but this is simpler and safer. */
6494 do_pending_stack_adjust ();
6495 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6496 return const0_rtx;
6497
6498 case EXIT_BLOCK_EXPR:
6499 if (EXIT_BLOCK_RETURN (exp))
6500 sorry ("returned value in block_exit_expr");
6501 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6502 return const0_rtx;
6503
6504 case LOOP_EXPR:
6505 push_temp_slots ();
6506 expand_start_loop (1);
6507 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6508 expand_end_loop ();
6509 pop_temp_slots ();
6510
6511 return const0_rtx;
6512
6513 case BIND_EXPR:
6514 {
6515 tree vars = TREE_OPERAND (exp, 0);
6516 int vars_need_expansion = 0;
6517
6518 /* Need to open a binding contour here because
6519 if there are any cleanups they must be contained here. */
6520 expand_start_bindings (2);
6521
6522 /* Mark the corresponding BLOCK for output in its proper place. */
6523 if (TREE_OPERAND (exp, 2) != 0
6524 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6525 insert_block (TREE_OPERAND (exp, 2));
6526
6527 /* If VARS have not yet been expanded, expand them now. */
6528 while (vars)
6529 {
6530 if (!DECL_RTL_SET_P (vars))
6531 {
6532 vars_need_expansion = 1;
6533 expand_decl (vars);
6534 }
6535 expand_decl_init (vars);
6536 vars = TREE_CHAIN (vars);
6537 }
6538
6539 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6540
6541 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6542
6543 return temp;
6544 }
6545
6546 case RTL_EXPR:
6547 if (RTL_EXPR_SEQUENCE (exp))
6548 {
6549 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6550 abort ();
6551 emit_insns (RTL_EXPR_SEQUENCE (exp));
6552 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6553 }
6554 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6555 free_temps_for_rtl_expr (exp);
6556 return RTL_EXPR_RTL (exp);
6557
6558 case CONSTRUCTOR:
6559 /* If we don't need the result, just ensure we evaluate any
6560 subexpressions. */
6561 if (ignore)
6562 {
6563 tree elt;
6564
6565 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6566 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6567
6568 return const0_rtx;
6569 }
6570
6571 /* All elts simple constants => refer to a constant in memory. But
6572 if this is a non-BLKmode mode, let it store a field at a time
6573 since that should make a CONST_INT or CONST_DOUBLE when we
6574 fold. Likewise, if we have a target we can use, it is best to
6575 store directly into the target unless the type is large enough
6576 that memcpy will be used. If we are making an initializer and
6577 all operands are constant, put it in memory as well. */
6578 else if ((TREE_STATIC (exp)
6579 && ((mode == BLKmode
6580 && ! (target != 0 && safe_from_p (target, exp, 1)))
6581 || TREE_ADDRESSABLE (exp)
6582 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6583 && (! MOVE_BY_PIECES_P
6584 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6585 TYPE_ALIGN (type)))
6586 && ! mostly_zeros_p (exp))))
6587 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6588 {
6589 rtx constructor = output_constant_def (exp, 1);
6590
6591 if (modifier != EXPAND_CONST_ADDRESS
6592 && modifier != EXPAND_INITIALIZER
6593 && modifier != EXPAND_SUM)
6594 constructor = validize_mem (constructor);
6595
6596 return constructor;
6597 }
6598 else
6599 {
6600 /* Handle calls that pass values in multiple non-contiguous
6601 locations. The Irix 6 ABI has examples of this. */
6602 if (target == 0 || ! safe_from_p (target, exp, 1)
6603 || GET_CODE (target) == PARALLEL)
6604 target
6605 = assign_temp (build_qualified_type (type,
6606 (TYPE_QUALS (type)
6607 | (TREE_READONLY (exp)
6608 * TYPE_QUAL_CONST))),
6609 0, TREE_ADDRESSABLE (exp), 1);
6610
6611 store_constructor (exp, target, 0,
6612 int_size_in_bytes (TREE_TYPE (exp)));
6613 return target;
6614 }
6615
6616 case INDIRECT_REF:
6617 {
6618 tree exp1 = TREE_OPERAND (exp, 0);
6619 tree index;
6620 tree string = string_constant (exp1, &index);
6621
6622 /* Try to optimize reads from const strings. */
6623 if (string
6624 && TREE_CODE (string) == STRING_CST
6625 && TREE_CODE (index) == INTEGER_CST
6626 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6627 && GET_MODE_CLASS (mode) == MODE_INT
6628 && GET_MODE_SIZE (mode) == 1
6629 && modifier != EXPAND_WRITE)
6630 return
6631 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6632
6633 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6634 op0 = memory_address (mode, op0);
6635 temp = gen_rtx_MEM (mode, op0);
6636 set_mem_attributes (temp, exp, 0);
6637
6638 /* If we are writing to this object and its type is a record with
6639 readonly fields, we must mark it as readonly so it will
6640 conflict with readonly references to those fields. */
6641 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6642 RTX_UNCHANGING_P (temp) = 1;
6643
6644 return temp;
6645 }
6646
6647 case ARRAY_REF:
6648 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6649 abort ();
6650
6651 {
6652 tree array = TREE_OPERAND (exp, 0);
6653 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6654 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6655 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6656 HOST_WIDE_INT i;
6657
6658 /* Optimize the special-case of a zero lower bound.
6659
6660 We convert the low_bound to sizetype to avoid some problems
6661 with constant folding. (E.g. suppose the lower bound is 1,
6662 and its mode is QI. Without the conversion, (ARRAY
6663 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6664 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6665
6666 if (! integer_zerop (low_bound))
6667 index = size_diffop (index, convert (sizetype, low_bound));
6668
6669 /* Fold an expression like: "foo"[2].
6670 This is not done in fold so it won't happen inside &.
6671 Don't fold if this is for wide characters since it's too
6672 difficult to do correctly and this is a very rare case. */
6673
6674 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6675 && TREE_CODE (array) == STRING_CST
6676 && TREE_CODE (index) == INTEGER_CST
6677 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6678 && GET_MODE_CLASS (mode) == MODE_INT
6679 && GET_MODE_SIZE (mode) == 1)
6680 return
6681 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6682
6683 /* If this is a constant index into a constant array,
6684 just get the value from the array. Handle both the cases when
6685 we have an explicit constructor and when our operand is a variable
6686 that was declared const. */
6687
6688 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6689 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6690 && TREE_CODE (index) == INTEGER_CST
6691 && 0 > compare_tree_int (index,
6692 list_length (CONSTRUCTOR_ELTS
6693 (TREE_OPERAND (exp, 0)))))
6694 {
6695 tree elem;
6696
6697 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6698 i = TREE_INT_CST_LOW (index);
6699 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6700 ;
6701
6702 if (elem)
6703 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6704 modifier);
6705 }
6706
6707 else if (optimize >= 1
6708 && modifier != EXPAND_CONST_ADDRESS
6709 && modifier != EXPAND_INITIALIZER
6710 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6711 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6712 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6713 {
6714 if (TREE_CODE (index) == INTEGER_CST)
6715 {
6716 tree init = DECL_INITIAL (array);
6717
6718 if (TREE_CODE (init) == CONSTRUCTOR)
6719 {
6720 tree elem;
6721
6722 for (elem = CONSTRUCTOR_ELTS (init);
6723 (elem
6724 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6725 elem = TREE_CHAIN (elem))
6726 ;
6727
6728 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6729 return expand_expr (fold (TREE_VALUE (elem)), target,
6730 tmode, modifier);
6731 }
6732 else if (TREE_CODE (init) == STRING_CST
6733 && 0 > compare_tree_int (index,
6734 TREE_STRING_LENGTH (init)))
6735 {
6736 tree type = TREE_TYPE (TREE_TYPE (init));
6737 enum machine_mode mode = TYPE_MODE (type);
6738
6739 if (GET_MODE_CLASS (mode) == MODE_INT
6740 && GET_MODE_SIZE (mode) == 1)
6741 return (GEN_INT
6742 (TREE_STRING_POINTER
6743 (init)[TREE_INT_CST_LOW (index)]));
6744 }
6745 }
6746 }
6747 }
6748 /* Fall through. */
6749
6750 case COMPONENT_REF:
6751 case BIT_FIELD_REF:
6752 case ARRAY_RANGE_REF:
6753 /* If the operand is a CONSTRUCTOR, we can just extract the
6754 appropriate field if it is present. Don't do this if we have
6755 already written the data since we want to refer to that copy
6756 and varasm.c assumes that's what we'll do. */
6757 if (code == COMPONENT_REF
6758 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6759 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6760 {
6761 tree elt;
6762
6763 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6764 elt = TREE_CHAIN (elt))
6765 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6766 /* We can normally use the value of the field in the
6767 CONSTRUCTOR. However, if this is a bitfield in
6768 an integral mode that we can fit in a HOST_WIDE_INT,
6769 we must mask only the number of bits in the bitfield,
6770 since this is done implicitly by the constructor. If
6771 the bitfield does not meet both of those conditions,
6772 we can't do this optimization. */
6773 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6774 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6775 == MODE_INT)
6776 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6777 <= HOST_BITS_PER_WIDE_INT))))
6778 {
6779 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6780 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6781 {
6782 HOST_WIDE_INT bitsize
6783 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6784 enum machine_mode imode
6785 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6786
6787 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6788 {
6789 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6790 op0 = expand_and (imode, op0, op1, target);
6791 }
6792 else
6793 {
6794 tree count
6795 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6796 0);
6797
6798 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6799 target, 0);
6800 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6801 target, 0);
6802 }
6803 }
6804
6805 return op0;
6806 }
6807 }
6808
6809 {
6810 enum machine_mode mode1;
6811 HOST_WIDE_INT bitsize, bitpos;
6812 tree offset;
6813 int volatilep = 0;
6814 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6815 &mode1, &unsignedp, &volatilep);
6816 rtx orig_op0;
6817
6818 /* If we got back the original object, something is wrong. Perhaps
6819 we are evaluating an expression too early. In any event, don't
6820 infinitely recurse. */
6821 if (tem == exp)
6822 abort ();
6823
6824 /* If TEM's type is a union of variable size, pass TARGET to the inner
6825 computation, since it will need a temporary and TARGET is known
6826 to suffice. This occurs in unchecked conversion in Ada. */
6827
6828 orig_op0 = op0
6829 = expand_expr (tem,
6830 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6831 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6832 != INTEGER_CST)
6833 ? target : NULL_RTX),
6834 VOIDmode,
6835 (modifier == EXPAND_INITIALIZER
6836 || modifier == EXPAND_CONST_ADDRESS)
6837 ? modifier : EXPAND_NORMAL);
6838
6839 /* If this is a constant, put it into a register if it is a
6840 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6841 if (CONSTANT_P (op0))
6842 {
6843 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6844 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6845 && offset == 0)
6846 op0 = force_reg (mode, op0);
6847 else
6848 op0 = validize_mem (force_const_mem (mode, op0));
6849 }
6850
6851 if (offset != 0)
6852 {
6853 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6854
6855 /* If this object is in a register, put it into memory.
6856 This case can't occur in C, but can in Ada if we have
6857 unchecked conversion of an expression from a scalar type to
6858 an array or record type. */
6859 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6860 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6861 {
6862 /* If the operand is a SAVE_EXPR, we can deal with this by
6863 forcing the SAVE_EXPR into memory. */
6864 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6865 {
6866 put_var_into_stack (TREE_OPERAND (exp, 0));
6867 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6868 }
6869 else
6870 {
6871 tree nt
6872 = build_qualified_type (TREE_TYPE (tem),
6873 (TYPE_QUALS (TREE_TYPE (tem))
6874 | TYPE_QUAL_CONST));
6875 rtx memloc = assign_temp (nt, 1, 1, 1);
6876
6877 emit_move_insn (memloc, op0);
6878 op0 = memloc;
6879 }
6880 }
6881
6882 if (GET_CODE (op0) != MEM)
6883 abort ();
6884
6885 if (GET_MODE (offset_rtx) != ptr_mode)
6886 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6887
6888 #ifdef POINTERS_EXTEND_UNSIGNED
6889 if (GET_MODE (offset_rtx) != Pmode)
6890 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6891 #endif
6892
6893 /* A constant address in OP0 can have VOIDmode; we must not try
6894 to call force_reg in that case, so avoid it. */
6895 if (GET_CODE (op0) == MEM
6896 && GET_MODE (op0) == BLKmode
6897 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6898 && bitsize != 0
6899 && (bitpos % bitsize) == 0
6900 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6901 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6902 {
6903 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6904 bitpos = 0;
6905 }
6906
6907 op0 = offset_address (op0, offset_rtx,
6908 highest_pow2_factor (offset));
6909 }
6910
6911 /* Don't forget about volatility even if this is a bitfield. */
6912 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6913 {
6914 if (op0 == orig_op0)
6915 op0 = copy_rtx (op0);
6916
6917 MEM_VOLATILE_P (op0) = 1;
6918 }
6919
6920 /* In cases where an aligned union has an unaligned object
6921 as a field, we might be extracting a BLKmode value from
6922 an integer-mode (e.g., SImode) object. Handle this case
6923 by doing the extract into an object as wide as the field
6924 (which we know to be the width of a basic mode), then
6925 storing into memory, and changing the mode to BLKmode. */
6926 if (mode1 == VOIDmode
6927 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6928 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6929 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6930 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6931 && modifier != EXPAND_CONST_ADDRESS
6932 && modifier != EXPAND_INITIALIZER)
6933 /* If the field isn't aligned enough to fetch as a memref,
6934 fetch it as a bit field. */
6935 || (mode1 != BLKmode
6936 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6937 && ((TYPE_ALIGN (TREE_TYPE (tem))
6938 < GET_MODE_ALIGNMENT (mode))
6939 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6940 /* If the type and the field are a constant size and the
6941 size of the type isn't the same size as the bitfield,
6942 we must use bitfield operations. */
6943 || (bitsize >= 0
6944 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6945 == INTEGER_CST)
6946 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6947 bitsize)))
6948 {
6949 enum machine_mode ext_mode = mode;
6950
6951 if (ext_mode == BLKmode
6952 && ! (target != 0 && GET_CODE (op0) == MEM
6953 && GET_CODE (target) == MEM
6954 && bitpos % BITS_PER_UNIT == 0))
6955 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6956
6957 if (ext_mode == BLKmode)
6958 {
6959 /* In this case, BITPOS must start at a byte boundary and
6960 TARGET, if specified, must be a MEM. */
6961 if (GET_CODE (op0) != MEM
6962 || (target != 0 && GET_CODE (target) != MEM)
6963 || bitpos % BITS_PER_UNIT != 0)
6964 abort ();
6965
6966 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6967 if (target == 0)
6968 target = assign_temp (type, 0, 1, 1);
6969
6970 emit_block_move (target, op0,
6971 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6972 / BITS_PER_UNIT));
6973
6974 return target;
6975 }
6976
6977 op0 = validize_mem (op0);
6978
6979 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6980 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6981
6982 op0 = extract_bit_field (op0, bitsize, bitpos,
6983 unsignedp, target, ext_mode, ext_mode,
6984 int_size_in_bytes (TREE_TYPE (tem)));
6985
6986 /* If the result is a record type and BITSIZE is narrower than
6987 the mode of OP0, an integral mode, and this is a big endian
6988 machine, we must put the field into the high-order bits. */
6989 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6990 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6991 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6992 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6993 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6994 - bitsize),
6995 op0, 1);
6996
6997 if (mode == BLKmode)
6998 {
6999 rtx new = assign_temp (build_qualified_type
7000 (type_for_mode (ext_mode, 0),
7001 TYPE_QUAL_CONST), 0, 1, 1);
7002
7003 emit_move_insn (new, op0);
7004 op0 = copy_rtx (new);
7005 PUT_MODE (op0, BLKmode);
7006 set_mem_attributes (op0, exp, 1);
7007 }
7008
7009 return op0;
7010 }
7011
7012 /* If the result is BLKmode, use that to access the object
7013 now as well. */
7014 if (mode == BLKmode)
7015 mode1 = BLKmode;
7016
7017 /* Get a reference to just this component. */
7018 if (modifier == EXPAND_CONST_ADDRESS
7019 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7020 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7021 else
7022 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7023
7024 if (op0 == orig_op0)
7025 op0 = copy_rtx (op0);
7026
7027 set_mem_attributes (op0, exp, 0);
7028 if (GET_CODE (XEXP (op0, 0)) == REG)
7029 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7030
7031 MEM_VOLATILE_P (op0) |= volatilep;
7032 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7033 || modifier == EXPAND_CONST_ADDRESS
7034 || modifier == EXPAND_INITIALIZER)
7035 return op0;
7036 else if (target == 0)
7037 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7038
7039 convert_move (target, op0, unsignedp);
7040 return target;
7041 }
7042
7043 case VTABLE_REF:
7044 {
7045 rtx insn, before = get_last_insn (), vtbl_ref;
7046
7047 /* Evaluate the interior expression. */
7048 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7049 tmode, modifier);
7050
7051 /* Get or create an instruction off which to hang a note. */
7052 if (REG_P (subtarget))
7053 {
7054 target = subtarget;
7055 insn = get_last_insn ();
7056 if (insn == before)
7057 abort ();
7058 if (! INSN_P (insn))
7059 insn = prev_nonnote_insn (insn);
7060 }
7061 else
7062 {
7063 target = gen_reg_rtx (GET_MODE (subtarget));
7064 insn = emit_move_insn (target, subtarget);
7065 }
7066
7067 /* Collect the data for the note. */
7068 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7069 vtbl_ref = plus_constant (vtbl_ref,
7070 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7071 /* Discard the initial CONST that was added. */
7072 vtbl_ref = XEXP (vtbl_ref, 0);
7073
7074 REG_NOTES (insn)
7075 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7076
7077 return target;
7078 }
7079
7080 /* Intended for a reference to a buffer of a file-object in Pascal.
7081 But it's not certain that a special tree code will really be
7082 necessary for these. INDIRECT_REF might work for them. */
7083 case BUFFER_REF:
7084 abort ();
7085
7086 case IN_EXPR:
7087 {
7088 /* Pascal set IN expression.
7089
7090 Algorithm:
7091 rlo = set_low - (set_low%bits_per_word);
7092 the_word = set [ (index - rlo)/bits_per_word ];
7093 bit_index = index % bits_per_word;
7094 bitmask = 1 << bit_index;
7095 return !!(the_word & bitmask); */
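/* Note that the expansion below works byte-wise, using BITS_PER_UNIT
and byte_mode, rather than on whole words as in the sketch above. */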
7096
7097 tree set = TREE_OPERAND (exp, 0);
7098 tree index = TREE_OPERAND (exp, 1);
7099 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7100 tree set_type = TREE_TYPE (set);
7101 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7102 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7103 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7104 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7105 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7106 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7107 rtx setaddr = XEXP (setval, 0);
7108 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7109 rtx rlow;
7110 rtx diff, quo, rem, addr, bit, result;
7111
7112 /* If domain is empty, answer is no. Likewise if index is constant
7113 and out of bounds. */
7114 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7115 && TREE_CODE (set_low_bound) == INTEGER_CST
7116 && tree_int_cst_lt (set_high_bound, set_low_bound))
7117 || (TREE_CODE (index) == INTEGER_CST
7118 && TREE_CODE (set_low_bound) == INTEGER_CST
7119 && tree_int_cst_lt (index, set_low_bound))
7120 || (TREE_CODE (set_high_bound) == INTEGER_CST
7121 && TREE_CODE (index) == INTEGER_CST
7122 && tree_int_cst_lt (set_high_bound, index))))
7123 return const0_rtx;
7124
7125 if (target == 0)
7126 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7127
7128 /* If we get here, we have to generate the code for both cases
7129 (in range and out of range). */
7130
7131 op0 = gen_label_rtx ();
7132 op1 = gen_label_rtx ();
7133
7134 if (! (GET_CODE (index_val) == CONST_INT
7135 && GET_CODE (lo_r) == CONST_INT))
7136 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7137 GET_MODE (index_val), iunsignedp, op1);
7138
7139 if (! (GET_CODE (index_val) == CONST_INT
7140 && GET_CODE (hi_r) == CONST_INT))
7141 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7142 GET_MODE (index_val), iunsignedp, op1);
7143
7144 /* Calculate the element number of bit zero in the first word
7145 of the set. */
7146 if (GET_CODE (lo_r) == CONST_INT)
7147 rlow = GEN_INT (INTVAL (lo_r)
7148 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7149 else
7150 rlow = expand_binop (index_mode, and_optab, lo_r,
7151 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7152 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7153
7154 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7155 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7156
7157 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7158 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7159 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7160 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7161
7162 addr = memory_address (byte_mode,
7163 expand_binop (index_mode, add_optab, diff,
7164 setaddr, NULL_RTX, iunsignedp,
7165 OPTAB_LIB_WIDEN));
7166
7167 /* Extract the bit we want to examine. */
7168 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7169 gen_rtx_MEM (byte_mode, addr),
7170 make_tree (TREE_TYPE (index), rem),
7171 NULL_RTX, 1);
7172 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7173 GET_MODE (target) == byte_mode ? target : 0,
7174 1, OPTAB_LIB_WIDEN);
7175
7176 if (result != target)
7177 convert_move (target, result, 1);
7178
7179 /* Output the code to handle the out-of-range case. */
7180 emit_jump (op0);
7181 emit_label (op1);
7182 emit_move_insn (target, const0_rtx);
7183 emit_label (op0);
7184 return target;
7185 }
7186
7187 case WITH_CLEANUP_EXPR:
7188 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7189 {
7190 WITH_CLEANUP_EXPR_RTL (exp)
7191 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7192 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7193
7194 /* That's it for this cleanup. */
7195 TREE_OPERAND (exp, 1) = 0;
7196 }
7197 return WITH_CLEANUP_EXPR_RTL (exp);
7198
7199 case CLEANUP_POINT_EXPR:
7200 {
7201 /* Start a new binding layer that will keep track of all cleanup
7202 actions to be performed. */
7203 expand_start_bindings (2);
7204
7205 target_temp_slot_level = temp_slot_level;
7206
7207 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7208 /* If we're going to use this value, load it up now. */
7209 if (! ignore)
7210 op0 = force_not_mem (op0);
7211 preserve_temp_slots (op0);
7212 expand_end_bindings (NULL_TREE, 0, 0);
7213 }
7214 return op0;
7215
7216 case CALL_EXPR:
7217 /* Check for a built-in function. */
7218 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7219 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7220 == FUNCTION_DECL)
7221 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7222 {
7223 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7224 == BUILT_IN_FRONTEND)
7225 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7226 else
7227 return expand_builtin (exp, target, subtarget, tmode, ignore);
7228 }
7229
7230 return expand_call (exp, target, ignore);
7231
7232 case NON_LVALUE_EXPR:
7233 case NOP_EXPR:
7234 case CONVERT_EXPR:
7235 case REFERENCE_EXPR:
7236 if (TREE_OPERAND (exp, 0) == error_mark_node)
7237 return const0_rtx;
7238
7239 if (TREE_CODE (type) == UNION_TYPE)
7240 {
7241 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7242
7243 /* If both input and output are BLKmode, this conversion isn't doing
7244 anything except possibly changing the memory attributes. */
7245 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7246 {
7247 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7248 modifier);
7249
7250 result = copy_rtx (result);
7251 set_mem_attributes (result, exp, 0);
7252 return result;
7253 }
7254
7255 if (target == 0)
7256 target = assign_temp (type, 0, 1, 1);
7257
7258 if (GET_CODE (target) == MEM)
7259 /* Store data into beginning of memory target. */
7260 store_expr (TREE_OPERAND (exp, 0),
7261 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7262
7263 else if (GET_CODE (target) == REG)
7264 /* Store this field into a union of the proper type. */
7265 store_field (target,
7266 MIN ((int_size_in_bytes (TREE_TYPE
7267 (TREE_OPERAND (exp, 0)))
7268 * BITS_PER_UNIT),
7269 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7270 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7271 VOIDmode, 0, type, 0);
7272 else
7273 abort ();
7274
7275 /* Return the entire union. */
7276 return target;
7277 }
7278
7279 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7280 {
7281 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7282 modifier);
7283
7284 /* If the signedness of the conversion differs and OP0 is
7285 a promoted SUBREG, clear that indication since we now
7286 have to do the proper extension. */
7287 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7288 && GET_CODE (op0) == SUBREG)
7289 SUBREG_PROMOTED_VAR_P (op0) = 0;
7290
7291 return op0;
7292 }
7293
7294 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7295 if (GET_MODE (op0) == mode)
7296 return op0;
7297
7298 /* If OP0 is a constant, just convert it into the proper mode. */
7299 if (CONSTANT_P (op0))
7300 {
7301 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7302 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7303
7304 if (modifier == EXPAND_INITIALIZER)
7305 return simplify_gen_subreg (mode, op0, inner_mode,
7306 subreg_lowpart_offset (mode,
7307 inner_mode));
7308 else
7309 return convert_modes (mode, inner_mode, op0,
7310 TREE_UNSIGNED (inner_type));
7311 }
7312
7313 if (modifier == EXPAND_INITIALIZER)
7314 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7315
7316 if (target == 0)
7317 return
7318 convert_to_mode (mode, op0,
7319 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7320 else
7321 convert_move (target, op0,
7322 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7323 return target;
7324
7325 case VIEW_CONVERT_EXPR:
7326 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7327
7328 /* If the input and output modes are both the same, we are done.
7329 Otherwise, if neither mode is BLKmode and both are within a word, we
7330 can use gen_lowpart. If neither is true, make sure the operand is
7331 in memory and convert the MEM to the new mode. */
7332 if (TYPE_MODE (type) == GET_MODE (op0))
7333 ;
7334 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7335 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7336 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7337 op0 = gen_lowpart (TYPE_MODE (type), op0);
7338 else if (GET_CODE (op0) != MEM)
7339 {
7340 /* If the operand is not a MEM, force it into memory. Since we
7341 are going to be changing the mode of the MEM, don't call
7342 force_const_mem for constants because we don't allow pool
7343 constants to change mode. */
7344 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7345
7346 if (TREE_ADDRESSABLE (exp))
7347 abort ();
7348
7349 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7350 target
7351 = assign_stack_temp_for_type
7352 (TYPE_MODE (inner_type),
7353 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7354
7355 emit_move_insn (target, op0);
7356 op0 = target;
7357 }
7358
7359 /* At this point, OP0 is in the correct mode. If the output type is such
7360 that the operand is known to be aligned, indicate that it is.
7361 Otherwise, we need only be concerned about alignment for non-BLKmode
7362 results. */
7363 if (GET_CODE (op0) == MEM)
7364 {
7365 op0 = copy_rtx (op0);
7366
7367 if (TYPE_ALIGN_OK (type))
7368 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7369 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7370 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7371 {
7372 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7373 HOST_WIDE_INT temp_size
7374 = MAX (int_size_in_bytes (inner_type),
7375 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7376 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7377 temp_size, 0, type);
7378 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7379
7380 if (TREE_ADDRESSABLE (exp))
7381 abort ();
7382
7383 if (GET_MODE (op0) == BLKmode)
7384 emit_block_move (new_with_op0_mode, op0,
7385 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7386 else
7387 emit_move_insn (new_with_op0_mode, op0);
7388
7389 op0 = new;
7390 }
7391
7392 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7393 }
7394
7395 return op0;
7396
7397 case PLUS_EXPR:
7398 /* We come here from MINUS_EXPR when the second operand is a
7399 constant. */
7400 plus_expr:
7401 this_optab = ! unsignedp && flag_trapv
7402 && (GET_MODE_CLASS (mode) == MODE_INT)
7403 ? addv_optab : add_optab;
7404
7405 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7406 something else, make sure we add the register to the constant and
7407 then to the other thing. This case can occur during strength
7408 reduction and doing it this way will produce better code if the
7409 frame pointer or argument pointer is eliminated.
7410
7411 fold-const.c will ensure that the constant is always in the inner
7412 PLUS_EXPR, so the only case we need to do anything about is if
7413 sp, ap, or fp is our second argument, in which case we must swap
7414 the innermost first argument and our second argument. */
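/* For example, (X + C) + FP is rearranged here into (FP + C) + X,
so that the register and the constant are combined first. */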
7415
7416 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7417 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7418 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7419 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7420 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7421 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7422 {
7423 tree t = TREE_OPERAND (exp, 1);
7424
7425 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7426 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7427 }
7428
7429 /* If the result is to be ptr_mode and we are adding an integer to
7430 something, we might be forming a constant. So try to use
7431 plus_constant. If it produces a sum and we can't accept it,
7432 use force_operand. This allows P = &ARR[const] to generate
7433 efficient code on machines where a SYMBOL_REF is not a valid
7434 address.
7435
7436 If this is an EXPAND_SUM call, always return the sum. */
7437 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7438 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7439 {
7440 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7441 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7442 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7443 {
7444 rtx constant_part;
7445
7446 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7447 EXPAND_SUM);
7448 /* Use immed_double_const to ensure that the constant is
7449 truncated according to the mode of OP1, then sign extended
7450 to a HOST_WIDE_INT. Using the constant directly can result
7451 in non-canonical RTL in a 64x32 cross compile. */
7452 constant_part
7453 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7454 (HOST_WIDE_INT) 0,
7455 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7456 op1 = plus_constant (op1, INTVAL (constant_part));
7457 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7458 op1 = force_operand (op1, target);
7459 return op1;
7460 }
7461
7462 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7463 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7464 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7465 {
7466 rtx constant_part;
7467
7468 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7469 EXPAND_SUM);
7470 if (! CONSTANT_P (op0))
7471 {
7472 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7473 VOIDmode, modifier);
7474 /* Don't go to both_summands if modifier
7475 says it's not right to return a PLUS. */
7476 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7477 goto binop2;
7478 goto both_summands;
7479 }
7480 /* Use immed_double_const to ensure that the constant is
7481 truncated according to the mode of OP0, then sign extended
7482 to a HOST_WIDE_INT. Using the constant directly can result
7483 in non-canonical RTL in a 64x32 cross compile. */
7484 constant_part
7485 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7486 (HOST_WIDE_INT) 0,
7487 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7488 op0 = plus_constant (op0, INTVAL (constant_part));
7489 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7490 op0 = force_operand (op0, target);
7491 return op0;
7492 }
7493 }
7494
7495 /* No sense saving up arithmetic to be done
7496 if it's all in the wrong mode to form part of an address.
7497 And force_operand won't know whether to sign-extend or
7498 zero-extend. */
7499 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7500 || mode != ptr_mode)
7501 goto binop;
7502
7503 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7504 subtarget = 0;
7505
7506 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7507 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7508
7509 both_summands:
7510 /* Make sure any term that's a sum with a constant comes last. */
7511 if (GET_CODE (op0) == PLUS
7512 && CONSTANT_P (XEXP (op0, 1)))
7513 {
7514 temp = op0;
7515 op0 = op1;
7516 op1 = temp;
7517 }
7518 /* If adding to a sum including a constant,
7519 associate it to put the constant outside. */
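/* For example, OP0 + (X + C) is reassociated as (OP0 + X) + C;
any constant term eliminated from OP0 below is folded into C too. */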
7520 if (GET_CODE (op1) == PLUS
7521 && CONSTANT_P (XEXP (op1, 1)))
7522 {
7523 rtx constant_term = const0_rtx;
7524
7525 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7526 if (temp != 0)
7527 op0 = temp;
7528 /* Ensure that MULT comes first if there is one. */
7529 else if (GET_CODE (op0) == MULT)
7530 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7531 else
7532 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7533
7534 /* Let's also eliminate constants from op0 if possible. */
7535 op0 = eliminate_constant_term (op0, &constant_term);
7536
7537 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7538 their sum should be a constant. Form it into OP1, since the
7539 result we want will then be OP0 + OP1. */
7540
7541 temp = simplify_binary_operation (PLUS, mode, constant_term,
7542 XEXP (op1, 1));
7543 if (temp != 0)
7544 op1 = temp;
7545 else
7546 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7547 }
7548
7549 /* Put a constant term last and put a multiplication first. */
7550 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7551 temp = op1, op1 = op0, op0 = temp;
7552
7553 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7554 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7555
7556 case MINUS_EXPR:
7557 /* For initializers, we are allowed to return a MINUS of two
7558 symbolic constants. Handle the difference of two symbolic
7559 constants here, for the sake of an initializer; this covers
7560 all cases in which both operands are constant, so the MINUS
7561 (or plus_constant) can be emitted directly. */
7562 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7563 && really_constant_p (TREE_OPERAND (exp, 0))
7564 && really_constant_p (TREE_OPERAND (exp, 1)))
7565 {
7566 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7567 modifier);
7568 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7569 modifier);
7570
7571 /* If the last operand is a CONST_INT, use plus_constant of
7572 the negated constant. Else make the MINUS. */
7573 if (GET_CODE (op1) == CONST_INT)
7574 return plus_constant (op0, - INTVAL (op1));
7575 else
7576 return gen_rtx_MINUS (mode, op0, op1);
7577 }
7578 /* Convert A - const to A + (-const). */
7579 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7580 {
7581 tree negated = fold (build1 (NEGATE_EXPR, type,
7582 TREE_OPERAND (exp, 1)));
7583
7584 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7585 /* If we can't negate the constant in TYPE, leave it alone and
7586 expand_binop will negate it for us. We used to try to do it
7587 here in the signed version of TYPE, but that doesn't work
7588 on POINTER_TYPEs. */;
7589 else
7590 {
7591 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7592 goto plus_expr;
7593 }
7594 }
7595 this_optab = ! unsignedp && flag_trapv
7596 && (GET_MODE_CLASS(mode) == MODE_INT)
7597 ? subv_optab : sub_optab;
7598 goto binop;
7599
7600 case MULT_EXPR:
7601 /* If first operand is constant, swap them.
7602 Thus the following special case checks need only
7603 check the second operand. */
7604 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7605 {
7606 tree t1 = TREE_OPERAND (exp, 0);
7607 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7608 TREE_OPERAND (exp, 1) = t1;
7609 }
7610
7611 /* Attempt to return something suitable for generating an
7612 indexed address, for machines that support that. */
7613
7614 if (modifier == EXPAND_SUM && mode == ptr_mode
7615 && host_integerp (TREE_OPERAND (exp, 1), 0))
7616 {
7617 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7618 EXPAND_SUM);
7619
7620 /* If we knew for certain that this is arithmetic for an array
7621 reference, and we knew the bounds of the array, then we could
7622 apply the distributive law across (PLUS X C) for constant C.
7623 Without such knowledge, we risk overflowing the computation
7624 when both X and C are large, but X+C isn't. */
7625 /* ??? Could perhaps special-case EXP being unsigned and C being
7626 positive. In that case we are certain that X+C is no smaller
7627 than X and so the transformed expression will overflow iff the
7628 original would have. */
7629
7630 if (GET_CODE (op0) != REG)
7631 op0 = force_operand (op0, NULL_RTX);
7632 if (GET_CODE (op0) != REG)
7633 op0 = copy_to_mode_reg (mode, op0);
7634
7635 return
7636 gen_rtx_MULT (mode, op0,
7637 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7638 }
7639
7640 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7641 subtarget = 0;
7642
7643 /* Check for multiplying things that have been extended
7644 from a narrower type. If this machine supports multiplying
7645 in that narrower type with a result in the desired type,
7646 do it that way, and avoid the explicit type-conversion. */
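/* For example, the product of two values extended from HImode to
SImode can use an HImode x HImode -> SImode widening multiply
(a mulhisi3-style pattern) when the target provides one, instead
of a full SImode multiply. */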
7647 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7648 && TREE_CODE (type) == INTEGER_TYPE
7649 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7650 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7651 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7652 && int_fits_type_p (TREE_OPERAND (exp, 1),
7653 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7654 /* Don't use a widening multiply if a shift will do. */
7655 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7656 > HOST_BITS_PER_WIDE_INT)
7657 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7658 ||
7659 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7660 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7661 ==
7662 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7663 /* If both operands are extended, they must either both
7664 be zero-extended or both be sign-extended. */
7665 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7666 ==
7667 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7668 {
7669 enum machine_mode innermode
7670 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7671 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7672 ? smul_widen_optab : umul_widen_optab);
7673 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7674 ? umul_widen_optab : smul_widen_optab);
7675 if (mode == GET_MODE_WIDER_MODE (innermode))
7676 {
7677 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7678 {
7679 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7680 NULL_RTX, VOIDmode, 0);
7681 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7682 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7683 VOIDmode, 0);
7684 else
7685 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7686 NULL_RTX, VOIDmode, 0);
7687 goto binop2;
7688 }
7689 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7690 && innermode == word_mode)
7691 {
7692 rtx htem;
7693 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7694 NULL_RTX, VOIDmode, 0);
7695 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7696 op1 = convert_modes (innermode, mode,
7697 expand_expr (TREE_OPERAND (exp, 1),
7698 NULL_RTX, VOIDmode, 0),
7699 unsignedp);
7700 else
7701 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7702 NULL_RTX, VOIDmode, 0);
7703 temp = expand_binop (mode, other_optab, op0, op1, target,
7704 unsignedp, OPTAB_LIB_WIDEN);
7705 htem = expand_mult_highpart_adjust (innermode,
7706 gen_highpart (innermode, temp),
7707 op0, op1,
7708 gen_highpart (innermode, temp),
7709 unsignedp);
7710 emit_move_insn (gen_highpart (innermode, temp), htem);
7711 return temp;
7712 }
7713 }
7714 }
7715 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7716 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7717 return expand_mult (mode, op0, op1, target, unsignedp);
7718
7719 case TRUNC_DIV_EXPR:
7720 case FLOOR_DIV_EXPR:
7721 case CEIL_DIV_EXPR:
7722 case ROUND_DIV_EXPR:
7723 case EXACT_DIV_EXPR:
7724 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7725 subtarget = 0;
7726 /* Possible optimization: compute the dividend with EXPAND_SUM;
7727 then, if the divisor is constant, we can optimize the case
7728 where some terms of the dividend have coefficients divisible by it. */
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7730 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7731 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7732
7733 case RDIV_EXPR:
7734 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7735 saving an expensive divide. If not, combine will rebuild the
7736 original computation. */
7737 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7738 && !real_onep (TREE_OPERAND (exp, 0)))
7739 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7740 build (RDIV_EXPR, type,
7741 build_real (type, dconst1),
7742 TREE_OPERAND (exp, 1))),
7743 target, tmode, unsignedp);
7744 this_optab = sdiv_optab;
7745 goto binop;
7746
7747 case TRUNC_MOD_EXPR:
7748 case FLOOR_MOD_EXPR:
7749 case CEIL_MOD_EXPR:
7750 case ROUND_MOD_EXPR:
7751 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7752 subtarget = 0;
7753 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7754 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7755 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7756
7757 case FIX_ROUND_EXPR:
7758 case FIX_FLOOR_EXPR:
7759 case FIX_CEIL_EXPR:
7760 abort (); /* Not used for C. */
7761
7762 case FIX_TRUNC_EXPR:
7763 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7764 if (target == 0)
7765 target = gen_reg_rtx (mode);
7766 expand_fix (target, op0, unsignedp);
7767 return target;
7768
7769 case FLOAT_EXPR:
7770 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7771 if (target == 0)
7772 target = gen_reg_rtx (mode);
7773 /* expand_float can't figure out what to do if FROM has VOIDmode.
7774 So give it the correct mode. With -O, cse will optimize this. */
7775 if (GET_MODE (op0) == VOIDmode)
7776 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7777 op0);
7778 expand_float (target, op0,
7779 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7780 return target;
7781
7782 case NEGATE_EXPR:
7783 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7784 temp = expand_unop (mode,
7785 ! unsignedp && flag_trapv
7786 && (GET_MODE_CLASS(mode) == MODE_INT)
7787 ? negv_optab : neg_optab, op0, target, 0);
7788 if (temp == 0)
7789 abort ();
7790 return temp;
7791
7792 case ABS_EXPR:
7793 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7794
7795 /* Handle complex values specially. */
7796 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7797 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7798 return expand_complex_abs (mode, op0, target, unsignedp);
7799
7800 /* Unsigned abs is simply the operand. Testing here means we don't
7801 risk generating incorrect code below. */
7802 if (TREE_UNSIGNED (type))
7803 return op0;
7804
7805 return expand_abs (mode, op0, target, unsignedp,
7806 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7807
7808 case MAX_EXPR:
7809 case MIN_EXPR:
7810 target = original_target;
7811 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7812 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7813 || GET_MODE (target) != mode
7814 || (GET_CODE (target) == REG
7815 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7816 target = gen_reg_rtx (mode);
7817 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7818 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7819
7820 /* First try to do it with a special MIN or MAX instruction.
7821 If that does not win, use a conditional jump to select the proper
7822 value. */
7823 this_optab = (TREE_UNSIGNED (type)
7824 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7825 : (code == MIN_EXPR ? smin_optab : smax_optab));
7826
7827 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7828 OPTAB_WIDEN);
7829 if (temp != 0)
7830 return temp;
7831
7832 /* At this point, a MEM target is no longer useful; we will get better
7833 code without it. */
7834
7835 if (GET_CODE (target) == MEM)
7836 target = gen_reg_rtx (mode);
7837
7838 if (target != op0)
7839 emit_move_insn (target, op0);
7840
7841 op0 = gen_label_rtx ();
7842
7843 /* If this mode is an integer too wide to compare properly,
7844 compare word by word. Rely on cse to optimize constant cases. */
7845 if (GET_MODE_CLASS (mode) == MODE_INT
7846 && ! can_compare_p (GE, mode, ccp_jump))
7847 {
7848 if (code == MAX_EXPR)
7849 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7850 target, op1, NULL_RTX, op0);
7851 else
7852 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7853 op1, target, NULL_RTX, op0);
7854 }
7855 else
7856 {
7857 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7858 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7859 unsignedp, mode, NULL_RTX, NULL_RTX,
7860 op0);
7861 }
7862 emit_move_insn (target, op1);
7863 emit_label (op0);
7864 return target;
7865
7866 case BIT_NOT_EXPR:
7867 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7868 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7869 if (temp == 0)
7870 abort ();
7871 return temp;
7872
7873 case FFS_EXPR:
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7875 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7876 if (temp == 0)
7877 abort ();
7878 return temp;
7879
7880 /* ??? Could optimize bitwise operations with one constant argument.
7881 Could optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7882 and (a bitwise1 b) bitwise2 b (etc.),
7883 but that is probably not worthwhile. */
7884
7885 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7886 boolean values when we want in all cases to compute both of them. In
7887 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7888 as actual zero-or-1 values and then bitwise anding. In cases where
7889 there cannot be any side effects, better code would be made by
7890 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7891 how to recognize those cases. */
7892
7893 case TRUTH_AND_EXPR:
7894 case BIT_AND_EXPR:
7895 this_optab = and_optab;
7896 goto binop;
7897
7898 case TRUTH_OR_EXPR:
7899 case BIT_IOR_EXPR:
7900 this_optab = ior_optab;
7901 goto binop;
7902
7903 case TRUTH_XOR_EXPR:
7904 case BIT_XOR_EXPR:
7905 this_optab = xor_optab;
7906 goto binop;
7907
7908 case LSHIFT_EXPR:
7909 case RSHIFT_EXPR:
7910 case LROTATE_EXPR:
7911 case RROTATE_EXPR:
7912 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7913 subtarget = 0;
7914 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7915 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7916 unsignedp);
7917
7918 /* Could determine the answer when only additive constants differ. Also,
7919 the addition of one can be handled by changing the condition. */
7920 case LT_EXPR:
7921 case LE_EXPR:
7922 case GT_EXPR:
7923 case GE_EXPR:
7924 case EQ_EXPR:
7925 case NE_EXPR:
7926 case UNORDERED_EXPR:
7927 case ORDERED_EXPR:
7928 case UNLT_EXPR:
7929 case UNLE_EXPR:
7930 case UNGT_EXPR:
7931 case UNGE_EXPR:
7932 case UNEQ_EXPR:
7933 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7934 if (temp != 0)
7935 return temp;
7936
7937 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7938 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7939 && original_target
7940 && GET_CODE (original_target) == REG
7941 && (GET_MODE (original_target)
7942 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7943 {
7944 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7945 VOIDmode, 0);
7946
7947 if (temp != original_target)
7948 temp = copy_to_reg (temp);
7949
7950 op1 = gen_label_rtx ();
7951 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7952 GET_MODE (temp), unsignedp, op1);
7953 emit_move_insn (temp, const1_rtx);
7954 emit_label (op1);
7955 return temp;
7956 }
7957
7958 /* If no set-flag instruction, must generate a conditional
7959 store into a temporary variable. Drop through
7960 and handle this like && and ||. */
7961
7962 case TRUTH_ANDIF_EXPR:
7963 case TRUTH_ORIF_EXPR:
7964 if (! ignore
7965 && (target == 0 || ! safe_from_p (target, exp, 1)
7966 /* Make sure we don't have a hard reg (such as function's return
7967 value) live across basic blocks, if not optimizing. */
7968 || (!optimize && GET_CODE (target) == REG
7969 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7970 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7971
7972 if (target)
7973 emit_clr_insn (target);
7974
7975 op1 = gen_label_rtx ();
7976 jumpifnot (exp, op1);
7977
7978 if (target)
7979 emit_0_to_1_insn (target);
7980
7981 emit_label (op1);
7982 return ignore ? const0_rtx : target;
7983
7984 case TRUTH_NOT_EXPR:
7985 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7986 /* The parser is careful to generate TRUTH_NOT_EXPR
7987 only with operands that are always zero or one. */
7988 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7989 target, 1, OPTAB_LIB_WIDEN);
7990 if (temp == 0)
7991 abort ();
7992 return temp;
7993
7994 case COMPOUND_EXPR:
7995 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7996 emit_queue ();
7997 return expand_expr (TREE_OPERAND (exp, 1),
7998 (ignore ? const0_rtx : target),
7999 VOIDmode, 0);
8000
8001 case COND_EXPR:
8002 /* If we would have a "singleton" (see below) were it not for a
8003 conversion in each arm, bring that conversion back out. */
8004 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8005 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8006 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8007 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8008 {
8009 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8010 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8011
8012 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8013 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8014 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8015 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8016 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8017 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8018 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8019 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8020 return expand_expr (build1 (NOP_EXPR, type,
8021 build (COND_EXPR, TREE_TYPE (iftrue),
8022 TREE_OPERAND (exp, 0),
8023 iftrue, iffalse)),
8024 target, tmode, modifier);
8025 }
8026
8027 {
8028 /* Note that COND_EXPRs whose type is a structure or union
8029 are required to be constructed to contain assignments of
8030 a temporary variable, so that we can evaluate them here
8031 for side effect only. If type is void, we must do likewise. */
8032
8033 /* If an arm of the branch requires a cleanup,
8034 only that cleanup is performed. */
8035
8036 tree singleton = 0;
8037 tree binary_op = 0, unary_op = 0;
8038
8039 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8040 convert it to our mode, if necessary. */
8041 if (integer_onep (TREE_OPERAND (exp, 1))
8042 && integer_zerop (TREE_OPERAND (exp, 2))
8043 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8044 {
8045 if (ignore)
8046 {
8047 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8048 modifier);
8049 return const0_rtx;
8050 }
8051
8052 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8053 if (GET_MODE (op0) == mode)
8054 return op0;
8055
8056 if (target == 0)
8057 target = gen_reg_rtx (mode);
8058 convert_move (target, op0, unsignedp);
8059 return target;
8060 }
8061
8062 /* Check for X ? A + B : A. If we have this, we can copy A to the
8063 output and conditionally add B. Similarly for unary operations.
8064 Don't do this if X has side-effects because those side effects
8065 might affect A or B and the "?" operation is a sequence point in
8066 ANSI. (operand_equal_p tests for side effects.) */
8067
8068 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8069 && operand_equal_p (TREE_OPERAND (exp, 2),
8070 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8071 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8072 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8073 && operand_equal_p (TREE_OPERAND (exp, 1),
8074 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8075 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8076 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8077 && operand_equal_p (TREE_OPERAND (exp, 2),
8078 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8079 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8080 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8081 && operand_equal_p (TREE_OPERAND (exp, 1),
8082 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8083 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8084
8085 /* If we are not to produce a result, we have no target. Otherwise,
8086 if a target was specified use it; it will not be used as an
8087 intermediate target unless it is safe. If no target, use a
8088 temporary. */
8089
8090 if (ignore)
8091 temp = 0;
8092 else if (original_target
8093 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8094 || (singleton && GET_CODE (original_target) == REG
8095 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8096 && original_target == var_rtx (singleton)))
8097 && GET_MODE (original_target) == mode
8098 #ifdef HAVE_conditional_move
8099 && (! can_conditionally_move_p (mode)
8100 || GET_CODE (original_target) == REG
8101 || TREE_ADDRESSABLE (type))
8102 #endif
8103 && (GET_CODE (original_target) != MEM
8104 || TREE_ADDRESSABLE (type)))
8105 temp = original_target;
8106 else if (TREE_ADDRESSABLE (type))
8107 abort ();
8108 else
8109 temp = assign_temp (type, 0, 0, 1);
8110
8111 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8112 do the test of X as a store-flag operation, do this as
8113 A + ((X != 0) << log C). Similarly for other simple binary
8114 operators. Only do for C == 1 if BRANCH_COST is low. */
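/* For example, with C == 4, X ? A + 4 : A is computed as
A + ((X != 0) << 2), using the 0-or-1 store-flag value of X. */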
8115 if (temp && singleton && binary_op
8116 && (TREE_CODE (binary_op) == PLUS_EXPR
8117 || TREE_CODE (binary_op) == MINUS_EXPR
8118 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8119 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8120 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8121 : integer_onep (TREE_OPERAND (binary_op, 1)))
8122 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8123 {
8124 rtx result;
8125 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8126 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8127 ? addv_optab : add_optab)
8128 : TREE_CODE (binary_op) == MINUS_EXPR
8129 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8130 ? subv_optab : sub_optab)
8131 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8132 : xor_optab);
8133
8134 /* If we had X ? A : A + 1, do this as A + (X == 0).
8135
8136 We have to invert the truth value here and then put it
8137 back later if do_store_flag fails. We cannot simply copy
8138 TREE_OPERAND (exp, 0) to another variable and modify that
8139 because invert_truthvalue can modify the tree pointed to
8140 by its argument. */
8141 if (singleton == TREE_OPERAND (exp, 1))
8142 TREE_OPERAND (exp, 0)
8143 = invert_truthvalue (TREE_OPERAND (exp, 0));
8144
8145 result = do_store_flag (TREE_OPERAND (exp, 0),
8146 (safe_from_p (temp, singleton, 1)
8147 ? temp : NULL_RTX),
8148 mode, BRANCH_COST <= 1);
8149
8150 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8151 result = expand_shift (LSHIFT_EXPR, mode, result,
8152 build_int_2 (tree_log2
8153 (TREE_OPERAND
8154 (binary_op, 1)),
8155 0),
8156 (safe_from_p (temp, singleton, 1)
8157 ? temp : NULL_RTX), 0);
8158
8159 if (result)
8160 {
8161 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8162 return expand_binop (mode, boptab, op1, result, temp,
8163 unsignedp, OPTAB_LIB_WIDEN);
8164 }
8165 else if (singleton == TREE_OPERAND (exp, 1))
8166 TREE_OPERAND (exp, 0)
8167 = invert_truthvalue (TREE_OPERAND (exp, 0));
8168 }
8169
8170 do_pending_stack_adjust ();
8171 NO_DEFER_POP;
8172 op0 = gen_label_rtx ();
8173
8174 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8175 {
8176 if (temp != 0)
8177 {
8178 /* If the target conflicts with the other operand of the
8179 binary op, we can't use it. Also, we can't use the target
8180 if it is a hard register, because evaluating the condition
8181 might clobber it. */
8182 if ((binary_op
8183 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8184 || (GET_CODE (temp) == REG
8185 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8186 temp = gen_reg_rtx (mode);
8187 store_expr (singleton, temp, 0);
8188 }
8189 else
8190 expand_expr (singleton,
8191 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8192 if (singleton == TREE_OPERAND (exp, 1))
8193 jumpif (TREE_OPERAND (exp, 0), op0);
8194 else
8195 jumpifnot (TREE_OPERAND (exp, 0), op0);
8196
8197 start_cleanup_deferral ();
8198 if (binary_op && temp == 0)
8199 /* Just touch the other operand. */
8200 expand_expr (TREE_OPERAND (binary_op, 1),
8201 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8202 else if (binary_op)
8203 store_expr (build (TREE_CODE (binary_op), type,
8204 make_tree (type, temp),
8205 TREE_OPERAND (binary_op, 1)),
8206 temp, 0);
8207 else
8208 store_expr (build1 (TREE_CODE (unary_op), type,
8209 make_tree (type, temp)),
8210 temp, 0);
8211 op1 = op0;
8212 }
8213 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8214 comparison operator. If we have one of these cases, set the
8215 output to A, branch on A (cse will merge these two references),
8216 then set the output to FOO. */
8217 else if (temp
8218 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8219 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8220 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8221 TREE_OPERAND (exp, 1), 0)
8222 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8223 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8224 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8225 {
8226 if (GET_CODE (temp) == REG
8227 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8228 temp = gen_reg_rtx (mode);
8229 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8230 jumpif (TREE_OPERAND (exp, 0), op0);
8231
8232 start_cleanup_deferral ();
8233 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8234 op1 = op0;
8235 }
8236 else if (temp
8237 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8238 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8239 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8240 TREE_OPERAND (exp, 2), 0)
8241 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8242 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8243 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8244 {
8245 if (GET_CODE (temp) == REG
8246 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8247 temp = gen_reg_rtx (mode);
8248 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8249 jumpifnot (TREE_OPERAND (exp, 0), op0);
8250
8251 start_cleanup_deferral ();
8252 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8253 op1 = op0;
8254 }
8255 else
8256 {
8257 op1 = gen_label_rtx ();
8258 jumpifnot (TREE_OPERAND (exp, 0), op0);
8259
8260 start_cleanup_deferral ();
8261
8262 /* One branch of the cond can be void, if it never returns. For
8263 example, A ? throw : E. */
8264 if (temp != 0
8265 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8266 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8267 else
8268 expand_expr (TREE_OPERAND (exp, 1),
8269 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8270 end_cleanup_deferral ();
8271 emit_queue ();
8272 emit_jump_insn (gen_jump (op1));
8273 emit_barrier ();
8274 emit_label (op0);
8275 start_cleanup_deferral ();
8276 if (temp != 0
8277 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8278 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8279 else
8280 expand_expr (TREE_OPERAND (exp, 2),
8281 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8282 }
8283
8284 end_cleanup_deferral ();
8285
8286 emit_queue ();
8287 emit_label (op1);
8288 OK_DEFER_POP;
8289
8290 return temp;
8291 }
8292
8293 case TARGET_EXPR:
8294 {
8295 /* Something needs to be initialized, but we didn't know
8296 where that thing was when building the tree. For example,
8297 it could be the return value of a function, or a parameter
8298 to a function which is laid down on the stack, or a temporary
8299 variable which must be passed by reference.
8300
8301 We guarantee that the expression will either be constructed
8302 or copied into our original target. */
8303
8304 tree slot = TREE_OPERAND (exp, 0);
8305 tree cleanups = NULL_TREE;
8306 tree exp1;
8307
8308 if (TREE_CODE (slot) != VAR_DECL)
8309 abort ();
8310
8311 if (! ignore)
8312 target = original_target;
8313
8314 /* Set this here so that if we get a target that refers to a
8315 register variable that's already been used, put_reg_into_stack
8316 knows that it should fix up those uses. */
8317 TREE_USED (slot) = 1;
8318
8319 if (target == 0)
8320 {
8321 if (DECL_RTL_SET_P (slot))
8322 {
8323 target = DECL_RTL (slot);
8324 /* If we have already expanded the slot, don't do
8325 it again. (mrs) */
8326 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8327 return target;
8328 }
8329 else
8330 {
8331 target = assign_temp (type, 2, 0, 1);
8332 /* All temp slots at this level must not conflict. */
8333 preserve_temp_slots (target);
8334 SET_DECL_RTL (slot, target);
8335 if (TREE_ADDRESSABLE (slot))
8336 put_var_into_stack (slot);
8337
8338 /* Since SLOT is not known to the called function
8339 to belong to its stack frame, we must build an explicit
8340 cleanup. This case occurs when we must build up a reference
8341 to pass the reference as an argument. In this case,
8342 it is very likely that such a reference need not be
8343 built here. */
8344
8345 if (TREE_OPERAND (exp, 2) == 0)
8346 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8347 cleanups = TREE_OPERAND (exp, 2);
8348 }
8349 }
8350 else
8351 {
8352 /* This case does occur when expanding a parameter which
8353 needs to be constructed on the stack. The target
8354 is the actual stack address that we want to initialize.
8355 The function we call will perform the cleanup in this case. */
8356
8357 /* If we have already assigned it space, use that space,
8358 not the target that we were passed in, as our target
8359 parameter is only a hint. */
8360 if (DECL_RTL_SET_P (slot))
8361 {
8362 target = DECL_RTL (slot);
8363 /* If we have already expanded the slot, don't do
8364 it again. (mrs) */
8365 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8366 return target;
8367 }
8368 else
8369 {
8370 SET_DECL_RTL (slot, target);
8371 /* If we must have an addressable slot, then make sure that
8372 the RTL that we just stored in slot is OK. */
8373 if (TREE_ADDRESSABLE (slot))
8374 put_var_into_stack (slot);
8375 }
8376 }
8377
8378 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8379 /* Mark it as expanded. */
8380 TREE_OPERAND (exp, 1) = NULL_TREE;
8381
8382 store_expr (exp1, target, 0);
8383
8384 expand_decl_cleanup (NULL_TREE, cleanups);
8385
8386 return target;
8387 }
8388
8389 case INIT_EXPR:
8390 {
8391 tree lhs = TREE_OPERAND (exp, 0);
8392 tree rhs = TREE_OPERAND (exp, 1);
8393
8394 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8395 return temp;
8396 }
8397
8398 case MODIFY_EXPR:
8399 {
8400 /* If lhs is complex, expand calls in rhs before computing it.
8401 That's so we don't compute a pointer and save it over a
8402 call. If lhs is simple, compute it first so we can give it
8403 as a target if the rhs is just a call. This avoids an
8404 extra temp and copy, and prevents a partial subsumption
8405 that makes bad code. Actually we could treat
8406 component_ref's of vars like vars. */
8407
8408 tree lhs = TREE_OPERAND (exp, 0);
8409 tree rhs = TREE_OPERAND (exp, 1);
8410
8411 temp = 0;
8412
8413 /* Check for |= or &= of a bitfield of size one into another bitfield
8414 of size one. In this case (unless we need the result of the
8415 assignment) we can do this more efficiently with a
8416 test followed by an assignment, if necessary.
8417
8418 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8419 things change so we do, this code should be enhanced to
8420 support it. */
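/* For example, with one-bit fields a and b and the result unused,
`a |= b' becomes `if (b) a = 1;' and `a &= b' becomes
`if (! b) a = 0;'. */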
8421 if (ignore
8422 && TREE_CODE (lhs) == COMPONENT_REF
8423 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8424 || TREE_CODE (rhs) == BIT_AND_EXPR)
8425 && TREE_OPERAND (rhs, 0) == lhs
8426 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8427 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8428 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8429 {
8430 rtx label = gen_label_rtx ();
8431
8432 do_jump (TREE_OPERAND (rhs, 1),
8433 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8434 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8435 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8436 (TREE_CODE (rhs) == BIT_IOR_EXPR
8437 ? integer_one_node
8438 : integer_zero_node)),
8439 0, 0);
8440 do_pending_stack_adjust ();
8441 emit_label (label);
8442 return const0_rtx;
8443 }
8444
8445 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8446
8447 return temp;
8448 }
8449
8450 case RETURN_EXPR:
8451 if (!TREE_OPERAND (exp, 0))
8452 expand_null_return ();
8453 else
8454 expand_return (TREE_OPERAND (exp, 0));
8455 return const0_rtx;
8456
8457 case PREINCREMENT_EXPR:
8458 case PREDECREMENT_EXPR:
8459 return expand_increment (exp, 0, ignore);
8460
8461 case POSTINCREMENT_EXPR:
8462 case POSTDECREMENT_EXPR:
8463 /* Faster to treat as pre-increment if result is not used. */
8464 return expand_increment (exp, ! ignore, ignore);
8465
8466 case ADDR_EXPR:
8467 /* Are we taking the address of a nested function? */
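/* If so, the value returned is the address of a trampoline that
loads the static chain before jumping to the nested function. */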
8468 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8469 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8470 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8471 && ! TREE_STATIC (exp))
8472 {
8473 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8474 op0 = force_operand (op0, target);
8475 }
8476 /* If we are taking the address of something erroneous, just
8477 return a zero. */
8478 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8479 return const0_rtx;
8480 /* If we are taking the address of a constant and are at the
8481 top level, we have to use output_constant_def since we can't
8482 call force_const_mem at top level. */
8483 else if (cfun == 0
8484 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8485 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8486 == 'c')))
8487 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8488 else
8489 {
8490 /* We make sure to pass const0_rtx down if we came in with
8491 ignore set, to avoid running the cleanups twice. */
8492 op0 = expand_expr (TREE_OPERAND (exp, 0),
8493 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8494 (modifier == EXPAND_INITIALIZER
8495 ? modifier : EXPAND_CONST_ADDRESS));
8496
8497 /* If we are going to ignore the result, OP0 will have been set
8498 to const0_rtx, so just return it. Don't get confused and
8499 think we are taking the address of the constant. */
8500 if (ignore)
8501 return op0;
8502
8503 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8504 clever and return a REG when given a MEM. */
8505 op0 = protect_from_queue (op0, 1);
8506
8507 /* We would like the object in memory. If it is a constant, we can
8508 have it be statically allocated into memory. For a non-constant,
8509 we need to allocate some memory and store the value into it. */
8510
8511 if (CONSTANT_P (op0))
8512 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8513 op0);
8514 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8515 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8516 || GET_CODE (op0) == PARALLEL)
8517 {
8518 /* If the operand is a SAVE_EXPR, we can deal with this by
8519 forcing the SAVE_EXPR into memory. */
8520 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8521 {
8522 put_var_into_stack (TREE_OPERAND (exp, 0));
8523 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8524 }
8525 else
8526 {
8527 /* If this object is in a register, it can't be BLKmode. */
8528 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8529 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8530
8531 if (GET_CODE (op0) == PARALLEL)
8532 /* Handle calls that pass values in multiple
8533 non-contiguous locations. The Irix 6 ABI has examples
8534 of this. */
8535 emit_group_store (memloc, op0,
8536 int_size_in_bytes (inner_type));
8537 else
8538 emit_move_insn (memloc, op0);
8539
8540 op0 = memloc;
8541 }
8542 }
8543
8544 if (GET_CODE (op0) != MEM)
8545 abort ();
8546
8547 mark_temp_addr_taken (op0);
8548 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8549 {
8550 op0 = XEXP (op0, 0);
8551 #ifdef POINTERS_EXTEND_UNSIGNED
8552 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8553 && mode == ptr_mode)
8554 op0 = convert_memory_address (ptr_mode, op0);
8555 #endif
8556 return op0;
8557 }
8558
8559 /* If OP0 is not aligned at least as much as the type requires, we
8560 need to make a temporary, copy OP0 to it, and take the address of
8561 the temporary. We want to use the alignment of the type, not of
8562 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8563 the test for BLKmode means that can't happen. The test for
8564 BLKmode is because we never make mis-aligned MEMs with
8565 non-BLKmode.
8566
8567 We don't need to do this at all if the machine doesn't have
8568 strict alignment. */
8569 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8570 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8571 > MEM_ALIGN (op0))
8572 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8573 {
8574 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8575 rtx new
8576 = assign_stack_temp_for_type
8577 (TYPE_MODE (inner_type),
8578 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8579 : int_size_in_bytes (inner_type),
8580 1, build_qualified_type (inner_type,
8581 (TYPE_QUALS (inner_type)
8582 | TYPE_QUAL_CONST)));
8583
8584 if (TYPE_ALIGN_OK (inner_type))
8585 abort ();
8586
8587 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8588 op0 = new;
8589 }
8590
8591 op0 = force_operand (XEXP (op0, 0), target);
8592 }
8593
8594 if (flag_force_addr
8595 && GET_CODE (op0) != REG
8596 && modifier != EXPAND_CONST_ADDRESS
8597 && modifier != EXPAND_INITIALIZER
8598 && modifier != EXPAND_SUM)
8599 op0 = force_reg (Pmode, op0);
8600
8601 if (GET_CODE (op0) == REG
8602 && ! REG_USERVAR_P (op0))
8603 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8604
8605 #ifdef POINTERS_EXTEND_UNSIGNED
8606 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8607 && mode == ptr_mode)
8608 op0 = convert_memory_address (ptr_mode, op0);
8609 #endif
8610
8611 return op0;
8612
8613 case ENTRY_VALUE_EXPR:
8614 abort ();
8615
8616 /* COMPLEX type for Extended Pascal & Fortran */
8617 case COMPLEX_EXPR:
8618 {
8619 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8620 rtx insns;
8621
8622 /* Expand the two operands (real and imaginary parts) to rtx. */
8623 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8624 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8625
8626 if (! target)
8627 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8628
8629 start_sequence ();
8630
8631 /* Move the real (op0) and imaginary (op1) parts to their location. */
8632 emit_move_insn (gen_realpart (mode, target), op0);
8633 emit_move_insn (gen_imagpart (mode, target), op1);
8634
8635 insns = get_insns ();
8636 end_sequence ();
8637
8638 /* Complex construction should appear as a single unit. */
8639 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8640 each with a separate pseudo as destination.
8641 It's not correct for flow to treat them as a unit. */
8642 if (GET_CODE (target) != CONCAT)
8643 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8644 else
8645 emit_insns (insns);
8646
8647 return target;
8648 }
8649
8650 case REALPART_EXPR:
8651 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8652 return gen_realpart (mode, op0);
8653
8654 case IMAGPART_EXPR:
8655 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8656 return gen_imagpart (mode, op0);
8657
8658 case CONJ_EXPR:
8659 {
8660 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8661 rtx imag_t;
8662 rtx insns;
8663
8664 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8665
8666 if (! target)
8667 target = gen_reg_rtx (mode);
8668
8669 start_sequence ();
8670
8671 /* Store the realpart and the negated imagpart to target. */
8672 emit_move_insn (gen_realpart (partmode, target),
8673 gen_realpart (partmode, op0));
8674
8675 imag_t = gen_imagpart (partmode, target);
8676 temp = expand_unop (partmode,
8677 ! unsignedp && flag_trapv
8678 && (GET_MODE_CLASS(partmode) == MODE_INT)
8679 ? negv_optab : neg_optab,
8680 gen_imagpart (partmode, op0), imag_t, 0);
8681 if (temp != imag_t)
8682 emit_move_insn (imag_t, temp);
8683
8684 insns = get_insns ();
8685 end_sequence ();
8686
 8687 	 /* Conjugate should appear as a single unit.
8688 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8689 each with a separate pseudo as destination.
8690 It's not correct for flow to treat them as a unit. */
8691 if (GET_CODE (target) != CONCAT)
8692 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8693 else
8694 emit_insns (insns);
8695
8696 return target;
8697 }
8698
8699 case TRY_CATCH_EXPR:
8700 {
8701 tree handler = TREE_OPERAND (exp, 1);
8702
8703 expand_eh_region_start ();
8704
8705 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8706
8707 expand_eh_region_end_cleanup (handler);
8708
8709 return op0;
8710 }
8711
8712 case TRY_FINALLY_EXPR:
8713 {
8714 tree try_block = TREE_OPERAND (exp, 0);
8715 tree finally_block = TREE_OPERAND (exp, 1);
8716 rtx finally_label = gen_label_rtx ();
8717 rtx done_label = gen_label_rtx ();
8718 rtx return_link = gen_reg_rtx (Pmode);
8719 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8720 (tree) finally_label, (tree) return_link);
8721 TREE_SIDE_EFFECTS (cleanup) = 1;
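	/* A rough sketch of the code laid out below; every exit from the
	   binding contour set up next goes through the finally block as a
	   subroutine:

	       <try-block code>
	       return_link = <address of RET>; goto FINALLY;
	     RET:
	       goto DONE;
	     FINALLY:
	       <finally-block code>
	       goto *return_link;
	     DONE:

	   This is only an outline of the expansion, not the exact insns.  */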
8722
8723 /* Start a new binding layer that will keep track of all cleanup
8724 actions to be performed. */
8725 expand_start_bindings (2);
8726
8727 target_temp_slot_level = temp_slot_level;
8728
8729 expand_decl_cleanup (NULL_TREE, cleanup);
8730 op0 = expand_expr (try_block, target, tmode, modifier);
8731
8732 preserve_temp_slots (op0);
8733 expand_end_bindings (NULL_TREE, 0, 0);
8734 emit_jump (done_label);
8735 emit_label (finally_label);
8736 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8737 emit_indirect_jump (return_link);
8738 emit_label (done_label);
8739 return op0;
8740 }
8741
8742 case GOTO_SUBROUTINE_EXPR:
8743 {
8744 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8745 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8746 rtx return_address = gen_label_rtx ();
8747 emit_move_insn (return_link,
8748 gen_rtx_LABEL_REF (Pmode, return_address));
8749 emit_jump (subr);
8750 emit_label (return_address);
8751 return const0_rtx;
8752 }
8753
8754 case VA_ARG_EXPR:
8755 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8756
8757 case EXC_PTR_EXPR:
8758 return get_exception_pointer (cfun);
8759
8760 case FDESC_EXPR:
 8761       /* Function descriptors are not valid except as
8762 initialization constants, and should not be expanded. */
8763 abort ();
8764
8765 default:
8766 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8767 }
8768
8769 /* Here to do an ordinary binary operator, generating an instruction
8770 from the optab already placed in `this_optab'. */
8771 binop:
8772 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8773 subtarget = 0;
8774 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8775 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8776 binop2:
8777 temp = expand_binop (mode, this_optab, op0, op1, target,
8778 unsignedp, OPTAB_LIB_WIDEN);
8779 if (temp == 0)
8780 abort ();
8781 return temp;
8782 }
8783 \f
 8784 /* Return the tree node if ARG corresponds to a string constant or zero
8785 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8786 in bytes within the string that ARG is accessing. The type of the
8787 offset will be `sizetype'. */
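/* For example (illustrative): given `&"hello"[0]', i.e. an ADDR_EXPR
   of a STRING_CST, we return the STRING_CST and set *PTR_OFFSET to
   zero; given `"hello" + 3', a PLUS_EXPR one of whose operands is the
   address of a STRING_CST, we return the STRING_CST and set
   *PTR_OFFSET to 3.  */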
8788
8789 tree
8790 string_constant (arg, ptr_offset)
8791 tree arg;
8792 tree *ptr_offset;
8793 {
8794 STRIP_NOPS (arg);
8795
8796 if (TREE_CODE (arg) == ADDR_EXPR
8797 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8798 {
8799 *ptr_offset = size_zero_node;
8800 return TREE_OPERAND (arg, 0);
8801 }
8802 else if (TREE_CODE (arg) == PLUS_EXPR)
8803 {
8804 tree arg0 = TREE_OPERAND (arg, 0);
8805 tree arg1 = TREE_OPERAND (arg, 1);
8806
8807 STRIP_NOPS (arg0);
8808 STRIP_NOPS (arg1);
8809
8810 if (TREE_CODE (arg0) == ADDR_EXPR
8811 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8812 {
8813 *ptr_offset = convert (sizetype, arg1);
8814 return TREE_OPERAND (arg0, 0);
8815 }
8816 else if (TREE_CODE (arg1) == ADDR_EXPR
8817 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8818 {
8819 *ptr_offset = convert (sizetype, arg0);
8820 return TREE_OPERAND (arg1, 0);
8821 }
8822 }
8823
8824 return 0;
8825 }
8826 \f
8827 /* Expand code for a post- or pre- increment or decrement
8828 and return the RTX for the result.
8829 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8830
8831 static rtx
8832 expand_increment (exp, post, ignore)
8833 tree exp;
8834 int post, ignore;
8835 {
8836 rtx op0, op1;
8837 rtx temp, value;
8838 tree incremented = TREE_OPERAND (exp, 0);
8839 optab this_optab = add_optab;
8840 int icode;
8841 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8842 int op0_is_copy = 0;
8843 int single_insn = 0;
8844 /* 1 means we can't store into OP0 directly,
8845 because it is a subreg narrower than a word,
8846 and we don't dare clobber the rest of the word. */
8847 int bad_subreg = 0;
8848
8849 /* Stabilize any component ref that might need to be
8850 evaluated more than once below. */
8851 if (!post
8852 || TREE_CODE (incremented) == BIT_FIELD_REF
8853 || (TREE_CODE (incremented) == COMPONENT_REF
8854 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8855 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8856 incremented = stabilize_reference (incremented);
8857 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8858 ones into save exprs so that they don't accidentally get evaluated
8859 more than once by the code below. */
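  /* For instance (C++), in `++ ++x' the operand of the outer increment
     is itself a PREINCREMENT_EXPR; without the SAVE_EXPR it could be
     expanded twice, once to compute the new value and once as the
     object stored into.  */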
8860 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8861 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8862 incremented = save_expr (incremented);
8863
8864 /* Compute the operands as RTX.
8865 Note whether OP0 is the actual lvalue or a copy of it:
8866 I believe it is a copy iff it is a register or subreg
8867 and insns were generated in computing it. */
8868
8869 temp = get_last_insn ();
8870 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8871
8872 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8873 in place but instead must do sign- or zero-extension during assignment,
8874 so we copy it into a new register and let the code below use it as
8875 a copy.
8876
 8877      Note that we can safely modify this SUBREG since it is known not to be
8878 shared (it was made by the expand_expr call above). */
8879
8880 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8881 {
8882 if (post)
8883 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8884 else
8885 bad_subreg = 1;
8886 }
8887 else if (GET_CODE (op0) == SUBREG
8888 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8889 {
8890 /* We cannot increment this SUBREG in place. If we are
8891 post-incrementing, get a copy of the old value. Otherwise,
8892 just mark that we cannot increment in place. */
8893 if (post)
8894 op0 = copy_to_reg (op0);
8895 else
8896 bad_subreg = 1;
8897 }
8898
8899 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8900 && temp != get_last_insn ());
8901 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8902
8903 /* Decide whether incrementing or decrementing. */
8904 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8905 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8906 this_optab = sub_optab;
8907
8908 /* Convert decrement by a constant into a negative increment. */
8909 if (this_optab == sub_optab
8910 && GET_CODE (op1) == CONST_INT)
8911 {
8912 op1 = GEN_INT (-INTVAL (op1));
8913 this_optab = add_optab;
8914 }
8915
8916 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8917 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8918
8919 /* For a preincrement, see if we can do this with a single instruction. */
8920 if (!post)
8921 {
8922 icode = (int) this_optab->handlers[(int) mode].insn_code;
8923 if (icode != (int) CODE_FOR_nothing
8924 /* Make sure that OP0 is valid for operands 0 and 1
8925 of the insn we want to queue. */
8926 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8927 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8928 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8929 single_insn = 1;
8930 }
8931
8932 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8933 then we cannot just increment OP0. We must therefore contrive to
8934 increment the original value. Then, for postincrement, we can return
8935 OP0 since it is a copy of the old value. For preincrement, expand here
8936 unless we can do it with a single insn.
8937
8938 Likewise if storing directly into OP0 would clobber high bits
8939 we need to preserve (bad_subreg). */
8940 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8941 {
8942 /* This is the easiest way to increment the value wherever it is.
8943 Problems with multiple evaluation of INCREMENTED are prevented
8944 because either (1) it is a component_ref or preincrement,
8945 in which case it was stabilized above, or (2) it is an array_ref
8946 with constant index in an array in a register, which is
8947 safe to reevaluate. */
8948 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8949 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8950 ? MINUS_EXPR : PLUS_EXPR),
8951 TREE_TYPE (exp),
8952 incremented,
8953 TREE_OPERAND (exp, 1));
8954
8955 while (TREE_CODE (incremented) == NOP_EXPR
8956 || TREE_CODE (incremented) == CONVERT_EXPR)
8957 {
8958 newexp = convert (TREE_TYPE (incremented), newexp);
8959 incremented = TREE_OPERAND (incremented, 0);
8960 }
8961
 8962       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8963 return post ? op0 : temp;
8964 }
8965
8966 if (post)
8967 {
8968 /* We have a true reference to the value in OP0.
8969 If there is an insn to add or subtract in this mode, queue it.
8970 Queueing the increment insn avoids the register shuffling
8971 that often results if we must increment now and first save
8972 the old value for subsequent use. */
8973
8974 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8975 op0 = stabilize (op0);
8976 #endif
8977
8978 icode = (int) this_optab->handlers[(int) mode].insn_code;
8979 if (icode != (int) CODE_FOR_nothing
8980 /* Make sure that OP0 is valid for operands 0 and 1
8981 of the insn we want to queue. */
8982 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8983 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8984 {
8985 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8986 op1 = force_reg (mode, op1);
8987
8988 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8989 }
8990 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8991 {
8992 rtx addr = (general_operand (XEXP (op0, 0), mode)
8993 ? force_reg (Pmode, XEXP (op0, 0))
8994 : copy_to_reg (XEXP (op0, 0)));
8995 rtx temp, result;
8996
8997 op0 = replace_equiv_address (op0, addr);
8998 temp = force_reg (GET_MODE (op0), op0);
8999 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9000 op1 = force_reg (mode, op1);
9001
9002 /* The increment queue is LIFO, thus we have to `queue'
9003 the instructions in reverse order. */
9004 enqueue_insn (op0, gen_move_insn (op0, temp));
9005 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9006 return result;
9007 }
9008 }
9009
9010 /* Preincrement, or we can't increment with one simple insn. */
9011 if (post)
9012 /* Save a copy of the value before inc or dec, to return it later. */
9013 temp = value = copy_to_reg (op0);
9014 else
9015 /* Arrange to return the incremented value. */
9016 /* Copy the rtx because expand_binop will protect from the queue,
9017 and the results of that would be invalid for us to return
9018 if our caller does emit_queue before using our result. */
9019 temp = copy_rtx (value = op0);
9020
9021 /* Increment however we can. */
9022 op1 = expand_binop (mode, this_optab, value, op1, op0,
9023 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9024
9025 /* Make sure the value is stored into OP0. */
9026 if (op1 != op0)
9027 emit_move_insn (op0, op1);
9028
9029 return temp;
9030 }
9031 \f
9032 /* At the start of a function, record that we have no previously-pushed
9033 arguments waiting to be popped. */
9034
9035 void
9036 init_pending_stack_adjust ()
9037 {
9038 pending_stack_adjust = 0;
9039 }
9040
9041 /* When exiting from function, if safe, clear out any pending stack adjust
9042 so the adjustment won't get done.
9043
9044 Note, if the current function calls alloca, then it must have a
9045 frame pointer regardless of the value of flag_omit_frame_pointer. */
9046
9047 void
9048 clear_pending_stack_adjust ()
9049 {
9050 #ifdef EXIT_IGNORE_STACK
9051 if (optimize > 0
9052 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9053 && EXIT_IGNORE_STACK
9054 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9055 && ! flag_inline_functions)
9056 {
9057 stack_pointer_delta -= pending_stack_adjust,
9058 pending_stack_adjust = 0;
9059 }
9060 #endif
9061 }
9062
9063 /* Pop any previously-pushed arguments that have not been popped yet. */
9064
9065 void
9066 do_pending_stack_adjust ()
9067 {
9068 if (inhibit_defer_pop == 0)
9069 {
9070 if (pending_stack_adjust != 0)
9071 adjust_stack (GEN_INT (pending_stack_adjust));
9072 pending_stack_adjust = 0;
9073 }
9074 }
9075 \f
9076 /* Expand conditional expressions. */
9077
9078 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9079 LABEL is an rtx of code CODE_LABEL, in this function and all the
9080 functions here. */
9081
9082 void
9083 jumpifnot (exp, label)
9084 tree exp;
9085 rtx label;
9086 {
9087 do_jump (exp, label, NULL_RTX);
9088 }
9089
9090 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9091
9092 void
9093 jumpif (exp, label)
9094 tree exp;
9095 rtx label;
9096 {
9097 do_jump (exp, NULL_RTX, label);
9098 }
9099
9100 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9101 the result is zero, or IF_TRUE_LABEL if the result is one.
9102 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9103 meaning fall through in that case.
9104
9105 do_jump always does any pending stack adjust except when it does not
9106 actually perform a jump. An example where there is no jump
9107 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9108
9109 This function is responsible for optimizing cases such as
9110 &&, || and comparison operators in EXP. */
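/* For example (a sketch, not the exact insns): `if (a && b) ...' is
   typically expanded via jumpifnot on the TRUTH_ANDIF_EXPR, giving
   roughly

       do_jump (a, false_label, 0);
       do_jump (b, false_label, 0);
       ...then-clause...
     false_label:

   so no 0/1 value for the && is ever materialized.  */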
9111
9112 void
9113 do_jump (exp, if_false_label, if_true_label)
9114 tree exp;
9115 rtx if_false_label, if_true_label;
9116 {
9117 enum tree_code code = TREE_CODE (exp);
9118 /* Some cases need to create a label to jump to
9119 in order to properly fall through.
9120 These cases set DROP_THROUGH_LABEL nonzero. */
9121 rtx drop_through_label = 0;
9122 rtx temp;
9123 int i;
9124 tree type;
9125 enum machine_mode mode;
9126
9127 #ifdef MAX_INTEGER_COMPUTATION_MODE
9128 check_max_integer_computation_mode (exp);
9129 #endif
9130
9131 emit_queue ();
9132
9133 switch (code)
9134 {
9135 case ERROR_MARK:
9136 break;
9137
9138 case INTEGER_CST:
9139 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9140 if (temp)
9141 emit_jump (temp);
9142 break;
9143
9144 #if 0
 9145 /* This is not true with #pragma weak.  */
9146 case ADDR_EXPR:
9147 /* The address of something can never be zero. */
9148 if (if_true_label)
9149 emit_jump (if_true_label);
9150 break;
9151 #endif
9152
9153 case NOP_EXPR:
9154 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9155 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9156 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9157 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9158 goto normal;
9159 case CONVERT_EXPR:
9160 /* If we are narrowing the operand, we have to do the compare in the
9161 narrower mode. */
9162 if ((TYPE_PRECISION (TREE_TYPE (exp))
9163 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9164 goto normal;
9165 case NON_LVALUE_EXPR:
9166 case REFERENCE_EXPR:
9167 case ABS_EXPR:
9168 case NEGATE_EXPR:
9169 case LROTATE_EXPR:
9170 case RROTATE_EXPR:
9171 /* These cannot change zero->non-zero or vice versa. */
9172 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9173 break;
9174
9175 case WITH_RECORD_EXPR:
9176 /* Put the object on the placeholder list, recurse through our first
9177 operand, and pop the list. */
9178 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9179 placeholder_list);
9180 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9181 placeholder_list = TREE_CHAIN (placeholder_list);
9182 break;
9183
9184 #if 0
 9185 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9186 a test and can be longer if the test is eliminated. */
9187 case PLUS_EXPR:
9188 /* Reduce to minus. */
9189 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9190 TREE_OPERAND (exp, 0),
9191 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9192 TREE_OPERAND (exp, 1))));
9193 /* Process as MINUS. */
9194 #endif
9195
9196 case MINUS_EXPR:
9197 /* Non-zero iff operands of minus differ. */
9198 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9199 TREE_OPERAND (exp, 0),
9200 TREE_OPERAND (exp, 1)),
9201 NE, NE, if_false_label, if_true_label);
9202 break;
9203
9204 case BIT_AND_EXPR:
9205 /* If we are AND'ing with a small constant, do this comparison in the
9206 smallest type that fits. If the machine doesn't have comparisons
9207 that small, it will be converted back to the wider comparison.
9208 This helps if we are testing the sign bit of a narrower object.
9209 combine can't do this for us because it can't know whether a
9210 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
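      /* For example, assuming the target has QImode compares: for
	 `if (x & 0x80)' with X a full int, I is 7, so we pick an 8-bit
	 unsigned type and jump on `(unsigned char) (x & 0x80)' instead,
	 turning the wide test into a cheap QImode sign-bit test.  */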
9211
9212 if (! SLOW_BYTE_ACCESS
9213 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9214 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9215 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9216 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9217 && (type = type_for_mode (mode, 1)) != 0
9218 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9219 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9220 != CODE_FOR_nothing))
9221 {
9222 do_jump (convert (type, exp), if_false_label, if_true_label);
9223 break;
9224 }
9225 goto normal;
9226
9227 case TRUTH_NOT_EXPR:
9228 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9229 break;
9230
9231 case TRUTH_ANDIF_EXPR:
9232 if (if_false_label == 0)
9233 if_false_label = drop_through_label = gen_label_rtx ();
9234 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9235 start_cleanup_deferral ();
9236 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9237 end_cleanup_deferral ();
9238 break;
9239
9240 case TRUTH_ORIF_EXPR:
9241 if (if_true_label == 0)
9242 if_true_label = drop_through_label = gen_label_rtx ();
9243 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9244 start_cleanup_deferral ();
9245 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9246 end_cleanup_deferral ();
9247 break;
9248
9249 case COMPOUND_EXPR:
9250 push_temp_slots ();
9251 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9252 preserve_temp_slots (NULL_RTX);
9253 free_temp_slots ();
9254 pop_temp_slots ();
9255 emit_queue ();
9256 do_pending_stack_adjust ();
9257 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9258 break;
9259
9260 case COMPONENT_REF:
9261 case BIT_FIELD_REF:
9262 case ARRAY_REF:
9263 case ARRAY_RANGE_REF:
9264 {
9265 HOST_WIDE_INT bitsize, bitpos;
9266 int unsignedp;
9267 enum machine_mode mode;
9268 tree type;
9269 tree offset;
9270 int volatilep = 0;
9271
9272 /* Get description of this reference. We don't actually care
9273 about the underlying object here. */
9274 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9275 &unsignedp, &volatilep);
9276
9277 type = type_for_size (bitsize, unsignedp);
9278 if (! SLOW_BYTE_ACCESS
9279 && type != 0 && bitsize >= 0
9280 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9281 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9282 != CODE_FOR_nothing))
9283 {
9284 do_jump (convert (type, exp), if_false_label, if_true_label);
9285 break;
9286 }
9287 goto normal;
9288 }
9289
9290 case COND_EXPR:
9291 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9292 if (integer_onep (TREE_OPERAND (exp, 1))
9293 && integer_zerop (TREE_OPERAND (exp, 2)))
9294 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9295
9296 else if (integer_zerop (TREE_OPERAND (exp, 1))
9297 && integer_onep (TREE_OPERAND (exp, 2)))
9298 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9299
9300 else
9301 {
9302 rtx label1 = gen_label_rtx ();
9303 drop_through_label = gen_label_rtx ();
9304
9305 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9306
9307 start_cleanup_deferral ();
9308 /* Now the THEN-expression. */
9309 do_jump (TREE_OPERAND (exp, 1),
9310 if_false_label ? if_false_label : drop_through_label,
9311 if_true_label ? if_true_label : drop_through_label);
9312 /* In case the do_jump just above never jumps. */
9313 do_pending_stack_adjust ();
9314 emit_label (label1);
9315
9316 /* Now the ELSE-expression. */
9317 do_jump (TREE_OPERAND (exp, 2),
9318 if_false_label ? if_false_label : drop_through_label,
9319 if_true_label ? if_true_label : drop_through_label);
9320 end_cleanup_deferral ();
9321 }
9322 break;
9323
9324 case EQ_EXPR:
9325 {
9326 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9327
9328 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9329 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9330 {
9331 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9332 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
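	    /* Two complex values are equal iff both their real parts and
	       their imaginary parts are equal, so rewrite the test as
	       REALPART (exp0) == REALPART (exp1)
	       && IMAGPART (exp0) == IMAGPART (exp1)
	       (the SAVE_EXPRs above keep each operand from being expanded
	       twice) and jump on that instead.  */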
9333 do_jump
9334 (fold
9335 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9336 fold (build (EQ_EXPR, TREE_TYPE (exp),
9337 fold (build1 (REALPART_EXPR,
9338 TREE_TYPE (inner_type),
9339 exp0)),
9340 fold (build1 (REALPART_EXPR,
9341 TREE_TYPE (inner_type),
9342 exp1)))),
9343 fold (build (EQ_EXPR, TREE_TYPE (exp),
9344 fold (build1 (IMAGPART_EXPR,
9345 TREE_TYPE (inner_type),
9346 exp0)),
9347 fold (build1 (IMAGPART_EXPR,
9348 TREE_TYPE (inner_type),
9349 exp1)))))),
9350 if_false_label, if_true_label);
9351 }
9352
9353 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9354 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9355
9356 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9357 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9358 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9359 else
9360 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9361 break;
9362 }
9363
9364 case NE_EXPR:
9365 {
9366 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9367
9368 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9369 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9370 {
9371 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9372 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9373 do_jump
9374 (fold
9375 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9376 fold (build (NE_EXPR, TREE_TYPE (exp),
9377 fold (build1 (REALPART_EXPR,
9378 TREE_TYPE (inner_type),
9379 exp0)),
9380 fold (build1 (REALPART_EXPR,
9381 TREE_TYPE (inner_type),
9382 exp1)))),
9383 fold (build (NE_EXPR, TREE_TYPE (exp),
9384 fold (build1 (IMAGPART_EXPR,
9385 TREE_TYPE (inner_type),
9386 exp0)),
9387 fold (build1 (IMAGPART_EXPR,
9388 TREE_TYPE (inner_type),
9389 exp1)))))),
9390 if_false_label, if_true_label);
9391 }
9392
9393 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9394 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9395
9396 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9397 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9398 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9399 else
9400 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9401 break;
9402 }
9403
9404 case LT_EXPR:
9405 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9406 if (GET_MODE_CLASS (mode) == MODE_INT
9407 && ! can_compare_p (LT, mode, ccp_jump))
9408 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9409 else
9410 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9411 break;
9412
9413 case LE_EXPR:
9414 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9415 if (GET_MODE_CLASS (mode) == MODE_INT
9416 && ! can_compare_p (LE, mode, ccp_jump))
9417 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9418 else
9419 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9420 break;
9421
9422 case GT_EXPR:
9423 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9424 if (GET_MODE_CLASS (mode) == MODE_INT
9425 && ! can_compare_p (GT, mode, ccp_jump))
9426 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9427 else
9428 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9429 break;
9430
9431 case GE_EXPR:
9432 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9433 if (GET_MODE_CLASS (mode) == MODE_INT
9434 && ! can_compare_p (GE, mode, ccp_jump))
9435 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9436 else
9437 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9438 break;
9439
9440 case UNORDERED_EXPR:
9441 case ORDERED_EXPR:
9442 {
9443 enum rtx_code cmp, rcmp;
9444 int do_rev;
9445
9446 if (code == UNORDERED_EXPR)
9447 cmp = UNORDERED, rcmp = ORDERED;
9448 else
9449 cmp = ORDERED, rcmp = UNORDERED;
9450 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9451
9452 do_rev = 0;
9453 if (! can_compare_p (cmp, mode, ccp_jump)
9454 && (can_compare_p (rcmp, mode, ccp_jump)
9455 /* If the target doesn't provide either UNORDERED or ORDERED
9456 comparisons, canonicalize on UNORDERED for the library. */
9457 || rcmp == UNORDERED))
9458 do_rev = 1;
9459
9460 if (! do_rev)
9461 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9462 else
9463 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9464 }
9465 break;
9466
9467 {
9468 enum rtx_code rcode1;
9469 enum tree_code tcode2;
9470
9471 case UNLT_EXPR:
9472 rcode1 = UNLT;
9473 tcode2 = LT_EXPR;
9474 goto unordered_bcc;
9475 case UNLE_EXPR:
9476 rcode1 = UNLE;
9477 tcode2 = LE_EXPR;
9478 goto unordered_bcc;
9479 case UNGT_EXPR:
9480 rcode1 = UNGT;
9481 tcode2 = GT_EXPR;
9482 goto unordered_bcc;
9483 case UNGE_EXPR:
9484 rcode1 = UNGE;
9485 tcode2 = GE_EXPR;
9486 goto unordered_bcc;
9487 case UNEQ_EXPR:
9488 rcode1 = UNEQ;
9489 tcode2 = EQ_EXPR;
9490 goto unordered_bcc;
9491
9492 unordered_bcc:
9493 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9494 if (can_compare_p (rcode1, mode, ccp_jump))
9495 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9496 if_true_label);
9497 else
9498 {
9499 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9500 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9501 tree cmp0, cmp1;
9502
9503 /* If the target doesn't support combined unordered
9504 compares, decompose into UNORDERED + comparison. */
9505 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9506 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9507 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9508 do_jump (exp, if_false_label, if_true_label);
9509 }
9510 }
9511 break;
9512
9513 /* Special case:
9514 __builtin_expect (<test>, 0) and
9515 __builtin_expect (<test>, 1)
9516
9517 We need to do this here, so that <test> is not converted to a SCC
9518 operation on machines that use condition code registers and COMPARE
9519 like the PowerPC, and then the jump is done based on whether the SCC
9520 operation produced a 1 or 0. */
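  /* E.g. (illustrative) for `if (__builtin_expect (p != 0, 1))',
     expand_builtin_expect_jump emits the compare-and-branch for
     `p != 0' directly, annotated with the expected outcome, instead of
     first computing a 0/1 value and then jumping on that value.  */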
9521 case CALL_EXPR:
9522 /* Check for a built-in function. */
9523 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9524 {
9525 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9526 tree arglist = TREE_OPERAND (exp, 1);
9527
9528 if (TREE_CODE (fndecl) == FUNCTION_DECL
9529 && DECL_BUILT_IN (fndecl)
9530 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9531 && arglist != NULL_TREE
9532 && TREE_CHAIN (arglist) != NULL_TREE)
9533 {
9534 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9535 if_true_label);
9536
9537 if (seq != NULL_RTX)
9538 {
9539 emit_insn (seq);
9540 return;
9541 }
9542 }
9543 }
9544 /* fall through and generate the normal code. */
9545
9546 default:
9547 normal:
9548 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9549 #if 0
9550 /* This is not needed any more and causes poor code since it causes
9551 comparisons and tests from non-SI objects to have different code
9552 sequences. */
9553 /* Copy to register to avoid generating bad insns by cse
9554 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9555 if (!cse_not_expected && GET_CODE (temp) == MEM)
9556 temp = copy_to_reg (temp);
9557 #endif
9558 do_pending_stack_adjust ();
9559 /* Do any postincrements in the expression that was tested. */
9560 emit_queue ();
9561
9562 if (GET_CODE (temp) == CONST_INT
9563 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9564 || GET_CODE (temp) == LABEL_REF)
9565 {
9566 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9567 if (target)
9568 emit_jump (target);
9569 }
9570 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9571 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9572 /* Note swapping the labels gives us not-equal. */
9573 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9574 else if (GET_MODE (temp) != VOIDmode)
9575 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9576 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9577 GET_MODE (temp), NULL_RTX,
9578 if_false_label, if_true_label);
9579 else
9580 abort ();
9581 }
9582
9583 if (drop_through_label)
9584 {
9585 /* If do_jump produces code that might be jumped around,
9586 do any stack adjusts from that code, before the place
9587 where control merges in. */
9588 do_pending_stack_adjust ();
9589 emit_label (drop_through_label);
9590 }
9591 }
9592 \f
9593 /* Given a comparison expression EXP for values too wide to be compared
9594 with one insn, test the comparison and jump to the appropriate label.
9595 The code of EXP is ignored; we always test GT if SWAP is 0,
9596 and LT if SWAP is 1. */
9597
9598 static void
9599 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9600 tree exp;
9601 int swap;
9602 rtx if_false_label, if_true_label;
9603 {
9604 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9605 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9606 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9607 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9608
9609 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9610 }
9611
9612 /* Compare OP0 with OP1, word at a time, in mode MODE.
9613 UNSIGNEDP says to do unsigned comparison.
9614 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
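/* For instance, for a DImode comparison on a 32-bit target this emits,
   roughly: compare the high-order words and jump to IF_TRUE_LABEL if
   OP0's word is greater, to IF_FALSE_LABEL if it is less; if they are
   equal, fall through and do the same with the low-order words
   (compared unsigned); if every word pair is equal, OP0 is not
   greater, so we jump to IF_FALSE_LABEL.  */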
9615
9616 void
9617 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9618 enum machine_mode mode;
9619 int unsignedp;
9620 rtx op0, op1;
9621 rtx if_false_label, if_true_label;
9622 {
9623 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9624 rtx drop_through_label = 0;
9625 int i;
9626
9627 if (! if_true_label || ! if_false_label)
9628 drop_through_label = gen_label_rtx ();
9629 if (! if_true_label)
9630 if_true_label = drop_through_label;
9631 if (! if_false_label)
9632 if_false_label = drop_through_label;
9633
9634 /* Compare a word at a time, high order first. */
9635 for (i = 0; i < nwords; i++)
9636 {
9637 rtx op0_word, op1_word;
9638
9639 if (WORDS_BIG_ENDIAN)
9640 {
9641 op0_word = operand_subword_force (op0, i, mode);
9642 op1_word = operand_subword_force (op1, i, mode);
9643 }
9644 else
9645 {
9646 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9647 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9648 }
9649
9650 /* All but high-order word must be compared as unsigned. */
9651 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9652 (unsignedp || i > 0), word_mode, NULL_RTX,
9653 NULL_RTX, if_true_label);
9654
9655 /* Consider lower words only if these are equal. */
9656 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9657 NULL_RTX, NULL_RTX, if_false_label);
9658 }
9659
9660 if (if_false_label)
9661 emit_jump (if_false_label);
9662 if (drop_through_label)
9663 emit_label (drop_through_label);
9664 }
9665
9666 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9667 with one insn, test the comparison and jump to the appropriate label. */
9668
9669 static void
9670 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9671 tree exp;
9672 rtx if_false_label, if_true_label;
9673 {
9674 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9675 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9676 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9677 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9678 int i;
9679 rtx drop_through_label = 0;
9680
9681 if (! if_false_label)
9682 drop_through_label = if_false_label = gen_label_rtx ();
9683
9684 for (i = 0; i < nwords; i++)
9685 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9686 operand_subword_force (op1, i, mode),
9687 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9688 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9689
9690 if (if_true_label)
9691 emit_jump (if_true_label);
9692 if (drop_through_label)
9693 emit_label (drop_through_label);
9694 }
9695 \f
9696 /* Jump according to whether OP0 is 0.
9697 We assume that OP0 has an integer mode that is too wide
9698 for the available compare insns. */
9699
9700 void
9701 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9702 rtx op0;
9703 rtx if_false_label, if_true_label;
9704 {
9705 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9706 rtx part;
9707 int i;
9708 rtx drop_through_label = 0;
9709
9710 /* The fastest way of doing this comparison on almost any machine is to
9711 "or" all the words and compare the result. If all have to be loaded
9712 from memory and this is a very wide item, it's possible this may
9713 be slower, but that's highly unlikely. */
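  /* E.g. for a DImode OP0 on a 32-bit target, this computes, roughly,
     part = word0 | word1 and then jumps on part == 0, instead of
     testing each word separately.  */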
9714
9715 part = gen_reg_rtx (word_mode);
9716 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9717 for (i = 1; i < nwords && part != 0; i++)
9718 part = expand_binop (word_mode, ior_optab, part,
9719 operand_subword_force (op0, i, GET_MODE (op0)),
9720 part, 1, OPTAB_WIDEN);
9721
9722 if (part != 0)
9723 {
9724 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9725 NULL_RTX, if_false_label, if_true_label);
9726
9727 return;
9728 }
9729
9730 /* If we couldn't do the "or" simply, do this with a series of compares. */
9731 if (! if_false_label)
9732 drop_through_label = if_false_label = gen_label_rtx ();
9733
9734 for (i = 0; i < nwords; i++)
9735 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9736 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9737 if_false_label, NULL_RTX);
9738
9739 if (if_true_label)
9740 emit_jump (if_true_label);
9741
9742 if (drop_through_label)
9743 emit_label (drop_through_label);
9744 }
9745 \f
9746 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9747 (including code to compute the values to be compared)
9748 and set (CC0) according to the result.
9749 The decision as to signed or unsigned comparison must be made by the caller.
9750
9751 We force a stack adjustment unless there are currently
9752 things pushed on the stack that aren't yet used.
9753
9754 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9755 compared. */
9756
9757 rtx
9758 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9759 rtx op0, op1;
9760 enum rtx_code code;
9761 int unsignedp;
9762 enum machine_mode mode;
9763 rtx size;
9764 {
9765 rtx tem;
9766
9767 /* If one operand is constant, make it the second one. Only do this
9768 if the other operand is not constant as well. */
9769
9770 if (swap_commutative_operands_p (op0, op1))
9771 {
9772 tem = op0;
9773 op0 = op1;
9774 op1 = tem;
9775 code = swap_condition (code);
9776 }
9777
9778 if (flag_force_mem)
9779 {
9780 op0 = force_not_mem (op0);
9781 op1 = force_not_mem (op1);
9782 }
9783
9784 do_pending_stack_adjust ();
9785
9786 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9787 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9788 return tem;
9789
9790 #if 0
9791 /* There's no need to do this now that combine.c can eliminate lots of
9792 sign extensions. This can be less efficient in certain cases on other
9793 machines. */
9794
9795 /* If this is a signed equality comparison, we can do it as an
9796 unsigned comparison since zero-extension is cheaper than sign
9797 extension and comparisons with zero are done as unsigned. This is
9798 the case even on machines that can do fast sign extension, since
9799 zero-extension is easier to combine with other operations than
9800 sign-extension is. If we are comparing against a constant, we must
9801 convert it to what it would look like unsigned. */
9802 if ((code == EQ || code == NE) && ! unsignedp
9803 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9804 {
9805 if (GET_CODE (op1) == CONST_INT
9806 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9807 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9808 unsignedp = 1;
9809 }
9810 #endif
9811
9812 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9813
9814 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9815 }
9816
9817 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9818 The decision as to signed or unsigned comparison must be made by the caller.
9819
9820 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9821 compared. */
9822
9823 void
9824 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9825 if_false_label, if_true_label)
9826 rtx op0, op1;
9827 enum rtx_code code;
9828 int unsignedp;
9829 enum machine_mode mode;
9830 rtx size;
9831 rtx if_false_label, if_true_label;
9832 {
9833 rtx tem;
9834 int dummy_true_label = 0;
9835
9836 /* Reverse the comparison if that is safe and we want to jump if it is
9837 false. */
9838 if (! if_true_label && ! FLOAT_MODE_P (mode))
9839 {
9840 if_true_label = if_false_label;
9841 if_false_label = 0;
9842 code = reverse_condition (code);
9843 }
9844
9845 /* If one operand is constant, make it the second one. Only do this
9846 if the other operand is not constant as well. */
9847
9848 if (swap_commutative_operands_p (op0, op1))
9849 {
9850 tem = op0;
9851 op0 = op1;
9852 op1 = tem;
9853 code = swap_condition (code);
9854 }
9855
9856 if (flag_force_mem)
9857 {
9858 op0 = force_not_mem (op0);
9859 op1 = force_not_mem (op1);
9860 }
9861
9862 do_pending_stack_adjust ();
9863
9864 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9865 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9866 {
9867 if (tem == const_true_rtx)
9868 {
9869 if (if_true_label)
9870 emit_jump (if_true_label);
9871 }
9872 else
9873 {
9874 if (if_false_label)
9875 emit_jump (if_false_label);
9876 }
9877 return;
9878 }
9879
9880 #if 0
9881 /* There's no need to do this now that combine.c can eliminate lots of
9882 sign extensions. This can be less efficient in certain cases on other
9883 machines. */
9884
9885 /* If this is a signed equality comparison, we can do it as an
9886 unsigned comparison since zero-extension is cheaper than sign
9887 extension and comparisons with zero are done as unsigned. This is
9888 the case even on machines that can do fast sign extension, since
9889 zero-extension is easier to combine with other operations than
9890 sign-extension is. If we are comparing against a constant, we must
9891 convert it to what it would look like unsigned. */
9892 if ((code == EQ || code == NE) && ! unsignedp
9893 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9894 {
9895 if (GET_CODE (op1) == CONST_INT
9896 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9897 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9898 unsignedp = 1;
9899 }
9900 #endif
9901
9902 if (! if_true_label)
9903 {
9904 dummy_true_label = 1;
9905 if_true_label = gen_label_rtx ();
9906 }
9907
9908 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9909 if_true_label);
9910
9911 if (if_false_label)
9912 emit_jump (if_false_label);
9913 if (dummy_true_label)
9914 emit_label (if_true_label);
9915 }
9916
9917 /* Generate code for a comparison expression EXP (including code to compute
9918 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9919 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9920 generated code will drop through.
9921 SIGNED_CODE should be the rtx operation for this comparison for
9922 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9923
9924 We force a stack adjustment unless there are currently
9925 things pushed on the stack that aren't yet used. */
9926
9927 static void
9928 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9929 if_true_label)
9930 tree exp;
9931 enum rtx_code signed_code, unsigned_code;
9932 rtx if_false_label, if_true_label;
9933 {
9934 rtx op0, op1;
9935 tree type;
9936 enum machine_mode mode;
9937 int unsignedp;
9938 enum rtx_code code;
9939
9940 /* Don't crash if the comparison was erroneous. */
9941 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9943 return;
9944
9945 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9946 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9947 return;
9948
9949 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9950 mode = TYPE_MODE (type);
9951 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9952 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9953 || (GET_MODE_BITSIZE (mode)
9954 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9955 1)))))))
9956 {
 9957       /* op0 might have been replaced by a promoted constant, in which
 9958 	 case the type of the second argument should be used.  */
9959 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9960 mode = TYPE_MODE (type);
9961 }
9962 unsignedp = TREE_UNSIGNED (type);
9963 code = unsignedp ? unsigned_code : signed_code;
9964
9965 #ifdef HAVE_canonicalize_funcptr_for_compare
9966 /* If function pointers need to be "canonicalized" before they can
9967 be reliably compared, then canonicalize them. */
9968 if (HAVE_canonicalize_funcptr_for_compare
9969 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9970 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9971 == FUNCTION_TYPE))
9972 {
9973 rtx new_op0 = gen_reg_rtx (mode);
9974
9975 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9976 op0 = new_op0;
9977 }
9978
9979 if (HAVE_canonicalize_funcptr_for_compare
9980 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9981 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9982 == FUNCTION_TYPE))
9983 {
9984 rtx new_op1 = gen_reg_rtx (mode);
9985
9986 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9987 op1 = new_op1;
9988 }
9989 #endif
9990
9991 /* Do any postincrements in the expression that was tested. */
9992 emit_queue ();
9993
9994 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9995 ((mode == BLKmode)
9996 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9997 if_false_label, if_true_label);
9998 }
9999 \f
10000 /* Generate code to calculate EXP using a store-flag instruction
10001 and return an rtx for the result. EXP is either a comparison
10002 or a TRUTH_NOT_EXPR whose operand is a comparison.
10003
10004 If TARGET is nonzero, store the result there if convenient.
10005
10006 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10007 cheap.
10008
10009 Return zero if there is no suitable set-flag instruction
10010 available on this machine.
10011
10012 Once expand_expr has been called on the arguments of the comparison,
10013 we are committed to doing the store flag, since it is not safe to
10014 re-evaluate the expression. We emit the store-flag insn by calling
10015 emit_store_flag, but only expand the arguments if we have a reason
10016 to believe that emit_store_flag will be successful. If we think that
10017 it will, but it isn't, we have to simulate the store-flag with a
10018 set/jump/set sequence. */
10019
10020 static rtx
10021 do_store_flag (exp, target, mode, only_cheap)
10022 tree exp;
10023 rtx target;
10024 enum machine_mode mode;
10025 int only_cheap;
10026 {
10027 enum rtx_code code;
10028 tree arg0, arg1, type;
10029 tree tem;
10030 enum machine_mode operand_mode;
10031 int invert = 0;
10032 int unsignedp;
10033 rtx op0, op1;
10034 enum insn_code icode;
10035 rtx subtarget = target;
10036 rtx result, label;
10037
10038 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10039 result at the end. We can't simply invert the test since it would
10040 have already been inverted if it were valid. This case occurs for
10041 some floating-point comparisons. */
10042
10043 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10044 invert = 1, exp = TREE_OPERAND (exp, 0);
10045
10046 arg0 = TREE_OPERAND (exp, 0);
10047 arg1 = TREE_OPERAND (exp, 1);
10048
10049 /* Don't crash if the comparison was erroneous. */
10050 if (arg0 == error_mark_node || arg1 == error_mark_node)
10051 return const0_rtx;
10052
10053 type = TREE_TYPE (arg0);
10054 operand_mode = TYPE_MODE (type);
10055 unsignedp = TREE_UNSIGNED (type);
10056
10057 /* We won't bother with BLKmode store-flag operations because it would mean
10058 passing a lot of information to emit_store_flag. */
10059 if (operand_mode == BLKmode)
10060 return 0;
10061
10062 /* We won't bother with store-flag operations involving function pointers
10063 when function pointers must be canonicalized before comparisons. */
10064 #ifdef HAVE_canonicalize_funcptr_for_compare
10065 if (HAVE_canonicalize_funcptr_for_compare
10066 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10067 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10068 == FUNCTION_TYPE))
10069 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10070 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10071 == FUNCTION_TYPE))))
10072 return 0;
10073 #endif
10074
10075 STRIP_NOPS (arg0);
10076 STRIP_NOPS (arg1);
10077
10078 /* Get the rtx comparison code to use. We know that EXP is a comparison
10079 operation of some type. Some comparisons against 1 and -1 can be
10080 converted to comparisons with zero. Do so here so that the tests
10081 below will be aware that we have a comparison with zero. These
10082 tests will not catch constants in the first operand, but constants
10083 are rarely passed as the first operand. */
10084
10085 switch (TREE_CODE (exp))
10086 {
10087 case EQ_EXPR:
10088 code = EQ;
10089 break;
10090 case NE_EXPR:
10091 code = NE;
10092 break;
10093 case LT_EXPR:
10094 if (integer_onep (arg1))
10095 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10096 else
10097 code = unsignedp ? LTU : LT;
10098 break;
10099 case LE_EXPR:
10100 if (! unsignedp && integer_all_onesp (arg1))
10101 arg1 = integer_zero_node, code = LT;
10102 else
10103 code = unsignedp ? LEU : LE;
10104 break;
10105 case GT_EXPR:
10106 if (! unsignedp && integer_all_onesp (arg1))
10107 arg1 = integer_zero_node, code = GE;
10108 else
10109 code = unsignedp ? GTU : GT;
10110 break;
10111 case GE_EXPR:
10112 if (integer_onep (arg1))
10113 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10114 else
10115 code = unsignedp ? GEU : GE;
10116 break;
10117
10118 case UNORDERED_EXPR:
10119 code = UNORDERED;
10120 break;
10121 case ORDERED_EXPR:
10122 code = ORDERED;
10123 break;
10124 case UNLT_EXPR:
10125 code = UNLT;
10126 break;
10127 case UNLE_EXPR:
10128 code = UNLE;
10129 break;
10130 case UNGT_EXPR:
10131 code = UNGT;
10132 break;
10133 case UNGE_EXPR:
10134 code = UNGE;
10135 break;
10136 case UNEQ_EXPR:
10137 code = UNEQ;
10138 break;
10139
10140 default:
10141 abort ();
10142 }
10143
10144 /* Put a constant second. */
10145 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10146 {
10147 tem = arg0; arg0 = arg1; arg1 = tem;
10148 code = swap_condition (code);
10149 }
10150
10151 /* If this is an equality or inequality test of a single bit, we can
10152 do this by shifting the bit being tested to the low-order bit and
10153 masking the result with the constant 1. If the condition was EQ,
10154 we xor it with 1. This does not require an scc insn and is faster
10155 than an scc insn even if we have it. */
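  /* For example (a sketch): `(x & 8) != 0' becomes `(x >> 3) & 1', and
     `(x & 8) == 0' becomes `((x >> 3) ^ 1) & 1'; the final AND is
     omitted when the bit tested is the sign bit, since the logical
     shift then already leaves only that bit.  */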
10156
10157 if ((code == NE || code == EQ)
10158 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10159 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10160 {
10161 tree inner = TREE_OPERAND (arg0, 0);
10162 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10163 int ops_unsignedp;
10164
 10165       /* If INNER is a right shift by a constant and the shift count plus
 10166 	 BITNUM does not overflow, adjust BITNUM and INNER.  */
10167
10168 if (TREE_CODE (inner) == RSHIFT_EXPR
10169 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10170 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10171 && bitnum < TYPE_PRECISION (type)
10172 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10173 bitnum - TYPE_PRECISION (type)))
10174 {
10175 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10176 inner = TREE_OPERAND (inner, 0);
10177 }
10178
10179 /* If we are going to be able to omit the AND below, we must do our
10180 operations as unsigned. If we must use the AND, we have a choice.
10181 Normally unsigned is faster, but for some machines signed is. */
10182 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10183 #ifdef LOAD_EXTEND_OP
10184 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10185 #else
10186 : 1
10187 #endif
10188 );
10189
10190 if (! get_subtarget (subtarget)
10191 || GET_MODE (subtarget) != operand_mode
10192 || ! safe_from_p (subtarget, inner, 1))
10193 subtarget = 0;
10194
10195 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10196
10197 if (bitnum != 0)
10198 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10199 size_int (bitnum), subtarget, ops_unsignedp);
10200
10201 if (GET_MODE (op0) != mode)
10202 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10203
10204 if ((code == EQ && ! invert) || (code == NE && invert))
10205 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10206 ops_unsignedp, OPTAB_LIB_WIDEN);
10207
10208 /* Put the AND last so it can combine with more things. */
10209 if (bitnum != TYPE_PRECISION (type) - 1)
10210 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10211
10212 return op0;
10213 }
10214
10215 /* Now see if we are likely to be able to do this. Return if not. */
10216 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10217 return 0;
10218
10219 icode = setcc_gen_code[(int) code];
10220 if (icode == CODE_FOR_nothing
10221 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10222 {
10223 /* We can only do this if it is one of the special cases that
10224 can be handled without an scc insn. */
10225 if ((code == LT && integer_zerop (arg1))
10226 || (! only_cheap && code == GE && integer_zerop (arg1)))
10227 ;
10228 else if (BRANCH_COST >= 0
10229 && ! only_cheap && (code == NE || code == EQ)
10230 && TREE_CODE (type) != REAL_TYPE
10231 && ((abs_optab->handlers[(int) operand_mode].insn_code
10232 != CODE_FOR_nothing)
10233 || (ffs_optab->handlers[(int) operand_mode].insn_code
10234 != CODE_FOR_nothing)))
10235 ;
10236 else
10237 return 0;
10238 }
10239
10240 if (! get_subtarget (target)
10241 || GET_MODE (subtarget) != operand_mode
10242 || ! safe_from_p (subtarget, arg1, 1))
10243 subtarget = 0;
10244
10245 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10246 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10247
10248 if (target == 0)
10249 target = gen_reg_rtx (mode);
10250
10251 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10252 because, if the emit_store_flag does anything it will succeed and
10253 OP0 and OP1 will not be used subsequently. */
10254
10255 result = emit_store_flag (target, code,
10256 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10257 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10258 operand_mode, unsignedp, 1);
10259
10260 if (result)
10261 {
10262 if (invert)
10263 result = expand_binop (mode, xor_optab, result, const1_rtx,
10264 result, 0, OPTAB_LIB_WIDEN);
10265 return result;
10266 }
10267
10268 /* If this failed, we have to do this with set/compare/jump/set code. */
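  /* I.e., roughly:

	 target = 1;
	 if (op0 <code> op1) goto label;
	 target = 0;
       label:

     with the two constants interchanged when INVERT is set.  */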
10269 if (GET_CODE (target) != REG
10270 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10271 target = gen_reg_rtx (GET_MODE (target));
10272
10273 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10274 result = compare_from_rtx (op0, op1, code, unsignedp,
10275 operand_mode, NULL_RTX);
10276 if (GET_CODE (result) == CONST_INT)
10277 return (((result == const0_rtx && ! invert)
10278 || (result != const0_rtx && invert))
10279 ? const0_rtx : const1_rtx);
10280
10281 /* The code of RESULT may not match CODE if compare_from_rtx
10282 decided to swap its operands and reverse the original code.
10283
10284 We know that compare_from_rtx returns either a CONST_INT or
10285 a new comparison code, so it is safe to just extract the
10286 code from RESULT. */
10287 code = GET_CODE (result);
10288
10289 label = gen_label_rtx ();
10290 if (bcc_gen_fctn[(int) code] == 0)
10291 abort ();
10292
10293 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10294 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10295 emit_label (label);
10296
10297 return target;
10298 }
10299 \f
10300
10301 /* Stubs in case we haven't got a casesi insn. */
10302 #ifndef HAVE_casesi
10303 # define HAVE_casesi 0
10304 # define gen_casesi(a, b, c, d, e) (0)
10305 # define CODE_FOR_casesi CODE_FOR_nothing
10306 #endif
10307
10308 /* If the machine does not have a case insn that compares the bounds,
10309 this means extra overhead for dispatch tables, which raises the
10310 threshold for using them. */
10311 #ifndef CASE_VALUES_THRESHOLD
10312 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10313 #endif /* CASE_VALUES_THRESHOLD */
10314
10315 unsigned int
10316 case_values_threshold ()
10317 {
10318 return CASE_VALUES_THRESHOLD;
10319 }
10320
10321 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10322 0 otherwise (i.e. if there is no casesi instruction). */
10323 int
10324 try_casesi (index_type, index_expr, minval, range,
10325 table_label, default_label)
10326 tree index_type, index_expr, minval, range;
10327 rtx table_label ATTRIBUTE_UNUSED;
10328 rtx default_label;
10329 {
10330 enum machine_mode index_mode = SImode;
10331 int index_bits = GET_MODE_BITSIZE (index_mode);
10332 rtx op1, op2, index;
10333 enum machine_mode op_mode;
10334
10335 if (! HAVE_casesi)
10336 return 0;
10337
10338 /* Convert the index to SImode. */
10339 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10340 {
10341 enum machine_mode omode = TYPE_MODE (index_type);
10342 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10343
10344 /* We must handle the endpoints in the original mode. */
10345 index_expr = build (MINUS_EXPR, index_type,
10346 index_expr, minval);
10347 minval = integer_zero_node;
10348 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10349 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10350 omode, 1, default_label);
10351 /* Now we can safely truncate. */
10352 index = convert_to_mode (index_mode, index, 0);
10353 }
10354 else
10355 {
10356 if (TYPE_MODE (index_type) != index_mode)
10357 {
10358 index_expr = convert (type_for_size (index_bits, 0),
10359 index_expr);
10360 index_type = TREE_TYPE (index_expr);
10361 }
10362
10363 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10364 }
10365 emit_queue ();
10366 index = protect_from_queue (index, 0);
10367 do_pending_stack_adjust ();
10368
10369 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10370 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10371 (index, op_mode))
10372 index = copy_to_mode_reg (op_mode, index);
10373
10374 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10375
10376 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10377 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10378 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10379 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10380 (op1, op_mode))
10381 op1 = copy_to_mode_reg (op_mode, op1);
10382
10383 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10384
10385 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10386 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10387 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10388 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10389 (op2, op_mode))
10390 op2 = copy_to_mode_reg (op_mode, op2);
10391
10392 emit_jump_insn (gen_casesi (index, op1, op2,
10393 table_label, default_label));
10394 return 1;
10395 }
10396
10397 /* Attempt to generate a tablejump instruction; same concept as try_casesi above. */
10398 #ifndef HAVE_tablejump
10399 #define HAVE_tablejump 0
10400 #define gen_tablejump(x, y) (0)
10401 #endif
10402
10403 /* Subroutine of the next function.
10404
10405 INDEX is the value being switched on, with the lowest value
10406 in the table already subtracted.
10407 MODE is its expected mode (needed if INDEX is constant).
10408 RANGE is the length of the jump table.
10409 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10410
10411 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10412 index value is out of range. */
10413
10414 static void
10415 do_tablejump (index, mode, range, table_label, default_label)
10416 rtx index, range, table_label, default_label;
10417 enum machine_mode mode;
10418 {
10419 rtx temp, vector;
10420
10421 /* Do an unsigned comparison (in the proper mode) between the index
10422 expression and the value which represents the length of the range.
10423 Since we just finished subtracting the lower bound of the range
10424 from the index expression, this comparison allows us to simultaneously
10425 check that the original index expression value is both greater than
10426 or equal to the minimum value of the range and less than or equal to
10427 the maximum value of the range. */
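/* A small worked example (purely illustrative): for case values
   3 .. 10 the caller passes RANGE = 7 and has already subtracted 3
   from INDEX.  An original value of 2 becomes -1, which as an unsigned
   number is huge and therefore greater than 7; an original value of 11
   becomes 8, also greater than 7; both branch to DEFAULT_LABEL.  Any
   original value of 3 .. 10 becomes 0 .. 7 and falls through to the
   table lookup.  */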
10428
10429 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10430 default_label);
10431
10432 /* If index is in range, it must fit in Pmode.
10433 Convert to Pmode so we can index with it. */
10434 if (mode != Pmode)
10435 index = convert_to_mode (Pmode, index, 1);
10436
10437 /* Don't let a MEM slip through, because then the INDEX that comes
10438 out of PIC_CASE_VECTOR_ADDRESS would not be a valid address,
10439 and break_out_memory_refs would go to work on it and mangle it. */
10440 #ifdef PIC_CASE_VECTOR_ADDRESS
10441 if (flag_pic && GET_CODE (index) != REG)
10442 index = copy_to_mode_reg (Pmode, index);
10443 #endif
10444
10445 /* If flag_force_addr were to affect this address
10446 it could interfere with the tricky assumptions made
10447 about addresses that contain label-refs,
10448 which may be valid only very near the tablejump itself. */
10449 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10450 GET_MODE_SIZE, because this indicates how large insns are. The other
10451 uses should all be Pmode, because they are addresses. This code
10452 could fail if addresses and insns are not the same size. */
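/* The address computed below is, in effect (illustration only):

	entry_addr = &table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE);

   i.e. the start of the dispatch table plus the scaled index.  On PIC
   targets, PIC_CASE_VECTOR_ADDRESS may then rewrite that sum into
   whatever form the target requires.  */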
10453 index = gen_rtx_PLUS (Pmode,
10454 gen_rtx_MULT (Pmode, index,
10455 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10456 gen_rtx_LABEL_REF (Pmode, table_label));
10457 #ifdef PIC_CASE_VECTOR_ADDRESS
10458 if (flag_pic)
10459 index = PIC_CASE_VECTOR_ADDRESS (index);
10460 else
10461 #endif
10462 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10463 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10464 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10465 RTX_UNCHANGING_P (vector) = 1;
10466 convert_move (temp, vector, 0);
10467
10468 emit_jump_insn (gen_tablejump (temp, table_label));
10469
10470 /* If we are generating PIC code or if the table is PC-relative, the
10471 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10472 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10473 emit_barrier ();
10474 }
10475
10476 int
10477 try_tablejump (index_type, index_expr, minval, range,
10478 table_label, default_label)
10479 tree index_type, index_expr, minval, range;
10480 rtx table_label, default_label;
10481 {
10482 rtx index;
10483
10484 if (! HAVE_tablejump)
10485 return 0;
10486
10487 index_expr = fold (build (MINUS_EXPR, index_type,
10488 convert (index_type, index_expr),
10489 convert (index_type, minval)));
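/* The subtraction of the lower bound is done as a tree expression,
   roughly (index_type) index_expr - (index_type) minval, so that fold
   can simplify it when the operands are constant.  */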
10490 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10491 emit_queue ();
10492 index = protect_from_queue (index, 0);
10493 do_pending_stack_adjust ();
10494
10495 do_tablejump (index, TYPE_MODE (index_type),
10496 convert_modes (TYPE_MODE (index_type),
10497 TYPE_MODE (TREE_TYPE (range)),
10498 expand_expr (range, NULL_RTX,
10499 VOIDmode, 0),
10500 TREE_UNSIGNED (TREE_TYPE (range))),
10501 table_label, default_label);
10502 return 1;
10503 }