1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "obstack.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expr.h"
37 #include "optabs.h"
38 #include "libfuncs.h"
39 #include "recog.h"
40 #include "reload.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
51
52 They should be processed from last to first if the stack and args
53 grow in opposite directions, but only if we have push insns. */
54
55 #ifdef PUSH_ROUNDING
56
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first. */
59 #endif
60
61 #endif
62
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
66 #else
67 #define STACK_PUSH_CODE PRE_INC
68 #endif
69 #endif
70
71 /* Assume that case vectors are not pc-relative. */
72 #ifndef CASE_VECTOR_PC_RELATIVE
73 #define CASE_VECTOR_PC_RELATIVE 0
74 #endif
75
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
83
84 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
85 static tree placeholder_list = 0;
86
87 /* This structure is used by move_by_pieces to describe the move to
88 be performed. */
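/* TO and FROM are the destination and source MEMs, and TO_ADDR and
   FROM_ADDR rtx for their addresses.  AUTINC_TO and AUTINC_FROM are
   nonzero if the corresponding address is updated as the copy
   proceeds, either by an autoincrement addressing mode or by an
   explicit adjustment; in the latter case EXPLICIT_INC_TO and
   EXPLICIT_INC_FROM record the direction (-1 or 1) of the adjustment
   to emit.  LEN is the number of bytes still to be moved, OFFSET the
   current offset into the blocks, and REVERSE is nonzero if the copy
   works from the end of the blocks toward the front.  */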
89 struct move_by_pieces
90 {
91 rtx to;
92 rtx to_addr;
93 int autinc_to;
94 int explicit_inc_to;
95 rtx from;
96 rtx from_addr;
97 int autinc_from;
98 int explicit_inc_from;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 int reverse;
102 };
103
104 /* This structure is used by store_by_pieces to describe the store to
105 be performed. */
106
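/* CONSTFUN is a callback which, given CONSTFUNDATA, a byte offset into
   the block and a machine mode, supplies the rtx value to store at
   that offset; CONSTFUNDATA is passed through to it unchanged.  */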
107 struct store_by_pieces
108 {
109 rtx to;
110 rtx to_addr;
111 int autinc_to;
112 int explicit_inc_to;
113 unsigned HOST_WIDE_INT len;
114 HOST_WIDE_INT offset;
115 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
116 PTR constfundata;
117 int reverse;
118 };
119
120 extern struct obstack permanent_obstack;
121
122 static rtx enqueue_insn PARAMS ((rtx, rtx));
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
124 PARAMS ((unsigned HOST_WIDE_INT,
125 unsigned int));
126 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *));
128 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
129 enum machine_mode));
130 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
131 unsigned int));
132 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
133 unsigned int));
134 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
135 enum machine_mode,
136 struct store_by_pieces *));
137 static rtx get_subtarget PARAMS ((rtx));
138 static int is_zeros_p PARAMS ((tree));
139 static int mostly_zeros_p PARAMS ((tree));
140 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int));
143 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
144 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, enum machine_mode, int, tree,
147 int));
148 static rtx var_rtx PARAMS ((tree));
149 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
150 static rtx expand_increment PARAMS ((tree, int, int));
151 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
152 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
153 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
154 rtx, rtx));
155 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
158 #endif
159 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
160
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
164
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
167
168 /* If a memory-to-memory move would take MOVE_RATIO or more simple
169 move-instruction sequences, we will do a movstr or libcall instead. */
170
171 #ifndef MOVE_RATIO
172 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
173 #define MOVE_RATIO 2
174 #else
175 /* If we are optimizing for space (-Os), cut down the default move ratio. */
176 #define MOVE_RATIO (optimize_size ? 3 : 15)
177 #endif
178 #endif
179
180 /* This macro is used to determine whether move_by_pieces should be called
181 to perform a structure copy. */
182 #ifndef MOVE_BY_PIECES_P
183 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
185 #endif
186
187 /* This array records the insn_code of insns to perform block moves. */
188 enum insn_code movstr_optab[NUM_MACHINE_MODES];
189
190 /* This array records the insn_code of insns to perform block clears. */
191 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
192
193 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
194
195 #ifndef SLOW_UNALIGNED_ACCESS
196 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
197 #endif
198 \f
199 /* This is run once per compilation to set up which modes can be used
200 directly in memory and to initialize the block move optab. */
201
202 void
203 init_expr_once ()
204 {
205 rtx insn, pat;
206 enum machine_mode mode;
207 int num_clobbers;
208 rtx mem, mem1;
209
210 start_sequence ();
211
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
217
218 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
219 pat = PATTERN (insn);
220
221 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
222 mode = (enum machine_mode) ((int) mode + 1))
223 {
224 int regno;
225 rtx reg;
226
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
230
231 /* See if there is some register that can be used in this mode and
232 directly loaded or stored from memory. */
233
234 if (mode != VOIDmode && mode != BLKmode)
235 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
236 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
237 regno++)
238 {
239 if (! HARD_REGNO_MODE_OK (regno, mode))
240 continue;
241
242 reg = gen_rtx_REG (mode, regno);
243
244 SET_SRC (pat) = mem;
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
248
249 SET_SRC (pat) = mem1;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
253
254 SET_SRC (pat) = reg;
255 SET_DEST (pat) = mem;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
258
259 SET_SRC (pat) = reg;
260 SET_DEST (pat) = mem1;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
263 }
264 }
265
266 end_sequence ();
267 }
268
269 /* This is run at the start of compiling a function. */
270
271 void
272 init_expr ()
273 {
274 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
275
276 pending_chain = 0;
277 pending_stack_adjust = 0;
278 stack_pointer_delta = 0;
279 inhibit_defer_pop = 0;
280 saveregs_value = 0;
281 apply_args_value = 0;
282 forced_labels = 0;
283 }
284
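/* Mark the rtx values held in the per-function expr_status structure P
   so that they are not freed by the garbage collector.  */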
285 void
286 mark_expr_status (p)
287 struct expr_status *p;
288 {
289 if (p == NULL)
290 return;
291
292 ggc_mark_rtx (p->x_saveregs_value);
293 ggc_mark_rtx (p->x_apply_args_value);
294 ggc_mark_rtx (p->x_forced_labels);
295 }
296
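/* Release the expr_status data allocated for function F.  */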
297 void
298 free_expr_status (f)
299 struct function *f;
300 {
301 free (f->expr);
302 f->expr = NULL;
303 }
304
305 /* Small sanity check that the queue is empty at the end of a function. */
306
307 void
308 finish_expr_for_function ()
309 {
310 if (pending_chain)
311 abort ();
312 }
313 \f
314 /* Manage the queue of increment instructions to be output
315 for POSTINCREMENT_EXPR expressions, etc. */
316
317 /* Queue up to increment (or change) VAR later. BODY says how:
318 BODY should be the same thing you would pass to emit_insn
319 to increment right away. It will go to emit_insn later on.
320
321 The value is a QUEUED expression to be used in place of VAR
322 where you want to guarantee the pre-incrementation value of VAR. */
323
324 static rtx
325 enqueue_insn (var, body)
326 rtx var, body;
327 {
328 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
329 body, pending_chain);
330 return pending_chain;
331 }
332
333 /* Use protect_from_queue to convert a QUEUED expression
334 into something that you can put immediately into an instruction.
335 If the queued incrementation has not happened yet,
336 protect_from_queue returns the variable itself.
337 If the incrementation has happened, protect_from_queue returns a temp
338 that contains a copy of the old value of the variable.
339
340 Any time an rtx which might possibly be a QUEUED is to be put
341 into an instruction, it must be passed through protect_from_queue first.
342 QUEUED expressions are not meaningful in instructions.
343
344 Do not pass a value through protect_from_queue and then hold
345 on to it for a while before putting it in an instruction!
346 If the queue is flushed in between, incorrect code will result. */
347
348 rtx
349 protect_from_queue (x, modify)
350 rtx x;
351 int modify;
352 {
353 RTX_CODE code = GET_CODE (x);
354
355 #if 0 /* A QUEUED can hang around after the queue is forced out. */
356 /* Shortcut for most common case. */
357 if (pending_chain == 0)
358 return x;
359 #endif
360
361 if (code != QUEUED)
362 {
363 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
364 use of autoincrement. Make a copy of the contents of the memory
365 location rather than a copy of the address, but not if the value is
366 of mode BLKmode. Don't modify X in place since it might be
367 shared. */
368 if (code == MEM && GET_MODE (x) != BLKmode
369 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
370 {
371 rtx y = XEXP (x, 0);
372 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
373
374 if (QUEUED_INSN (y))
375 {
376 rtx temp = gen_reg_rtx (GET_MODE (x));
377
378 emit_insn_before (gen_move_insn (temp, new),
379 QUEUED_INSN (y));
380 return temp;
381 }
382
383 /* Copy the address into a pseudo, so that the returned value
384 remains correct across calls to emit_queue. */
385 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
386 }
387
388 /* Otherwise, recursively protect the subexpressions of all
389 the kinds of rtx's that can contain a QUEUED. */
390 if (code == MEM)
391 {
392 rtx tem = protect_from_queue (XEXP (x, 0), 0);
393 if (tem != XEXP (x, 0))
394 {
395 x = copy_rtx (x);
396 XEXP (x, 0) = tem;
397 }
398 }
399 else if (code == PLUS || code == MULT)
400 {
401 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
402 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
403 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
404 {
405 x = copy_rtx (x);
406 XEXP (x, 0) = new0;
407 XEXP (x, 1) = new1;
408 }
409 }
410 return x;
411 }
412 /* If the increment has not happened, use the variable itself. Copy it
413 into a new pseudo so that the value remains correct across calls to
414 emit_queue. */
415 if (QUEUED_INSN (x) == 0)
416 return copy_to_reg (QUEUED_VAR (x));
417 /* If the increment has happened and a pre-increment copy exists,
418 use that copy. */
419 if (QUEUED_COPY (x) != 0)
420 return QUEUED_COPY (x);
421 /* The increment has happened but we haven't set up a pre-increment copy.
422 Set one up now, and use it. */
423 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
424 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
425 QUEUED_INSN (x));
426 return QUEUED_COPY (x);
427 }
428
429 /* Return nonzero if X contains a QUEUED expression:
430 if it contains anything that will be altered by a queued increment.
431 We handle only combinations of MEM, PLUS, MINUS and MULT operators
432 since memory addresses generally contain only those. */
433
434 int
435 queued_subexp_p (x)
436 rtx x;
437 {
438 enum rtx_code code = GET_CODE (x);
439 switch (code)
440 {
441 case QUEUED:
442 return 1;
443 case MEM:
444 return queued_subexp_p (XEXP (x, 0));
445 case MULT:
446 case PLUS:
447 case MINUS:
448 return (queued_subexp_p (XEXP (x, 0))
449 || queued_subexp_p (XEXP (x, 1)));
450 default:
451 return 0;
452 }
453 }
454
455 /* Perform all the pending incrementations. */
456
457 void
458 emit_queue ()
459 {
460 rtx p;
461 while ((p = pending_chain))
462 {
463 rtx body = QUEUED_BODY (p);
464
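/* If the body is a SEQUENCE, emit the whole sequence and record its
   first insn as the place where the increment was performed.  */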
465 if (GET_CODE (body) == SEQUENCE)
466 {
467 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
468 emit_insn (QUEUED_BODY (p));
469 }
470 else
471 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
472 pending_chain = QUEUED_NEXT (p);
473 }
474 }
475 \f
476 /* Copy data from FROM to TO, where the machine modes are not the same.
477 Both modes may be integer, or both may be floating.
478 UNSIGNEDP should be nonzero if FROM is an unsigned type.
479 This causes zero-extension instead of sign-extension. */
480
481 void
482 convert_move (to, from, unsignedp)
483 rtx to, from;
484 int unsignedp;
485 {
486 enum machine_mode to_mode = GET_MODE (to);
487 enum machine_mode from_mode = GET_MODE (from);
488 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
489 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
490 enum insn_code code;
491 rtx libcall;
492
493 /* rtx code for making an equivalent value. */
494 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
495
496 to = protect_from_queue (to, 1);
497 from = protect_from_queue (from, 0);
498
499 if (to_real != from_real)
500 abort ();
501
502 /* If FROM is a SUBREG that indicates that we have already done at least
503 the required extension, strip it. We don't handle such SUBREGs as
504 TO here. */
505
506 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
507 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
508 >= GET_MODE_SIZE (to_mode))
509 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
510 from = gen_lowpart (to_mode, from), from_mode = to_mode;
511
512 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
513 abort ();
514
515 if (to_mode == from_mode
516 || (from_mode == VOIDmode && CONSTANT_P (from)))
517 {
518 emit_move_insn (to, from);
519 return;
520 }
521
522 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
523 {
524 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
525 abort ();
526
527 if (VECTOR_MODE_P (to_mode))
528 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
529 else
530 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
531
532 emit_move_insn (to, from);
533 return;
534 }
535
536 if (to_real != from_real)
537 abort ();
538
539 if (to_real)
540 {
541 rtx value, insns;
542
543 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
544 {
545 /* Try converting directly if the insn is supported. */
546 if ((code = can_extend_p (to_mode, from_mode, 0))
547 != CODE_FOR_nothing)
548 {
549 emit_unop_insn (code, to, from, UNKNOWN);
550 return;
551 }
552 }
553
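/* Otherwise, look for a truncation pattern the target provides for
   this particular pair of floating point modes.  */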
554 #ifdef HAVE_trunchfqf2
555 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
556 {
557 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
558 return;
559 }
560 #endif
561 #ifdef HAVE_trunctqfqf2
562 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
563 {
564 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
565 return;
566 }
567 #endif
568 #ifdef HAVE_truncsfqf2
569 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
570 {
571 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
572 return;
573 }
574 #endif
575 #ifdef HAVE_truncdfqf2
576 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
577 {
578 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
579 return;
580 }
581 #endif
582 #ifdef HAVE_truncxfqf2
583 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
584 {
585 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
586 return;
587 }
588 #endif
589 #ifdef HAVE_trunctfqf2
590 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
591 {
592 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
593 return;
594 }
595 #endif
596
597 #ifdef HAVE_trunctqfhf2
598 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
599 {
600 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
601 return;
602 }
603 #endif
604 #ifdef HAVE_truncsfhf2
605 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
608 return;
609 }
610 #endif
611 #ifdef HAVE_truncdfhf2
612 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
613 {
614 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
615 return;
616 }
617 #endif
618 #ifdef HAVE_truncxfhf2
619 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
620 {
621 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
622 return;
623 }
624 #endif
625 #ifdef HAVE_trunctfhf2
626 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
627 {
628 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
629 return;
630 }
631 #endif
632
633 #ifdef HAVE_truncsftqf2
634 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
637 return;
638 }
639 #endif
640 #ifdef HAVE_truncdftqf2
641 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
644 return;
645 }
646 #endif
647 #ifdef HAVE_truncxftqf2
648 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
651 return;
652 }
653 #endif
654 #ifdef HAVE_trunctftqf2
655 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
658 return;
659 }
660 #endif
661
662 #ifdef HAVE_truncdfsf2
663 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
666 return;
667 }
668 #endif
669 #ifdef HAVE_truncxfsf2
670 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
673 return;
674 }
675 #endif
676 #ifdef HAVE_trunctfsf2
677 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
678 {
679 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
680 return;
681 }
682 #endif
683 #ifdef HAVE_truncxfdf2
684 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
685 {
686 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
687 return;
688 }
689 #endif
690 #ifdef HAVE_trunctfdf2
691 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
692 {
693 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
694 return;
695 }
696 #endif
697
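/* No direct conversion insn is available; fall back on the
   floating point conversion routines in the run-time library.  */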
698 libcall = (rtx) 0;
699 switch (from_mode)
700 {
701 case SFmode:
702 switch (to_mode)
703 {
704 case DFmode:
705 libcall = extendsfdf2_libfunc;
706 break;
707
708 case XFmode:
709 libcall = extendsfxf2_libfunc;
710 break;
711
712 case TFmode:
713 libcall = extendsftf2_libfunc;
714 break;
715
716 default:
717 break;
718 }
719 break;
720
721 case DFmode:
722 switch (to_mode)
723 {
724 case SFmode:
725 libcall = truncdfsf2_libfunc;
726 break;
727
728 case XFmode:
729 libcall = extenddfxf2_libfunc;
730 break;
731
732 case TFmode:
733 libcall = extenddftf2_libfunc;
734 break;
735
736 default:
737 break;
738 }
739 break;
740
741 case XFmode:
742 switch (to_mode)
743 {
744 case SFmode:
745 libcall = truncxfsf2_libfunc;
746 break;
747
748 case DFmode:
749 libcall = truncxfdf2_libfunc;
750 break;
751
752 default:
753 break;
754 }
755 break;
756
757 case TFmode:
758 switch (to_mode)
759 {
760 case SFmode:
761 libcall = trunctfsf2_libfunc;
762 break;
763
764 case DFmode:
765 libcall = trunctfdf2_libfunc;
766 break;
767
768 default:
769 break;
770 }
771 break;
772
773 default:
774 break;
775 }
776
777 if (libcall == (rtx) 0)
778 /* This conversion is not implemented yet. */
779 abort ();
780
781 start_sequence ();
782 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
783 1, from, from_mode);
784 insns = get_insns ();
785 end_sequence ();
786 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
787 from));
788 return;
789 }
790
791 /* Now both modes are integers. */
792
793 /* Handle expanding beyond a word. */
794 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
795 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
796 {
797 rtx insns;
798 rtx lowpart;
799 rtx fill_value;
800 rtx lowfrom;
801 int i;
802 enum machine_mode lowpart_mode;
803 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
804
805 /* Try converting directly if the insn is supported. */
806 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
807 != CODE_FOR_nothing)
808 {
809 /* If FROM is a SUBREG, put it into a register. Do this
810 so that we always generate the same set of insns for
811 better cse'ing; if an intermediate assignment occurred,
812 we won't be doing the operation directly on the SUBREG. */
813 if (optimize > 0 && GET_CODE (from) == SUBREG)
814 from = force_reg (from_mode, from);
815 emit_unop_insn (code, to, from, equiv_code);
816 return;
817 }
818 /* Next, try converting via full word. */
819 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
820 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
821 != CODE_FOR_nothing))
822 {
823 if (GET_CODE (to) == REG)
824 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
825 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
826 emit_unop_insn (code, to,
827 gen_lowpart (word_mode, to), equiv_code);
828 return;
829 }
830
831 /* No special multiword conversion insn; do it by hand. */
832 start_sequence ();
833
834 /* Since we will turn this into a no conflict block, we must ensure
835 that the source does not overlap the target. */
836
837 if (reg_overlap_mentioned_p (to, from))
838 from = force_reg (from_mode, from);
839
840 /* Get a copy of FROM widened to a word, if necessary. */
841 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
842 lowpart_mode = word_mode;
843 else
844 lowpart_mode = from_mode;
845
846 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
847
848 lowpart = gen_lowpart (lowpart_mode, to);
849 emit_move_insn (lowpart, lowfrom);
850
851 /* Compute the value to put in each remaining word. */
852 if (unsignedp)
853 fill_value = const0_rtx;
854 else
855 {
856 #ifdef HAVE_slt
857 if (HAVE_slt
858 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
859 && STORE_FLAG_VALUE == -1)
860 {
861 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
862 lowpart_mode, 0);
863 fill_value = gen_reg_rtx (word_mode);
864 emit_insn (gen_slt (fill_value));
865 }
866 else
867 #endif
868 {
869 fill_value
870 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
871 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
872 NULL_RTX, 0);
873 fill_value = convert_to_mode (word_mode, fill_value, 1);
874 }
875 }
876
877 /* Fill the remaining words. */
878 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
879 {
880 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
881 rtx subword = operand_subword (to, index, 1, to_mode);
882
883 if (subword == 0)
884 abort ();
885
886 if (fill_value != subword)
887 emit_move_insn (subword, fill_value);
888 }
889
890 insns = get_insns ();
891 end_sequence ();
892
893 emit_no_conflict_block (insns, to, from, NULL_RTX,
894 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
895 return;
896 }
897
898 /* Truncating multi-word to a word or less. */
899 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
900 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
901 {
902 if (!((GET_CODE (from) == MEM
903 && ! MEM_VOLATILE_P (from)
904 && direct_load[(int) to_mode]
905 && ! mode_dependent_address_p (XEXP (from, 0)))
906 || GET_CODE (from) == REG
907 || GET_CODE (from) == SUBREG))
908 from = force_reg (from_mode, from);
909 convert_move (to, gen_lowpart (word_mode, from), 0);
910 return;
911 }
912
913 /* Handle pointer conversion. */ /* SPEE 900220. */
914 if (to_mode == PQImode)
915 {
916 if (from_mode != QImode)
917 from = convert_to_mode (QImode, from, unsignedp);
918
919 #ifdef HAVE_truncqipqi2
920 if (HAVE_truncqipqi2)
921 {
922 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
923 return;
924 }
925 #endif /* HAVE_truncqipqi2 */
926 abort ();
927 }
928
929 if (from_mode == PQImode)
930 {
931 if (to_mode != QImode)
932 {
933 from = convert_to_mode (QImode, from, unsignedp);
934 from_mode = QImode;
935 }
936 else
937 {
938 #ifdef HAVE_extendpqiqi2
939 if (HAVE_extendpqiqi2)
940 {
941 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
942 return;
943 }
944 #endif /* HAVE_extendpqiqi2 */
945 abort ();
946 }
947 }
948
949 if (to_mode == PSImode)
950 {
951 if (from_mode != SImode)
952 from = convert_to_mode (SImode, from, unsignedp);
953
954 #ifdef HAVE_truncsipsi2
955 if (HAVE_truncsipsi2)
956 {
957 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
958 return;
959 }
960 #endif /* HAVE_truncsipsi2 */
961 abort ();
962 }
963
964 if (from_mode == PSImode)
965 {
966 if (to_mode != SImode)
967 {
968 from = convert_to_mode (SImode, from, unsignedp);
969 from_mode = SImode;
970 }
971 else
972 {
973 #ifdef HAVE_extendpsisi2
974 if (! unsignedp && HAVE_extendpsisi2)
975 {
976 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
977 return;
978 }
979 #endif /* HAVE_extendpsisi2 */
980 #ifdef HAVE_zero_extendpsisi2
981 if (unsignedp && HAVE_zero_extendpsisi2)
982 {
983 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
984 return;
985 }
986 #endif /* HAVE_zero_extendpsisi2 */
987 abort ();
988 }
989 }
990
991 if (to_mode == PDImode)
992 {
993 if (from_mode != DImode)
994 from = convert_to_mode (DImode, from, unsignedp);
995
996 #ifdef HAVE_truncdipdi2
997 if (HAVE_truncdipdi2)
998 {
999 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1000 return;
1001 }
1002 #endif /* HAVE_truncdipdi2 */
1003 abort ();
1004 }
1005
1006 if (from_mode == PDImode)
1007 {
1008 if (to_mode != DImode)
1009 {
1010 from = convert_to_mode (DImode, from, unsignedp);
1011 from_mode = DImode;
1012 }
1013 else
1014 {
1015 #ifdef HAVE_extendpdidi2
1016 if (HAVE_extendpdidi2)
1017 {
1018 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1019 return;
1020 }
1021 #endif /* HAVE_extendpdidi2 */
1022 abort ();
1023 }
1024 }
1025
1026 /* Now follow all the conversions between integers
1027 no more than a word long. */
1028
1029 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1030 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1031 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1032 GET_MODE_BITSIZE (from_mode)))
1033 {
1034 if (!((GET_CODE (from) == MEM
1035 && ! MEM_VOLATILE_P (from)
1036 && direct_load[(int) to_mode]
1037 && ! mode_dependent_address_p (XEXP (from, 0)))
1038 || GET_CODE (from) == REG
1039 || GET_CODE (from) == SUBREG))
1040 from = force_reg (from_mode, from);
1041 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1042 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1043 from = copy_to_reg (from);
1044 emit_move_insn (to, gen_lowpart (to_mode, from));
1045 return;
1046 }
1047
1048 /* Handle extension. */
1049 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1050 {
1051 /* Convert directly if that works. */
1052 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1053 != CODE_FOR_nothing)
1054 {
1055 emit_unop_insn (code, to, from, equiv_code);
1056 return;
1057 }
1058 else
1059 {
1060 enum machine_mode intermediate;
1061 rtx tmp;
1062 tree shift_amount;
1063
1064 /* Search for a mode to convert via. */
1065 for (intermediate = from_mode; intermediate != VOIDmode;
1066 intermediate = GET_MODE_WIDER_MODE (intermediate))
1067 if (((can_extend_p (to_mode, intermediate, unsignedp)
1068 != CODE_FOR_nothing)
1069 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1070 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1071 GET_MODE_BITSIZE (intermediate))))
1072 && (can_extend_p (intermediate, from_mode, unsignedp)
1073 != CODE_FOR_nothing))
1074 {
1075 convert_move (to, convert_to_mode (intermediate, from,
1076 unsignedp), unsignedp);
1077 return;
1078 }
1079
1080 /* No suitable intermediate mode.
1081 Generate what we need with shifts. */
1082 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1083 - GET_MODE_BITSIZE (from_mode), 0);
1084 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1085 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1086 to, unsignedp);
1087 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1088 to, unsignedp);
1089 if (tmp != to)
1090 emit_move_insn (to, tmp);
1091 return;
1092 }
1093 }
1094
1095 /* Support special truncate insns for certain modes. */
1096
1097 if (from_mode == DImode && to_mode == SImode)
1098 {
1099 #ifdef HAVE_truncdisi2
1100 if (HAVE_truncdisi2)
1101 {
1102 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1103 return;
1104 }
1105 #endif
1106 convert_move (to, force_reg (from_mode, from), unsignedp);
1107 return;
1108 }
1109
1110 if (from_mode == DImode && to_mode == HImode)
1111 {
1112 #ifdef HAVE_truncdihi2
1113 if (HAVE_truncdihi2)
1114 {
1115 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1116 return;
1117 }
1118 #endif
1119 convert_move (to, force_reg (from_mode, from), unsignedp);
1120 return;
1121 }
1122
1123 if (from_mode == DImode && to_mode == QImode)
1124 {
1125 #ifdef HAVE_truncdiqi2
1126 if (HAVE_truncdiqi2)
1127 {
1128 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1129 return;
1130 }
1131 #endif
1132 convert_move (to, force_reg (from_mode, from), unsignedp);
1133 return;
1134 }
1135
1136 if (from_mode == SImode && to_mode == HImode)
1137 {
1138 #ifdef HAVE_truncsihi2
1139 if (HAVE_truncsihi2)
1140 {
1141 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1142 return;
1143 }
1144 #endif
1145 convert_move (to, force_reg (from_mode, from), unsignedp);
1146 return;
1147 }
1148
1149 if (from_mode == SImode && to_mode == QImode)
1150 {
1151 #ifdef HAVE_truncsiqi2
1152 if (HAVE_truncsiqi2)
1153 {
1154 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1155 return;
1156 }
1157 #endif
1158 convert_move (to, force_reg (from_mode, from), unsignedp);
1159 return;
1160 }
1161
1162 if (from_mode == HImode && to_mode == QImode)
1163 {
1164 #ifdef HAVE_trunchiqi2
1165 if (HAVE_trunchiqi2)
1166 {
1167 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1168 return;
1169 }
1170 #endif
1171 convert_move (to, force_reg (from_mode, from), unsignedp);
1172 return;
1173 }
1174
1175 if (from_mode == TImode && to_mode == DImode)
1176 {
1177 #ifdef HAVE_trunctidi2
1178 if (HAVE_trunctidi2)
1179 {
1180 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1181 return;
1182 }
1183 #endif
1184 convert_move (to, force_reg (from_mode, from), unsignedp);
1185 return;
1186 }
1187
1188 if (from_mode == TImode && to_mode == SImode)
1189 {
1190 #ifdef HAVE_trunctisi2
1191 if (HAVE_trunctisi2)
1192 {
1193 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1194 return;
1195 }
1196 #endif
1197 convert_move (to, force_reg (from_mode, from), unsignedp);
1198 return;
1199 }
1200
1201 if (from_mode == TImode && to_mode == HImode)
1202 {
1203 #ifdef HAVE_trunctihi2
1204 if (HAVE_trunctihi2)
1205 {
1206 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1207 return;
1208 }
1209 #endif
1210 convert_move (to, force_reg (from_mode, from), unsignedp);
1211 return;
1212 }
1213
1214 if (from_mode == TImode && to_mode == QImode)
1215 {
1216 #ifdef HAVE_trunctiqi2
1217 if (HAVE_trunctiqi2)
1218 {
1219 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1220 return;
1221 }
1222 #endif
1223 convert_move (to, force_reg (from_mode, from), unsignedp);
1224 return;
1225 }
1226
1227 /* Handle truncation of volatile memrefs, and so on;
1228 the things that couldn't be truncated directly,
1229 and for which there was no special instruction. */
1230 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1231 {
1232 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1233 emit_move_insn (to, temp);
1234 return;
1235 }
1236
1237 /* Mode combination is not recognized. */
1238 abort ();
1239 }
1240
1241 /* Return an rtx for a value that would result
1242 from converting X to mode MODE.
1243 Both X and MODE may be floating, or both integer.
1244 UNSIGNEDP is nonzero if X is an unsigned value.
1245 This can be done by referring to a part of X in place
1246 or by copying to a new temporary with conversion.
1247
1248 This function *must not* call protect_from_queue
1249 except when putting X into an insn (in which case convert_move does it). */
1250
1251 rtx
1252 convert_to_mode (mode, x, unsignedp)
1253 enum machine_mode mode;
1254 rtx x;
1255 int unsignedp;
1256 {
1257 return convert_modes (mode, VOIDmode, x, unsignedp);
1258 }
1259
1260 /* Return an rtx for a value that would result
1261 from converting X from mode OLDMODE to mode MODE.
1262 Both modes may be floating, or both integer.
1263 UNSIGNEDP is nonzero if X is an unsigned value.
1264
1265 This can be done by referring to a part of X in place
1266 or by copying to a new temporary with conversion.
1267
1268 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1269
1270 This function *must not* call protect_from_queue
1271 except when putting X into an insn (in which case convert_move does it). */
1272
1273 rtx
1274 convert_modes (mode, oldmode, x, unsignedp)
1275 enum machine_mode mode, oldmode;
1276 rtx x;
1277 int unsignedp;
1278 {
1279 rtx temp;
1280
1281 /* If FROM is a SUBREG that indicates that we have already done at least
1282 the required extension, strip it. */
1283
1284 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1285 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1286 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1287 x = gen_lowpart (mode, x);
1288
1289 if (GET_MODE (x) != VOIDmode)
1290 oldmode = GET_MODE (x);
1291
1292 if (mode == oldmode)
1293 return x;
1294
1295 /* There is one case that we must handle specially: If we are converting
1296 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1297 we are to interpret the constant as unsigned, gen_lowpart will do
1298 the wrong thing if the constant appears negative. What we want to do is
1299 make the high-order word of the constant zero, not all ones. */
1300
1301 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1302 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1303 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1304 {
1305 HOST_WIDE_INT val = INTVAL (x);
1306
1307 if (oldmode != VOIDmode
1308 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1309 {
1310 int width = GET_MODE_BITSIZE (oldmode);
1311
1312 /* We need to zero extend VAL. */
1313 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1314 }
1315
1316 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1317 }
1318
1319 /* We can do this with a gen_lowpart if both desired and current modes
1320 are integer, and this is either a constant integer, a register, or a
1321 non-volatile MEM. Except for the constant case where MODE is no
1322 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1323
1324 if ((GET_CODE (x) == CONST_INT
1325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1326 || (GET_MODE_CLASS (mode) == MODE_INT
1327 && GET_MODE_CLASS (oldmode) == MODE_INT
1328 && (GET_CODE (x) == CONST_DOUBLE
1329 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1330 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1331 && direct_load[(int) mode])
1332 || (GET_CODE (x) == REG
1333 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1334 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1335 {
1336 /* ?? If we don't know OLDMODE, we have to assume here that
1337 X does not need sign- or zero-extension. This may not be
1338 the case, but it's the best we can do. */
1339 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1340 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1341 {
1342 HOST_WIDE_INT val = INTVAL (x);
1343 int width = GET_MODE_BITSIZE (oldmode);
1344
1345 /* We must sign or zero-extend in this case. Start by
1346 zero-extending, then sign extend if we need to. */
1347 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1348 if (! unsignedp
1349 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1350 val |= (HOST_WIDE_INT) (-1) << width;
1351
1352 return GEN_INT (trunc_int_for_mode (val, mode));
1353 }
1354
1355 return gen_lowpart (mode, x);
1356 }
1357
1358 temp = gen_reg_rtx (mode);
1359 convert_move (temp, x, unsignedp);
1360 return temp;
1361 }
1362 \f
1363 /* This macro is used to determine the largest unit size that
1364 move_by_pieces can use. */
1365
1366 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1367 move efficiently, as opposed to MOVE_MAX which is the maximum
1368 number of bytes we can move with a single instruction. */
1369
1370 #ifndef MOVE_MAX_PIECES
1371 #define MOVE_MAX_PIECES MOVE_MAX
1372 #endif
1373
1374 /* Generate several move instructions to copy LEN bytes from block FROM to
1375 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1376 and TO through protect_from_queue before calling.
1377
1378 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1379 used to push FROM to the stack.
1380
1381 ALIGN is maximum alignment we can assume. */
1382
1383 void
1384 move_by_pieces (to, from, len, align)
1385 rtx to, from;
1386 unsigned HOST_WIDE_INT len;
1387 unsigned int align;
1388 {
1389 struct move_by_pieces data;
1390 rtx to_addr, from_addr = XEXP (from, 0);
1391 unsigned int max_size = MOVE_MAX_PIECES + 1;
1392 enum machine_mode mode = VOIDmode, tmode;
1393 enum insn_code icode;
1394
1395 data.offset = 0;
1396 data.from_addr = from_addr;
1397 if (to)
1398 {
1399 to_addr = XEXP (to, 0);
1400 data.to = to;
1401 data.autinc_to
1402 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1403 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1404 data.reverse
1405 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1406 }
1407 else
1408 {
1409 to_addr = NULL_RTX;
1410 data.to = NULL_RTX;
1411 data.autinc_to = 1;
1412 #ifdef STACK_GROWS_DOWNWARD
1413 data.reverse = 1;
1414 #else
1415 data.reverse = 0;
1416 #endif
1417 }
1418 data.to_addr = to_addr;
1419 data.from = from;
1420 data.autinc_from
1421 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1422 || GET_CODE (from_addr) == POST_INC
1423 || GET_CODE (from_addr) == POST_DEC);
1424
1425 data.explicit_inc_from = 0;
1426 data.explicit_inc_to = 0;
1427 if (data.reverse) data.offset = len;
1428 data.len = len;
1429
1430 /* If copying requires more than two move insns,
1431 copy addresses to registers (to make displacements shorter)
1432 and use post-increment if available. */
1433 if (!(data.autinc_from && data.autinc_to)
1434 && move_by_pieces_ninsns (len, align) > 2)
1435 {
1436 /* Find the mode of the largest move... */
1437 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1438 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1439 if (GET_MODE_SIZE (tmode) < max_size)
1440 mode = tmode;
1441
1442 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1443 {
1444 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1445 data.autinc_from = 1;
1446 data.explicit_inc_from = -1;
1447 }
1448 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1449 {
1450 data.from_addr = copy_addr_to_reg (from_addr);
1451 data.autinc_from = 1;
1452 data.explicit_inc_from = 1;
1453 }
1454 if (!data.autinc_from && CONSTANT_P (from_addr))
1455 data.from_addr = copy_addr_to_reg (from_addr);
1456 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1457 {
1458 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1459 data.autinc_to = 1;
1460 data.explicit_inc_to = -1;
1461 }
1462 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1463 {
1464 data.to_addr = copy_addr_to_reg (to_addr);
1465 data.autinc_to = 1;
1466 data.explicit_inc_to = 1;
1467 }
1468 if (!data.autinc_to && CONSTANT_P (to_addr))
1469 data.to_addr = copy_addr_to_reg (to_addr);
1470 }
1471
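/* If unaligned accesses are not slow, or the block is already as
   aligned as any move we could issue, act as if it were aligned to
   MOVE_MAX bytes so the widest modes can be used below.  */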
1472 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1473 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1474 align = MOVE_MAX * BITS_PER_UNIT;
1475
1476 /* First move what we can in the largest integer mode, then go to
1477 successively smaller modes. */
1478
1479 while (max_size > 1)
1480 {
1481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1483 if (GET_MODE_SIZE (tmode) < max_size)
1484 mode = tmode;
1485
1486 if (mode == VOIDmode)
1487 break;
1488
1489 icode = mov_optab->handlers[(int) mode].insn_code;
1490 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1491 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1492
1493 max_size = GET_MODE_SIZE (mode);
1494 }
1495
1496 /* The code above should have handled everything. */
1497 if (data.len > 0)
1498 abort ();
1499 }
1500
1501 /* Return number of insns required to move L bytes by pieces.
1502 ALIGN (in bits) is maximum alignment we can assume. */
1503
1504 static unsigned HOST_WIDE_INT
1505 move_by_pieces_ninsns (l, align)
1506 unsigned HOST_WIDE_INT l;
1507 unsigned int align;
1508 {
1509 unsigned HOST_WIDE_INT n_insns = 0;
1510 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1511
1512 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1513 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1514 align = MOVE_MAX * BITS_PER_UNIT;
1515
1516 while (max_size > 1)
1517 {
1518 enum machine_mode mode = VOIDmode, tmode;
1519 enum insn_code icode;
1520
1521 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1522 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1523 if (GET_MODE_SIZE (tmode) < max_size)
1524 mode = tmode;
1525
1526 if (mode == VOIDmode)
1527 break;
1528
1529 icode = mov_optab->handlers[(int) mode].insn_code;
1530 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1531 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1532
1533 max_size = GET_MODE_SIZE (mode);
1534 }
1535
1536 if (l)
1537 abort ();
1538 return n_insns;
1539 }
1540
1541 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1542 with move instructions for mode MODE. GENFUN is the gen_... function
1543 to make a move insn for that mode. DATA has all the other info. */
1544
1545 static void
1546 move_by_pieces_1 (genfun, mode, data)
1547 rtx (*genfun) PARAMS ((rtx, ...));
1548 enum machine_mode mode;
1549 struct move_by_pieces *data;
1550 {
1551 unsigned int size = GET_MODE_SIZE (mode);
1552 rtx to1 = NULL_RTX, from1;
1553
1554 while (data->len >= size)
1555 {
1556 if (data->reverse)
1557 data->offset -= size;
1558
1559 if (data->to)
1560 {
1561 if (data->autinc_to)
1562 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1563 data->offset);
1564 else
1565 to1 = adjust_address (data->to, mode, data->offset);
1566 }
1567
1568 if (data->autinc_from)
1569 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1570 data->offset);
1571 else
1572 from1 = adjust_address (data->from, mode, data->offset);
1573
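/* If the addresses must be adjusted explicitly, emit any needed
   pre-decrements here; the matching post-increments follow the
   move below.  */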
1574 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1575 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1576 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1577 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1578
1579 if (data->to)
1580 emit_insn ((*genfun) (to1, from1));
1581 else
1582 {
1583 #ifdef PUSH_ROUNDING
1584 emit_single_push_insn (mode, from1, NULL);
1585 #else
1586 abort ();
1587 #endif
1588 }
1589
1590 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1591 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1592 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1593 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1594
1595 if (! data->reverse)
1596 data->offset += size;
1597
1598 data->len -= size;
1599 }
1600 }
1601 \f
1602 /* Emit code to move a block Y to a block X.
1603 This may be done with string-move instructions,
1604 with multiple scalar move instructions, or with a library call.
1605
1606 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1607 with mode BLKmode.
1608 SIZE is an rtx that says how long they are.
1609 Their alignment is taken to be the minimum of MEM_ALIGN (X) and MEM_ALIGN (Y).
1610
1611 Return the address of the new block, if memcpy is called and returns it,
1612 0 otherwise. */
1613
1614 rtx
1615 emit_block_move (x, y, size)
1616 rtx x, y;
1617 rtx size;
1618 {
1619 rtx retval = 0;
1620 #ifdef TARGET_MEM_FUNCTIONS
1621 static tree fn;
1622 tree call_expr, arg_list;
1623 #endif
1624 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1625
1626 if (GET_MODE (x) != BLKmode)
1627 abort ();
1628
1629 if (GET_MODE (y) != BLKmode)
1630 abort ();
1631
1632 x = protect_from_queue (x, 1);
1633 y = protect_from_queue (y, 0);
1634 size = protect_from_queue (size, 0);
1635
1636 if (GET_CODE (x) != MEM)
1637 abort ();
1638 if (GET_CODE (y) != MEM)
1639 abort ();
1640 if (size == 0)
1641 abort ();
1642
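/* If the length is a compile-time constant and the copy is cheap
   enough to do with individual move insns, expand it inline.  */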
1643 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1644 move_by_pieces (x, y, INTVAL (size), align);
1645 else
1646 {
1647 /* Try the most limited insn first, because there's no point
1648 including more than one in the machine description unless
1649 the more limited one has some advantage. */
1650
1651 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1652 enum machine_mode mode;
1653
1654 /* Since this is a move insn, we don't care about volatility. */
1655 volatile_ok = 1;
1656
1657 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1658 mode = GET_MODE_WIDER_MODE (mode))
1659 {
1660 enum insn_code code = movstr_optab[(int) mode];
1661 insn_operand_predicate_fn pred;
1662
1663 if (code != CODE_FOR_nothing
1664 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1665 here because if SIZE is less than the mode mask, as it is
1666 returned by the macro, it will definitely be less than the
1667 actual mode mask. */
1668 && ((GET_CODE (size) == CONST_INT
1669 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1670 <= (GET_MODE_MASK (mode) >> 1)))
1671 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1672 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1673 || (*pred) (x, BLKmode))
1674 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1675 || (*pred) (y, BLKmode))
1676 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1677 || (*pred) (opalign, VOIDmode)))
1678 {
1679 rtx op2;
1680 rtx last = get_last_insn ();
1681 rtx pat;
1682
1683 op2 = convert_to_mode (mode, size, 1);
1684 pred = insn_data[(int) code].operand[2].predicate;
1685 if (pred != 0 && ! (*pred) (op2, mode))
1686 op2 = copy_to_mode_reg (mode, op2);
1687
1688 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1689 if (pat)
1690 {
1691 emit_insn (pat);
1692 volatile_ok = 0;
1693 return 0;
1694 }
1695 else
1696 delete_insns_since (last);
1697 }
1698 }
1699
1700 volatile_ok = 0;
1701
1702 /* X, Y, or SIZE may have been passed through protect_from_queue.
1703
1704 It is unsafe to save the value generated by protect_from_queue
1705 and reuse it later. Consider what happens if emit_queue is
1706 called before the return value from protect_from_queue is used.
1707
1708 Expansion of the CALL_EXPR below will call emit_queue before
1709 we are finished emitting RTL for argument setup. So if we are
1710 not careful we could get the wrong value for an argument.
1711
1712 To avoid this problem we go ahead and emit code to copy X, Y &
1713 SIZE into new pseudos. We can then place those new pseudos
1714 into an RTL_EXPR and use them later, even after a call to
1715 emit_queue.
1716
1717 Note this is not strictly needed for library calls since they
1718 do not call emit_queue before loading their arguments. However,
1719 we may need to have library calls call emit_queue in the future
1720 since failing to do so could cause problems for targets which
1721 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1722 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1723 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1724
1725 #ifdef TARGET_MEM_FUNCTIONS
1726 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1727 #else
1728 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1729 TREE_UNSIGNED (integer_type_node));
1730 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1731 #endif
1732
1733 #ifdef TARGET_MEM_FUNCTIONS
1734 /* It is incorrect to use the libcall calling conventions to call
1735 memcpy in this context.
1736
1737 This could be a user call to memcpy and the user may wish to
1738 examine the return value from memcpy.
1739
1740 For targets where libcalls and normal calls have different conventions
1741 for returning pointers, we could end up generating incorrect code.
1742
1743 So instead of using a libcall sequence we build up a suitable
1744 CALL_EXPR and expand the call in the normal fashion. */
1745 if (fn == NULL_TREE)
1746 {
1747 tree fntype;
1748
1749 /* This was copied from except.c; I don't know if all this is
1750 necessary in this context or not. */
1751 fn = get_identifier ("memcpy");
1752 fntype = build_pointer_type (void_type_node);
1753 fntype = build_function_type (fntype, NULL_TREE);
1754 fn = build_decl (FUNCTION_DECL, fn, fntype);
1755 ggc_add_tree_root (&fn, 1);
1756 DECL_EXTERNAL (fn) = 1;
1757 TREE_PUBLIC (fn) = 1;
1758 DECL_ARTIFICIAL (fn) = 1;
1759 TREE_NOTHROW (fn) = 1;
1760 make_decl_rtl (fn, NULL);
1761 assemble_external (fn);
1762 }
1763
1764 /* We need to make an argument list for the function call.
1765
1766 memcpy has three arguments, the first two are void * addresses and
1767 the last is a size_t byte count for the copy. */
1768 arg_list
1769 = build_tree_list (NULL_TREE,
1770 make_tree (build_pointer_type (void_type_node), x));
1771 TREE_CHAIN (arg_list)
1772 = build_tree_list (NULL_TREE,
1773 make_tree (build_pointer_type (void_type_node), y));
1774 TREE_CHAIN (TREE_CHAIN (arg_list))
1775 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1776 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1777
1778 /* Now we have to build up the CALL_EXPR itself. */
1779 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1780 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1781 call_expr, arg_list, NULL_TREE);
1782 TREE_SIDE_EFFECTS (call_expr) = 1;
1783
1784 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1785 #else
1786 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1787 VOIDmode, 3, y, Pmode, x, Pmode,
1788 convert_to_mode (TYPE_MODE (integer_type_node), size,
1789 TREE_UNSIGNED (integer_type_node)),
1790 TYPE_MODE (integer_type_node));
1791 #endif
1792
1793 /* If we are initializing a readonly value, show the above call
1794 clobbered it. Otherwise, a load from it may erroneously be hoisted
1795 from a loop. */
1796 if (RTX_UNCHANGING_P (x))
1797 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1798 }
1799
1800 return retval;
1801 }
1802 \f
1803 /* Copy all or part of a value X into registers starting at REGNO.
1804 The number of registers to be filled is NREGS. */
1805
1806 void
1807 move_block_to_reg (regno, x, nregs, mode)
1808 int regno;
1809 rtx x;
1810 int nregs;
1811 enum machine_mode mode;
1812 {
1813 int i;
1814 #ifdef HAVE_load_multiple
1815 rtx pat;
1816 rtx last;
1817 #endif
1818
1819 if (nregs == 0)
1820 return;
1821
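/* If X is a constant the target cannot use directly, force it into
   the constant pool and operate on the resulting memory reference.  */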
1822 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1823 x = validize_mem (force_const_mem (mode, x));
1824
1825 /* See if the machine can do this with a load multiple insn. */
1826 #ifdef HAVE_load_multiple
1827 if (HAVE_load_multiple)
1828 {
1829 last = get_last_insn ();
1830 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1831 GEN_INT (nregs));
1832 if (pat)
1833 {
1834 emit_insn (pat);
1835 return;
1836 }
1837 else
1838 delete_insns_since (last);
1839 }
1840 #endif
1841
1842 for (i = 0; i < nregs; i++)
1843 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1844 operand_subword_force (x, i, mode));
1845 }
1846
1847 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1848 The number of registers to be filled is NREGS. SIZE indicates the number
1849 of bytes in the object X. */
1850
1851 void
1852 move_block_from_reg (regno, x, nregs, size)
1853 int regno;
1854 rtx x;
1855 int nregs;
1856 int size;
1857 {
1858 int i;
1859 #ifdef HAVE_store_multiple
1860 rtx pat;
1861 rtx last;
1862 #endif
1863 enum machine_mode mode;
1864
1865 if (nregs == 0)
1866 return;
1867
1868 /* If SIZE is that of a mode no bigger than a word, just use that
1869 mode's store operation. */
1870 if (size <= UNITS_PER_WORD
1871 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
1872 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1873 {
1874 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1875 return;
1876 }
1877
1878 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1879 to the left before storing to memory. Note that the previous test
1880 doesn't handle all cases (e.g. SIZE == 3). */
1881 if (size < UNITS_PER_WORD
1882 && BYTES_BIG_ENDIAN
1883 && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
1884 {
1885 rtx tem = operand_subword (x, 0, 1, BLKmode);
1886 rtx shift;
1887
1888 if (tem == 0)
1889 abort ();
1890
1891 shift = expand_shift (LSHIFT_EXPR, word_mode,
1892 gen_rtx_REG (word_mode, regno),
1893 build_int_2 ((UNITS_PER_WORD - size)
1894 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1895 emit_move_insn (tem, shift);
1896 return;
1897 }
1898
1899 /* See if the machine can do this with a store multiple insn. */
1900 #ifdef HAVE_store_multiple
1901 if (HAVE_store_multiple)
1902 {
1903 last = get_last_insn ();
1904 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1905 GEN_INT (nregs));
1906 if (pat)
1907 {
1908 emit_insn (pat);
1909 return;
1910 }
1911 else
1912 delete_insns_since (last);
1913 }
1914 #endif
1915
1916 for (i = 0; i < nregs; i++)
1917 {
1918 rtx tem = operand_subword (x, i, 1, BLKmode);
1919
1920 if (tem == 0)
1921 abort ();
1922
1923 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1924 }
1925 }
1926
1927 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1928 registers represented by a PARALLEL. SSIZE represents the total size of
1929 block SRC in bytes, or -1 if not known. */
1930 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1931 the balance will be in what would be the low-order memory addresses, i.e.
1932 left justified for big endian, right justified for little endian. This
1933 happens to be true for the targets currently using this support. If this
1934 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1935 would be needed. */
1936
1937 void
1938 emit_group_load (dst, orig_src, ssize)
1939 rtx dst, orig_src;
1940 int ssize;
1941 {
1942 rtx *tmps, src;
1943 int start, i;
1944
1945 if (GET_CODE (dst) != PARALLEL)
1946 abort ();
1947
1948 /* Check for a NULL entry, used to indicate that the parameter goes
1949 both on the stack and in registers. */
1950 if (XEXP (XVECEXP (dst, 0, 0), 0))
1951 start = 0;
1952 else
1953 start = 1;
1954
1955 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1956
1957 /* Process the pieces. */
1958 for (i = start; i < XVECLEN (dst, 0); i++)
1959 {
1960 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1961 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1962 unsigned int bytelen = GET_MODE_SIZE (mode);
1963 int shift = 0;
1964
1965 /* Handle trailing fragments that run over the size of the struct. */
1966 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1967 {
1968 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1969 bytelen = ssize - bytepos;
1970 if (bytelen <= 0)
1971 abort ();
1972 }
1973
1974 /* If we won't be loading directly from memory, protect the real source
1975 from strange tricks we might play; but make sure that the source can
1976 be loaded directly into the destination. */
1977 src = orig_src;
1978 if (GET_CODE (orig_src) != MEM
1979 && (!CONSTANT_P (orig_src)
1980 || (GET_MODE (orig_src) != mode
1981 && GET_MODE (orig_src) != VOIDmode)))
1982 {
1983 if (GET_MODE (orig_src) == VOIDmode)
1984 src = gen_reg_rtx (mode);
1985 else
1986 src = gen_reg_rtx (GET_MODE (orig_src));
1987
1988 emit_move_insn (src, orig_src);
1989 }
1990
1991 /* Optimize the access just a bit. */
1992 if (GET_CODE (src) == MEM
1993 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
1994 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1995 && bytelen == GET_MODE_SIZE (mode))
1996 {
1997 tmps[i] = gen_reg_rtx (mode);
1998 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1999 }
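	  /* A CONCAT holds a complex value as two parts.  Presumably we can
	     use the real or imaginary part directly when this piece
	     coincides with one of them; a piece starting at offset zero
	     that spans both is handled by spilling the whole value to a
	     stack temporary.  */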
2000 else if (GET_CODE (src) == CONCAT)
2001 {
2002 if (bytepos == 0
2003 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2004 tmps[i] = XEXP (src, 0);
2005 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2006 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2007 tmps[i] = XEXP (src, 1);
2008 else if (bytepos == 0)
2009 {
2010 rtx mem = assign_stack_temp (GET_MODE (src),
2011 GET_MODE_SIZE (GET_MODE (src)), 0);
2012 emit_move_insn (mem, src);
2013 tmps[i] = adjust_address (mem, mode, 0);
2014 }
2015 else
2016 abort ();
2017 }
2018 else if (CONSTANT_P (src)
2019 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2020 tmps[i] = src;
2021 else
2022 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2023 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2024 mode, mode, ssize);
2025
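      /* On a big-endian target, left justify a trailing fragment in its
	 register, matching the layout assumption described before this
	 function.  */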
2026 if (BYTES_BIG_ENDIAN && shift)
2027 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2028 tmps[i], 0, OPTAB_WIDEN);
2029 }
2030
2031 emit_queue ();
2032
2033 /* Copy the extracted pieces into the proper (probable) hard regs. */
2034 for (i = start; i < XVECLEN (dst, 0); i++)
2035 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2036 }
2037
2038 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2039 registers represented by a PARALLEL. SSIZE represents the total size of
2040 block DST, or -1 if not known. */
2041
2042 void
2043 emit_group_store (orig_dst, src, ssize)
2044 rtx orig_dst, src;
2045 int ssize;
2046 {
2047 rtx *tmps, dst;
2048 int start, i;
2049
2050 if (GET_CODE (src) != PARALLEL)
2051 abort ();
2052
2053 /* Check for a NULL entry, used to indicate that the parameter goes
2054 both on the stack and in registers. */
2055 if (XEXP (XVECEXP (src, 0, 0), 0))
2056 start = 0;
2057 else
2058 start = 1;
2059
2060 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2061
2062 /* Copy the (probable) hard regs into pseudos. */
2063 for (i = start; i < XVECLEN (src, 0); i++)
2064 {
2065 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2066 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2067 emit_move_insn (tmps[i], reg);
2068 }
2069 emit_queue ();
2070
2071 /* If we won't be storing directly into memory, protect the real destination
2072 from strange tricks we might play. */
2073 dst = orig_dst;
2074 if (GET_CODE (dst) == PARALLEL)
2075 {
2076 rtx temp;
2077
2078 /* We can get a PARALLEL dst if there is a conditional expression in
2079 a return statement. In that case, the dst and src are the same,
2080 so no action is necessary. */
2081 if (rtx_equal_p (dst, src))
2082 return;
2083
2084 /* It is unclear if we can ever reach here, but we may as well handle
2085 it. Allocate a temporary, and split this into a store/load to/from
2086 the temporary. */
2087
2088 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2089 emit_group_store (temp, src, ssize);
2090 emit_group_load (dst, temp, ssize);
2091 return;
2092 }
2093 else if (GET_CODE (dst) != MEM)
2094 {
2095 dst = gen_reg_rtx (GET_MODE (orig_dst));
2096 /* Make life a bit easier for combine. */
2097 emit_move_insn (dst, const0_rtx);
2098 }
2099
2100 /* Process the pieces. */
2101 for (i = start; i < XVECLEN (src, 0); i++)
2102 {
2103 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2104 enum machine_mode mode = GET_MODE (tmps[i]);
2105 unsigned int bytelen = GET_MODE_SIZE (mode);
2106
2107 /* Handle trailing fragments that run over the size of the struct. */
2108 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2109 {
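	  /* On a big-endian target the significant bytes of the fragment
	     sit in the high end of the register; shift them down so that
	     only the bytes that fit within the struct are stored below.  */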
2110 if (BYTES_BIG_ENDIAN)
2111 {
2112 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2113 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2114 tmps[i], 0, OPTAB_WIDEN);
2115 }
2116 bytelen = ssize - bytepos;
2117 }
2118
2119 /* Optimize the access just a bit. */
2120 if (GET_CODE (dst) == MEM
2121 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2122 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2123 && bytelen == GET_MODE_SIZE (mode))
2124 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2125 else
2126 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2127 mode, tmps[i], ssize);
2128 }
2129
2130 emit_queue ();
2131
2132 /* Copy from the pseudo into the (probable) hard reg. */
2133 if (GET_CODE (dst) == REG)
2134 emit_move_insn (orig_dst, dst);
2135 }
2136
2137 /* Generate code to copy a BLKmode object of TYPE out of a
2138 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2139 is null, a stack temporary is created. TGTBLK is returned.
2140
2141 The primary purpose of this routine is to handle functions
2142 that return BLKmode structures in registers. Some machines
2143 (the PA for example) want to return all small structures
2144 in registers regardless of the structure's alignment. */
2145
2146 rtx
2147 copy_blkmode_from_reg (tgtblk, srcreg, type)
2148 rtx tgtblk;
2149 rtx srcreg;
2150 tree type;
2151 {
2152 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2153 rtx src = NULL, dst = NULL;
2154 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2155 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2156
2157 if (tgtblk == 0)
2158 {
2159 tgtblk = assign_temp (build_qualified_type (type,
2160 (TYPE_QUALS (type)
2161 | TYPE_QUAL_CONST)),
2162 0, 1, 1);
2163 preserve_temp_slots (tgtblk);
2164 }
2165
2166 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2167 into a new pseudo which is a full word.
2168
2169 If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
2170 the wrong part of the register gets copied so we fake a type conversion
2171 in place. */
2172 if (GET_MODE (srcreg) != BLKmode
2173 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2174 {
2175 if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
2176 srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
2177 else
2178 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2179 }
2180
2181 /* Structures whose size is not a multiple of a word are aligned
2182 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2183 machine, this means we must skip the empty high order bytes when
2184 calculating the bit offset. */
2185 if (BYTES_BIG_ENDIAN
2186 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
2187 && bytes % UNITS_PER_WORD)
2188 big_endian_correction
2189 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2190
2191 /* Copy the structure BITSIZE bits at a time.
2192
2193 We could probably emit more efficient code for machines which do not use
2194 strict alignment, but it doesn't seem worth the effort at the current
2195 time. */
2196 for (bitpos = 0, xbitpos = big_endian_correction;
2197 bitpos < bytes * BITS_PER_UNIT;
2198 bitpos += bitsize, xbitpos += bitsize)
2199 {
2200 /* We need a new source operand each time xbitpos is on a
2201 word boundary and when xbitpos == big_endian_correction
2202 (the first time through). */
2203 if (xbitpos % BITS_PER_WORD == 0
2204 || xbitpos == big_endian_correction)
2205 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2206 GET_MODE (srcreg));
2207
2208 /* We need a new destination operand each time bitpos is on
2209 a word boundary. */
2210 if (bitpos % BITS_PER_WORD == 0)
2211 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2212
2213 /* Use xbitpos for the source extraction (right justified) and
2214 bitpos for the destination store (left justified). */
2215 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2216 extract_bit_field (src, bitsize,
2217 xbitpos % BITS_PER_WORD, 1,
2218 NULL_RTX, word_mode, word_mode,
2219 BITS_PER_WORD),
2220 BITS_PER_WORD);
2221 }
2222
2223 return tgtblk;
2224 }
2225
2226 /* Add a USE expression for REG to the (possibly empty) list pointed
2227 to by CALL_FUSAGE. REG must denote a hard register. */
2228
2229 void
2230 use_reg (call_fusage, reg)
2231 rtx *call_fusage, reg;
2232 {
2233 if (GET_CODE (reg) != REG
2234 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2235 abort ();
2236
2237 *call_fusage
2238 = gen_rtx_EXPR_LIST (VOIDmode,
2239 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2240 }
2241
2242 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2243 starting at REGNO. All of these registers must be hard registers. */
2244
2245 void
2246 use_regs (call_fusage, regno, nregs)
2247 rtx *call_fusage;
2248 int regno;
2249 int nregs;
2250 {
2251 int i;
2252
2253 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2254 abort ();
2255
2256 for (i = 0; i < nregs; i++)
2257 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2258 }
2259
2260 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2261 PARALLEL REGS. This is for calls that pass values in multiple
2262 non-contiguous locations. The Irix 6 ABI has examples of this. */
2263
2264 void
2265 use_group_regs (call_fusage, regs)
2266 rtx *call_fusage;
2267 rtx regs;
2268 {
2269 int i;
2270
2271 for (i = 0; i < XVECLEN (regs, 0); i++)
2272 {
2273 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2274
2275 /* A NULL entry means the parameter goes both on the stack and in
2276 registers. This can also be a MEM for targets that pass values
2277 partially on the stack and partially in registers. */
2278 if (reg != 0 && GET_CODE (reg) == REG)
2279 use_reg (call_fusage, reg);
2280 }
2281 }
2282 \f
2283
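/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */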
2284 int
2285 can_store_by_pieces (len, constfun, constfundata, align)
2286 unsigned HOST_WIDE_INT len;
2287 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2288 PTR constfundata;
2289 unsigned int align;
2290 {
2291 unsigned HOST_WIDE_INT max_size, l;
2292 HOST_WIDE_INT offset = 0;
2293 enum machine_mode mode, tmode;
2294 enum insn_code icode;
2295 int reverse;
2296 rtx cst;
2297
2298 if (! MOVE_BY_PIECES_P (len, align))
2299 return 0;
2300
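  /* If unaligned accesses are cheap, or the known alignment already covers
     the widest move we would make, treat the destination as maximally
     aligned so the widest available modes are considered.  */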
2301 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2302 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2303 align = MOVE_MAX * BITS_PER_UNIT;
2304
2305 /* We would first store what we can in the largest integer mode, then go to
2306 successively smaller modes. */
2307
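  /* Check both traversal orders: when the target has decrementing address
     modes, store_by_pieces_1 may walk the block backwards, and every
     constant requested in that order must also be a legitimate operand.  */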
2308 for (reverse = 0;
2309 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2310 reverse++)
2311 {
2312 l = len;
2313 mode = VOIDmode;
2314 max_size = MOVE_MAX_PIECES + 1;
2315 while (max_size > 1)
2316 {
2317 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2318 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2319 if (GET_MODE_SIZE (tmode) < max_size)
2320 mode = tmode;
2321
2322 if (mode == VOIDmode)
2323 break;
2324
2325 icode = mov_optab->handlers[(int) mode].insn_code;
2326 if (icode != CODE_FOR_nothing
2327 && align >= GET_MODE_ALIGNMENT (mode))
2328 {
2329 unsigned int size = GET_MODE_SIZE (mode);
2330
2331 while (l >= size)
2332 {
2333 if (reverse)
2334 offset -= size;
2335
2336 cst = (*constfun) (constfundata, offset, mode);
2337 if (!LEGITIMATE_CONSTANT_P (cst))
2338 return 0;
2339
2340 if (!reverse)
2341 offset += size;
2342
2343 l -= size;
2344 }
2345 }
2346
2347 max_size = GET_MODE_SIZE (mode);
2348 }
2349
2350 /* The code above should have handled everything. */
2351 if (l != 0)
2352 abort ();
2353 }
2354
2355 return 1;
2356 }
2357
2358 /* Generate several move instructions to store LEN bytes generated by
2359 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2360 pointer which will be passed as argument in every CONSTFUN call.
2361 ALIGN is maximum alignment we can assume. */
2362
2363 void
2364 store_by_pieces (to, len, constfun, constfundata, align)
2365 rtx to;
2366 unsigned HOST_WIDE_INT len;
2367 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 PTR constfundata;
2369 unsigned int align;
2370 {
2371 struct store_by_pieces data;
2372
2373 if (! MOVE_BY_PIECES_P (len, align))
2374 abort ();
2375 to = protect_from_queue (to, 1);
2376 data.constfun = constfun;
2377 data.constfundata = constfundata;
2378 data.len = len;
2379 data.to = to;
2380 store_by_pieces_1 (&data, align);
2381 }
2382
2383 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2384 rtx with BLKmode). The caller must pass TO through protect_from_queue
2385 before calling. ALIGN is maximum alignment we can assume. */
2386
2387 static void
2388 clear_by_pieces (to, len, align)
2389 rtx to;
2390 unsigned HOST_WIDE_INT len;
2391 unsigned int align;
2392 {
2393 struct store_by_pieces data;
2394
2395 data.constfun = clear_by_pieces_1;
2396 data.constfundata = NULL;
2397 data.len = len;
2398 data.to = to;
2399 store_by_pieces_1 (&data, align);
2400 }
2401
2402 /* Callback routine for clear_by_pieces.
2403 Return const0_rtx unconditionally. */
2404
2405 static rtx
2406 clear_by_pieces_1 (data, offset, mode)
2407 PTR data ATTRIBUTE_UNUSED;
2408 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2409 enum machine_mode mode ATTRIBUTE_UNUSED;
2410 {
2411 return const0_rtx;
2412 }
2413
2414 /* Subroutine of clear_by_pieces and store_by_pieces.
2415 Generate several move instructions to store LEN bytes of block TO. (A MEM
2416 rtx with BLKmode). The caller must pass TO through protect_from_queue
2417 before calling. ALIGN is maximum alignment we can assume. */
2418
2419 static void
2420 store_by_pieces_1 (data, align)
2421 struct store_by_pieces *data;
2422 unsigned int align;
2423 {
2424 rtx to_addr = XEXP (data->to, 0);
2425 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2426 enum machine_mode mode = VOIDmode, tmode;
2427 enum insn_code icode;
2428
2429 data->offset = 0;
2430 data->to_addr = to_addr;
2431 data->autinc_to
2432 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2433 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2434
2435 data->explicit_inc_to = 0;
2436 data->reverse
2437 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2438 if (data->reverse)
2439 data->offset = data->len;
2440
2441 /* If storing requires more than two move insns,
2442 copy addresses to registers (to make displacements shorter)
2443 and use post-increment if available. */
2444 if (!data->autinc_to
2445 && move_by_pieces_ninsns (data->len, align) > 2)
2446 {
2447 /* Determine the main mode we'll be using. */
2448 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2449 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2450 if (GET_MODE_SIZE (tmode) < max_size)
2451 mode = tmode;
2452
2453 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2454 {
2455 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2456 data->autinc_to = 1;
2457 data->explicit_inc_to = -1;
2458 }
2459
2460 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2461 && ! data->autinc_to)
2462 {
2463 data->to_addr = copy_addr_to_reg (to_addr);
2464 data->autinc_to = 1;
2465 data->explicit_inc_to = 1;
2466 }
2467
2468 if ( !data->autinc_to && CONSTANT_P (to_addr))
2469 data->to_addr = copy_addr_to_reg (to_addr);
2470 }
2471
2472 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2473 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2474 align = MOVE_MAX * BITS_PER_UNIT;
2475
2476 /* First store what we can in the largest integer mode, then go to
2477 successively smaller modes. */
2478
2479 while (max_size > 1)
2480 {
2481 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2482 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2483 if (GET_MODE_SIZE (tmode) < max_size)
2484 mode = tmode;
2485
2486 if (mode == VOIDmode)
2487 break;
2488
2489 icode = mov_optab->handlers[(int) mode].insn_code;
2490 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2491 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2492
2493 max_size = GET_MODE_SIZE (mode);
2494 }
2495
2496 /* The code above should have handled everything. */
2497 if (data->len != 0)
2498 abort ();
2499 }
2500
2501 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2502 with move instructions for mode MODE. GENFUN is the gen_... function
2503 to make a move insn for that mode. DATA has all the other info. */
2504
2505 static void
2506 store_by_pieces_2 (genfun, mode, data)
2507 rtx (*genfun) PARAMS ((rtx, ...));
2508 enum machine_mode mode;
2509 struct store_by_pieces *data;
2510 {
2511 unsigned int size = GET_MODE_SIZE (mode);
2512 rtx to1, cst;
2513
2514 while (data->len >= size)
2515 {
2516 if (data->reverse)
2517 data->offset -= size;
2518
2519 if (data->autinc_to)
2520 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2521 data->offset);
2522 else
2523 to1 = adjust_address (data->to, mode, data->offset);
2524
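      /* If the address was copied into a register by store_by_pieces_1
	 (explicit_inc_to nonzero), adjust it by hand before or after each
	 store instead of relying on an auto-modifying address.  */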
2525 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2526 emit_insn (gen_add2_insn (data->to_addr,
2527 GEN_INT (-(HOST_WIDE_INT) size)));
2528
2529 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2530 emit_insn ((*genfun) (to1, cst));
2531
2532 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2533 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2534
2535 if (! data->reverse)
2536 data->offset += size;
2537
2538 data->len -= size;
2539 }
2540 }
2541 \f
2542 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2543 its length in bytes. */
2544
2545 rtx
2546 clear_storage (object, size)
2547 rtx object;
2548 rtx size;
2549 {
2550 #ifdef TARGET_MEM_FUNCTIONS
2551 static tree fn;
2552 tree call_expr, arg_list;
2553 #endif
2554 rtx retval = 0;
2555 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2556 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2557
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2564 else
2565 {
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2568
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2572 else
2573 {
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2577
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2580
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2583 {
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2586
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2600 {
2601 rtx op1;
2602 rtx last = get_last_insn ();
2603 rtx pat;
2604
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2609
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2611 if (pat)
2612 {
2613 emit_insn (pat);
2614 return 0;
2615 }
2616 else
2617 delete_insns_since (last);
2618 }
2619 }
2620
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2622
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2626
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2630
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2634 emit_queue.
2635
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2642
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2645 #else
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2649 #endif
2650
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2654
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2657
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
2660 incorrect code.
2661
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
2665 {
2666 tree fntype;
2667
2668 /* This was copied from except.c, I don't know if all this is
2669 necessary in this context or not. */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2681 }
2682
2683 /* We need to make an argument list for the function call.
2684
2685 memset has three arguments: the first is a void * address, the
2686 second an integer with the initialization value, and the last is a
2687 size_t byte count. */
2688 arg_list
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2691 object));
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2698
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2705
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2707 #else
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
2711 #endif
2712
2713 /* If we are initializing a readonly value, show the above call
2714 clobbered it. Otherwise, a load from it may erroneously be
2715 hoisted from a loop. */
2716 if (RTX_UNCHANGING_P (object))
2717 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2718 }
2719 }
2720
2721 return retval;
2722 }
2723
2724 /* Generate code to copy Y into X.
2725 Both Y and X must have the same mode, except that
2726 Y can be a constant with VOIDmode.
2727 This mode cannot be BLKmode; use emit_block_move for that.
2728
2729 Return the last instruction emitted. */
2730
2731 rtx
2732 emit_move_insn (x, y)
2733 rtx x, y;
2734 {
2735 enum machine_mode mode = GET_MODE (x);
2736 rtx y_cst = NULL_RTX;
2737 rtx last_insn;
2738
2739 x = protect_from_queue (x, 1);
2740 y = protect_from_queue (y, 0);
2741
2742 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2743 abort ();
2744
2745 /* Never force constant_p_rtx to memory. */
2746 if (GET_CODE (y) == CONSTANT_P_RTX)
2747 ;
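  /* If Y is a constant that the target cannot accept directly, force it
     into the constant pool below, but remember the original value so a
     REG_EQUAL note can be attached to the move later on.  */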
2748 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2749 {
2750 y_cst = y;
2751 y = force_const_mem (mode, y);
2752 }
2753
2754 /* If X or Y are memory references, verify that their addresses are valid
2755 for the machine. */
2756 if (GET_CODE (x) == MEM
2757 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2758 && ! push_operand (x, GET_MODE (x)))
2759 || (flag_force_addr
2760 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2761 x = validize_mem (x);
2762
2763 if (GET_CODE (y) == MEM
2764 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2765 || (flag_force_addr
2766 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2767 y = validize_mem (y);
2768
2769 if (mode == BLKmode)
2770 abort ();
2771
2772 last_insn = emit_move_insn_1 (x, y);
2773
2774 if (y_cst && GET_CODE (x) == REG)
2775 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2776
2777 return last_insn;
2778 }
2779
2780 /* Low level part of emit_move_insn.
2781 Called just like emit_move_insn, but assumes X and Y
2782 are basically valid. */
2783
2784 rtx
2785 emit_move_insn_1 (x, y)
2786 rtx x, y;
2787 {
2788 enum machine_mode mode = GET_MODE (x);
2789 enum machine_mode submode;
2790 enum mode_class class = GET_MODE_CLASS (mode);
2791 unsigned int i;
2792
2793 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2794 abort ();
2795
2796 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2797 return
2798 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2799
2800 /* Expand complex moves by moving real part and imag part, if possible. */
2801 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2802 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2803 * BITS_PER_UNIT),
2804 (class == MODE_COMPLEX_INT
2805 ? MODE_INT : MODE_FLOAT),
2806 0))
2807 && (mov_optab->handlers[(int) submode].insn_code
2808 != CODE_FOR_nothing))
2809 {
2810 /* Don't split destination if it is a stack push. */
2811 int stack = push_operand (x, GET_MODE (x));
2812
2813 #ifdef PUSH_ROUNDING
2814 /* In case we output to the stack, but the size is smaller than the
2815 machine can push exactly, we need to use move instructions. */
2816 if (stack
2817 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2818 {
2819 rtx temp;
2820 int offset1, offset2;
2821
2822 /* Do not use anti_adjust_stack, since we don't want to update
2823 stack_pointer_delta. */
2824 temp = expand_binop (Pmode,
2825 #ifdef STACK_GROWS_DOWNWARD
2826 sub_optab,
2827 #else
2828 add_optab,
2829 #endif
2830 stack_pointer_rtx,
2831 GEN_INT
2832 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2833 stack_pointer_rtx,
2834 0,
2835 OPTAB_LIB_WIDEN);
2836 if (temp != stack_pointer_rtx)
2837 emit_move_insn (stack_pointer_rtx, temp);
2838 #ifdef STACK_GROWS_DOWNWARD
2839 offset1 = 0;
2840 offset2 = GET_MODE_SIZE (submode);
2841 #else
2842 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2843 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2844 + GET_MODE_SIZE (submode));
2845 #endif
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2848 stack_pointer_rtx,
2849 GEN_INT (offset1))),
2850 gen_realpart (submode, y));
2851 emit_move_insn (change_address (x, submode,
2852 gen_rtx_PLUS (Pmode,
2853 stack_pointer_rtx,
2854 GEN_INT (offset2))),
2855 gen_imagpart (submode, y));
2856 }
2857 else
2858 #endif
2859 /* If this is a stack push, push the highpart first, so it
2860 will be in the argument order.
2861
2862 In that case, change_address is used only to convert
2863 the mode, not to change the address. */
2864 if (stack)
2865 {
2866 /* Note that the real part always precedes the imag part in memory
2867 regardless of the machine's endianness. */
2868 #ifdef STACK_GROWS_DOWNWARD
2869 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2870 (gen_rtx_MEM (submode, XEXP (x, 0)),
2871 gen_imagpart (submode, y)));
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2873 (gen_rtx_MEM (submode, XEXP (x, 0)),
2874 gen_realpart (submode, y)));
2875 #else
2876 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2877 (gen_rtx_MEM (submode, XEXP (x, 0)),
2878 gen_realpart (submode, y)));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_imagpart (submode, y)));
2882 #endif
2883 }
2884 else
2885 {
2886 rtx realpart_x, realpart_y;
2887 rtx imagpart_x, imagpart_y;
2888
2889 /* If this is a complex value with each part being smaller than a
2890 word, the usual calling sequence will likely pack the pieces into
2891 a single register. Unfortunately, SUBREG of hard registers only
2892 deals in terms of words, so we have a problem converting input
2893 arguments to the CONCAT of two registers that is used elsewhere
2894 for complex values. If this is before reload, we can copy it into
2895 memory and reload. FIXME, we should see about using extract and
2896 insert on integer registers, but complex short and complex char
2897 variables should be rarely used. */
2898 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2899 && (reload_in_progress | reload_completed) == 0)
2900 {
2901 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2902 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2903
2904 if (packed_dest_p || packed_src_p)
2905 {
2906 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2907 ? MODE_FLOAT : MODE_INT);
2908
2909 enum machine_mode reg_mode
2910 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2911
2912 if (reg_mode != BLKmode)
2913 {
2914 rtx mem = assign_stack_temp (reg_mode,
2915 GET_MODE_SIZE (mode), 0);
2916 rtx cmem = adjust_address (mem, mode, 0);
2917
2918 cfun->cannot_inline
2919 = N_("function using short complex types cannot be inline");
2920
2921 if (packed_dest_p)
2922 {
2923 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2924 emit_move_insn_1 (cmem, y);
2925 return emit_move_insn_1 (sreg, mem);
2926 }
2927 else
2928 {
2929 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2930 emit_move_insn_1 (mem, sreg);
2931 return emit_move_insn_1 (x, cmem);
2932 }
2933 }
2934 }
2935 }
2936
2937 realpart_x = gen_realpart (submode, x);
2938 realpart_y = gen_realpart (submode, y);
2939 imagpart_x = gen_imagpart (submode, x);
2940 imagpart_y = gen_imagpart (submode, y);
2941
2942 /* Show the output dies here. This is necessary for SUBREGs
2943 of pseudos since we cannot track their lifetimes correctly;
2944 hard regs shouldn't appear here except as return values.
2945 We never want to emit such a clobber after reload. */
2946 if (x != y
2947 && ! (reload_in_progress || reload_completed)
2948 && (GET_CODE (realpart_x) == SUBREG
2949 || GET_CODE (imagpart_x) == SUBREG))
2950 {
2951 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2952 }
2953
2954 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2955 (realpart_x, realpart_y));
2956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2957 (imagpart_x, imagpart_y));
2958 }
2959
2960 return get_last_insn ();
2961 }
2962
2963 /* This will handle any multi-word mode that lacks a move_insn pattern.
2964 However, you will get better code if you define such patterns,
2965 even if they must turn into multiple assembler instructions. */
2966 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 {
2968 rtx last_insn = 0;
2969 rtx seq, inner;
2970 int need_clobber;
2971
2972 #ifdef PUSH_ROUNDING
2973
2974 /* If X is a push on the stack, do the push now and replace
2975 X with a reference to the stack pointer. */
2976 if (push_operand (x, GET_MODE (x)))
2977 {
2978 rtx temp;
2979 enum rtx_code code;
2980
2981 /* Do not use anti_adjust_stack, since we don't want to update
2982 stack_pointer_delta. */
2983 temp = expand_binop (Pmode,
2984 #ifdef STACK_GROWS_DOWNWARD
2985 sub_optab,
2986 #else
2987 add_optab,
2988 #endif
2989 stack_pointer_rtx,
2990 GEN_INT
2991 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2992 stack_pointer_rtx,
2993 0,
2994 OPTAB_LIB_WIDEN);
2995 if (temp != stack_pointer_rtx)
2996 emit_move_insn (stack_pointer_rtx, temp);
2997
2998 code = GET_CODE (XEXP (x, 0));
2999 /* Just hope that small offsets off SP are OK. */
3000 if (code == POST_INC)
3001 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3002 GEN_INT (-(HOST_WIDE_INT)
3003 GET_MODE_SIZE (GET_MODE (x))));
3004 else if (code == POST_DEC)
3005 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3006 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3007 else
3008 temp = stack_pointer_rtx;
3009
3010 x = change_address (x, VOIDmode, temp);
3011 }
3012 #endif
3013
3014 /* If we are in reload, see if either operand is a MEM whose address
3015 is scheduled for replacement. */
3016 if (reload_in_progress && GET_CODE (x) == MEM
3017 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3018 x = replace_equiv_address_nv (x, inner);
3019 if (reload_in_progress && GET_CODE (y) == MEM
3020 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3021 y = replace_equiv_address_nv (y, inner);
3022
3023 start_sequence ();
3024
3025 need_clobber = 0;
3026 for (i = 0;
3027 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3028 i++)
3029 {
3030 rtx xpart = operand_subword (x, i, 1, mode);
3031 rtx ypart = operand_subword (y, i, 1, mode);
3032
3033 /* If we can't get a part of Y, put Y into memory if it is a
3034 constant. Otherwise, force it into a register. If we still
3035 can't get a part of Y, abort. */
3036 if (ypart == 0 && CONSTANT_P (y))
3037 {
3038 y = force_const_mem (mode, y);
3039 ypart = operand_subword (y, i, 1, mode);
3040 }
3041 else if (ypart == 0)
3042 ypart = operand_subword_force (y, i, mode);
3043
3044 if (xpart == 0 || ypart == 0)
3045 abort ();
3046
3047 need_clobber |= (GET_CODE (xpart) == SUBREG);
3048
3049 last_insn = emit_move_insn (xpart, ypart);
3050 }
3051
3052 seq = gen_sequence ();
3053 end_sequence ();
3054
3055 /* Show the output dies here. This is necessary for SUBREGs
3056 of pseudos since we cannot track their lifetimes correctly;
3057 hard regs shouldn't appear here except as return values.
3058 We never want to emit such a clobber after reload. */
3059 if (x != y
3060 && ! (reload_in_progress || reload_completed)
3061 && need_clobber != 0)
3062 {
3063 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3064 }
3065
3066 emit_insn (seq);
3067
3068 return last_insn;
3069 }
3070 else
3071 abort ();
3072 }
3073 \f
3074 /* Pushing data onto the stack. */
3075
3076 /* Push a block of length SIZE (perhaps variable)
3077 and return an rtx to address the beginning of the block.
3078 Note that it is not possible for the value returned to be a QUEUED.
3079 The value may be virtual_outgoing_args_rtx.
3080
3081 EXTRA is the number of bytes of padding to push in addition to SIZE.
3082 BELOW nonzero means this padding comes at low addresses;
3083 otherwise, the padding comes at high addresses. */
3084
3085 rtx
3086 push_block (size, extra, below)
3087 rtx size;
3088 int extra, below;
3089 {
3090 rtx temp;
3091
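  /* SIZE is expressed in ptr_mode; convert it (unsigned) to Pmode so it can
     take part in the address arithmetic below.  */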
3092 size = convert_modes (Pmode, ptr_mode, size, 1);
3093 if (CONSTANT_P (size))
3094 anti_adjust_stack (plus_constant (size, extra));
3095 else if (GET_CODE (size) == REG && extra == 0)
3096 anti_adjust_stack (size);
3097 else
3098 {
3099 temp = copy_to_mode_reg (Pmode, size);
3100 if (extra != 0)
3101 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3102 temp, 0, OPTAB_LIB_WIDEN);
3103 anti_adjust_stack (temp);
3104 }
3105
3106 #ifndef STACK_GROWS_DOWNWARD
3107 if (0)
3108 #else
3109 if (1)
3110 #endif
3111 {
3112 temp = virtual_outgoing_args_rtx;
3113 if (extra != 0 && below)
3114 temp = plus_constant (temp, extra);
3115 }
3116 else
3117 {
3118 if (GET_CODE (size) == CONST_INT)
3119 temp = plus_constant (virtual_outgoing_args_rtx,
3120 -INTVAL (size) - (below ? 0 : extra));
3121 else if (extra != 0 && !below)
3122 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3123 negate_rtx (Pmode, plus_constant (size, extra)));
3124 else
3125 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3126 negate_rtx (Pmode, size));
3127 }
3128
3129 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3130 }
3131
3132 #ifdef PUSH_ROUNDING
3133
3134 /* Emit single push insn. */
3135
3136 static void
3137 emit_single_push_insn (mode, x, type)
3138 rtx x;
3139 enum machine_mode mode;
3140 tree type;
3141 {
3142 rtx dest_addr;
3143 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3144 rtx dest;
3145 enum insn_code icode;
3146 insn_operand_predicate_fn pred;
3147
3148 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3149 /* If there is a push pattern, use it. Otherwise fall back to the old way
3150 of handing a MEM that represents the push operation to the move expander. */
3151 icode = push_optab->handlers[(int) mode].insn_code;
3152 if (icode != CODE_FOR_nothing)
3153 {
3154 if (((pred = insn_data[(int) icode].operand[0].predicate)
3155 && !((*pred) (x, mode))))
3156 x = force_reg (mode, x);
3157 emit_insn (GEN_FCN (icode) (x));
3158 return;
3159 }
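  /* When PUSH_ROUNDING pads the push beyond the mode's size, a plain
     STACK_PUSH_CODE address would move the stack pointer by the wrong
     amount, so build an explicit PRE_MODIFY address that adjusts it by the
     rounded size instead.  */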
3160 if (GET_MODE_SIZE (mode) == rounded_size)
3161 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3162 else
3163 {
3164 #ifdef STACK_GROWS_DOWNWARD
3165 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3166 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3167 #else
3168 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3169 GEN_INT (rounded_size));
3170 #endif
3171 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3172 }
3173
3174 dest = gen_rtx_MEM (mode, dest_addr);
3175
3176 if (type != 0)
3177 {
3178 set_mem_attributes (dest, type, 1);
3179
3180 if (flag_optimize_sibling_calls)
3181 /* Function incoming arguments may overlap with sibling call
3182 outgoing arguments and we cannot allow reordering of reads
3183 from function arguments with stores to outgoing arguments
3184 of sibling calls. */
3185 set_mem_alias_set (dest, 0);
3186 }
3187 emit_move_insn (dest, x);
3188 }
3189 #endif
3190
3191 /* Generate code to push X onto the stack, assuming it has mode MODE and
3192 type TYPE.
3193 MODE is redundant except when X is a CONST_INT (since they don't
3194 carry mode info).
3195 SIZE is an rtx for the size of data to be copied (in bytes),
3196 needed only if X is BLKmode.
3197
3198 ALIGN (in bits) is maximum alignment we can assume.
3199
3200 If PARTIAL and REG are both nonzero, then copy that many of the first
3201 words of X into registers starting with REG, and push the rest of X.
3202 The amount of space pushed is decreased by PARTIAL words,
3203 rounded *down* to a multiple of PARM_BOUNDARY.
3204 REG must be a hard register in this case.
3205 If REG is zero but PARTIAL is not, take all other actions for an
3206 argument partially in registers, but do not actually load any
3207 registers.
3208
3209 EXTRA is the amount in bytes of extra space to leave next to this arg.
3210 This is ignored if an argument block has already been allocated.
3211
3212 On a machine that lacks real push insns, ARGS_ADDR is the address of
3213 the bottom of the argument block for this call. We use indexing off there
3214 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3215 argument block has not been preallocated.
3216
3217 ARGS_SO_FAR is the size of args previously pushed for this call.
3218
3219 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3220 for arguments passed in registers. If nonzero, it will be the number
3221 of bytes required. */
3222
3223 void
3224 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3225 args_addr, args_so_far, reg_parm_stack_space,
3226 alignment_pad)
3227 rtx x;
3228 enum machine_mode mode;
3229 tree type;
3230 rtx size;
3231 unsigned int align;
3232 int partial;
3233 rtx reg;
3234 int extra;
3235 rtx args_addr;
3236 rtx args_so_far;
3237 int reg_parm_stack_space;
3238 rtx alignment_pad;
3239 {
3240 rtx xinner;
3241 enum direction stack_direction
3242 #ifdef STACK_GROWS_DOWNWARD
3243 = downward;
3244 #else
3245 = upward;
3246 #endif
3247
3248 /* Decide where to pad the argument: `downward' for below,
3249 `upward' for above, or `none' for don't pad it.
3250 Default is below for small data on big-endian machines; else above. */
3251 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3252
3253 /* Invert direction if stack is post-decrement.
3254 FIXME: why? */
3255 if (STACK_PUSH_CODE == POST_DEC)
3256 if (where_pad != none)
3257 where_pad = (where_pad == downward ? upward : downward);
3258
3259 xinner = x = protect_from_queue (x, 0);
3260
3261 if (mode == BLKmode)
3262 {
3263 /* Copy a block into the stack, entirely or partially. */
3264
3265 rtx temp;
3266 int used = partial * UNITS_PER_WORD;
3267 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3268 int skip;
3269
3270 if (size == 0)
3271 abort ();
3272
3273 used -= offset;
3274
3275 /* USED is now the # of bytes we need not copy to the stack
3276 because registers will take care of them. */
3277
3278 if (partial != 0)
3279 xinner = adjust_address (xinner, BLKmode, used);
3280
3281 /* If the partial register-part of the arg counts in its stack size,
3282 skip the part of stack space corresponding to the registers.
3283 Otherwise, start copying to the beginning of the stack space,
3284 by setting SKIP to 0. */
3285 skip = (reg_parm_stack_space == 0) ? 0 : used;
3286
3287 #ifdef PUSH_ROUNDING
3288 /* Do it with several push insns if that doesn't take lots of insns
3289 and if there is no difficulty with push insns that skip bytes
3290 on the stack for alignment purposes. */
3291 if (args_addr == 0
3292 && PUSH_ARGS
3293 && GET_CODE (size) == CONST_INT
3294 && skip == 0
3295 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3296 /* Here we avoid the case of a structure whose weak alignment
3297 forces many pushes of a small amount of data,
3298 and such small pushes do rounding that causes trouble. */
3299 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3300 || align >= BIGGEST_ALIGNMENT
3301 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3302 == (align / BITS_PER_UNIT)))
3303 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3304 {
3305 /* Push padding now if padding above and stack grows down,
3306 or if padding below and stack grows up.
3307 But if space already allocated, this has already been done. */
3308 if (extra && args_addr == 0
3309 && where_pad != none && where_pad != stack_direction)
3310 anti_adjust_stack (GEN_INT (extra));
3311
3312 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3313 }
3314 else
3315 #endif /* PUSH_ROUNDING */
3316 {
3317 rtx target;
3318
3319 /* Otherwise make space on the stack and copy the data
3320 to the address of that space. */
3321
3322 /* Deduct words put into registers from the size we must copy. */
3323 if (partial != 0)
3324 {
3325 if (GET_CODE (size) == CONST_INT)
3326 size = GEN_INT (INTVAL (size) - used);
3327 else
3328 size = expand_binop (GET_MODE (size), sub_optab, size,
3329 GEN_INT (used), NULL_RTX, 0,
3330 OPTAB_LIB_WIDEN);
3331 }
3332
3333 /* Get the address of the stack space.
3334 In this case, we do not deal with EXTRA separately.
3335 A single stack adjust will do. */
3336 if (! args_addr)
3337 {
3338 temp = push_block (size, extra, where_pad == downward);
3339 extra = 0;
3340 }
3341 else if (GET_CODE (args_so_far) == CONST_INT)
3342 temp = memory_address (BLKmode,
3343 plus_constant (args_addr,
3344 skip + INTVAL (args_so_far)));
3345 else
3346 temp = memory_address (BLKmode,
3347 plus_constant (gen_rtx_PLUS (Pmode,
3348 args_addr,
3349 args_so_far),
3350 skip));
3351 target = gen_rtx_MEM (BLKmode, temp);
3352
3353 if (type != 0)
3354 {
3355 set_mem_attributes (target, type, 1);
3356 /* Function incoming arguments may overlap with sibling call
3357 outgoing arguments and we cannot allow reordering of reads
3358 from function arguments with stores to outgoing arguments
3359 of sibling calls. */
3360 set_mem_alias_set (target, 0);
3361 }
3362 else
3363 set_mem_align (target, align);
3364
3365 /* TEMP is the address of the block. Copy the data there. */
3366 if (GET_CODE (size) == CONST_INT
3367 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3368 {
3369 move_by_pieces (target, xinner, INTVAL (size), align);
3370 goto ret;
3371 }
3372 else
3373 {
3374 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3375 enum machine_mode mode;
3376
3377 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3378 mode != VOIDmode;
3379 mode = GET_MODE_WIDER_MODE (mode))
3380 {
3381 enum insn_code code = movstr_optab[(int) mode];
3382 insn_operand_predicate_fn pred;
3383
3384 if (code != CODE_FOR_nothing
3385 && ((GET_CODE (size) == CONST_INT
3386 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3387 <= (GET_MODE_MASK (mode) >> 1)))
3388 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3389 && (!(pred = insn_data[(int) code].operand[0].predicate)
3390 || ((*pred) (target, BLKmode)))
3391 && (!(pred = insn_data[(int) code].operand[1].predicate)
3392 || ((*pred) (xinner, BLKmode)))
3393 && (!(pred = insn_data[(int) code].operand[3].predicate)
3394 || ((*pred) (opalign, VOIDmode))))
3395 {
3396 rtx op2 = convert_to_mode (mode, size, 1);
3397 rtx last = get_last_insn ();
3398 rtx pat;
3399
3400 pred = insn_data[(int) code].operand[2].predicate;
3401 if (pred != 0 && ! (*pred) (op2, mode))
3402 op2 = copy_to_mode_reg (mode, op2);
3403
3404 pat = GEN_FCN ((int) code) (target, xinner,
3405 op2, opalign);
3406 if (pat)
3407 {
3408 emit_insn (pat);
3409 goto ret;
3410 }
3411 else
3412 delete_insns_since (last);
3413 }
3414 }
3415 }
3416
3417 if (!ACCUMULATE_OUTGOING_ARGS)
3418 {
3419 /* If the source is referenced relative to the stack pointer,
3420 copy it to another register to stabilize it. We do not need
3421 to do this if we know that we won't be changing sp. */
3422
3423 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3424 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3425 temp = copy_to_reg (temp);
3426 }
3427
3428 /* Make inhibit_defer_pop nonzero around the library call
3429 to force it to pop the bcopy-arguments right away. */
3430 NO_DEFER_POP;
3431 #ifdef TARGET_MEM_FUNCTIONS
3432 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3433 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3434 convert_to_mode (TYPE_MODE (sizetype),
3435 size, TREE_UNSIGNED (sizetype)),
3436 TYPE_MODE (sizetype));
3437 #else
3438 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3439 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3440 convert_to_mode (TYPE_MODE (integer_type_node),
3441 size,
3442 TREE_UNSIGNED (integer_type_node)),
3443 TYPE_MODE (integer_type_node));
3444 #endif
3445 OK_DEFER_POP;
3446 }
3447 }
3448 else if (partial > 0)
3449 {
3450 /* Scalar partly in registers. */
3451
3452 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3453 int i;
3454 int not_stack;
3455 /* # words of start of argument
3456 that we must make space for but need not store. */
3457 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3458 int args_offset = INTVAL (args_so_far);
3459 int skip;
3460
3461 /* Push padding now if padding above and stack grows down,
3462 or if padding below and stack grows up.
3463 But if space already allocated, this has already been done. */
3464 if (extra && args_addr == 0
3465 && where_pad != none && where_pad != stack_direction)
3466 anti_adjust_stack (GEN_INT (extra));
3467
3468 /* If we make space by pushing it, we might as well push
3469 the real data. Otherwise, we can leave OFFSET nonzero
3470 and leave the space uninitialized. */
3471 if (args_addr == 0)
3472 offset = 0;
3473
3474 /* Now NOT_STACK gets the number of words that we don't need to
3475 allocate on the stack. */
3476 not_stack = partial - offset;
3477
3478 /* If the partial register-part of the arg counts in its stack size,
3479 skip the part of stack space corresponding to the registers.
3480 Otherwise, start copying to the beginning of the stack space,
3481 by setting SKIP to 0. */
3482 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3483
3484 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3485 x = validize_mem (force_const_mem (mode, x));
3486
3487 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3488 SUBREGs of such registers are not allowed. */
3489 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3490 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3491 x = copy_to_reg (x);
3492
3493 /* Loop over all the words allocated on the stack for this arg. */
3494 /* We can do it by words, because any scalar bigger than a word
3495 has a size a multiple of a word. */
3496 #ifndef PUSH_ARGS_REVERSED
3497 for (i = not_stack; i < size; i++)
3498 #else
3499 for (i = size - 1; i >= not_stack; i--)
3500 #endif
3501 if (i >= not_stack + offset)
3502 emit_push_insn (operand_subword_force (x, i, mode),
3503 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3504 0, args_addr,
3505 GEN_INT (args_offset + ((i - not_stack + skip)
3506 * UNITS_PER_WORD)),
3507 reg_parm_stack_space, alignment_pad);
3508 }
3509 else
3510 {
3511 rtx addr;
3512 rtx target = NULL_RTX;
3513 rtx dest;
3514
3515 /* Push padding now if padding above and stack grows down,
3516 or if padding below and stack grows up.
3517 But if space already allocated, this has already been done. */
3518 if (extra && args_addr == 0
3519 && where_pad != none && where_pad != stack_direction)
3520 anti_adjust_stack (GEN_INT (extra));
3521
3522 #ifdef PUSH_ROUNDING
3523 if (args_addr == 0 && PUSH_ARGS)
3524 emit_single_push_insn (mode, x, type);
3525 else
3526 #endif
3527 {
3528 if (GET_CODE (args_so_far) == CONST_INT)
3529 addr
3530 = memory_address (mode,
3531 plus_constant (args_addr,
3532 INTVAL (args_so_far)));
3533 else
3534 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3535 args_so_far));
3536 target = addr;
3537 dest = gen_rtx_MEM (mode, addr);
3538 if (type != 0)
3539 {
3540 set_mem_attributes (dest, type, 1);
3541 /* Function incoming arguments may overlap with sibling call
3542 outgoing arguments and we cannot allow reordering of reads
3543 from function arguments with stores to outgoing arguments
3544 of sibling calls. */
3545 set_mem_alias_set (dest, 0);
3546 }
3547
3548 emit_move_insn (dest, x);
3549 }
3550
3551 }
3552
3553 ret:
3554 /* If part should go in registers, copy that part
3555 into the appropriate registers. Do this now, at the end,
3556 since mem-to-mem copies above may do function calls. */
3557 if (partial > 0 && reg != 0)
3558 {
3559 /* Handle calls that pass values in multiple non-contiguous locations.
3560 The Irix 6 ABI has examples of this. */
3561 if (GET_CODE (reg) == PARALLEL)
3562 emit_group_load (reg, x, -1); /* ??? size? */
3563 else
3564 move_block_to_reg (REGNO (reg), x, partial, mode);
3565 }
3566
3567 if (extra && args_addr == 0 && where_pad == stack_direction)
3568 anti_adjust_stack (GEN_INT (extra));
3569
3570 if (alignment_pad && args_addr == 0)
3571 anti_adjust_stack (alignment_pad);
3572 }
3573 \f
3574 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3575 operations. */
3576
3577 static rtx
3578 get_subtarget (x)
3579 rtx x;
3580 {
3581 return ((x == 0
3582 /* Only registers can be subtargets. */
3583 || GET_CODE (x) != REG
3584 /* If the register is readonly, it can't be set more than once. */
3585 || RTX_UNCHANGING_P (x)
3586 /* Don't use hard regs to avoid extending their life. */
3587 || REGNO (x) < FIRST_PSEUDO_REGISTER
3588 /* Avoid subtargets inside loops,
3589 since they hide some invariant expressions. */
3590 || preserve_subexpressions_p ())
3591 ? 0 : x);
3592 }
3593
3594 /* Expand an assignment that stores the value of FROM into TO.
3595 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3596 (This may contain a QUEUED rtx;
3597 if the value is constant, this rtx is a constant.)
3598 Otherwise, the returned value is NULL_RTX.
3599
3600 SUGGEST_REG is no longer actually used.
3601 It used to mean, copy the value through a register
3602 and return that register, if that is possible.
3603 We now use WANT_VALUE to decide whether to do this. */
3604
3605 rtx
3606 expand_assignment (to, from, want_value, suggest_reg)
3607 tree to, from;
3608 int want_value;
3609 int suggest_reg ATTRIBUTE_UNUSED;
3610 {
3611 rtx to_rtx = 0;
3612 rtx result;
3613
3614 /* Don't crash if the lhs of the assignment was erroneous. */
3615
3616 if (TREE_CODE (to) == ERROR_MARK)
3617 {
3618 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3619 return want_value ? result : NULL_RTX;
3620 }
3621
3622 /* Assignment of a structure component needs special treatment
3623 if the structure component's rtx is not simply a MEM.
3624 Assignment of an array element at a constant index, and assignment of
3625 an array element in an unaligned packed structure field, has the same
3626 problem. */
3627
3628 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3629 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3630 {
3631 enum machine_mode mode1;
3632 HOST_WIDE_INT bitsize, bitpos;
3633 rtx orig_to_rtx;
3634 tree offset;
3635 int unsignedp;
3636 int volatilep = 0;
3637 tree tem;
3638
3639 push_temp_slots ();
3640 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3641 &unsignedp, &volatilep);
3642
3643 /* If we are going to use store_bit_field and extract_bit_field,
3644 make sure to_rtx will be safe for multiple use. */
3645
3646 if (mode1 == VOIDmode && want_value)
3647 tem = stabilize_reference (tem);
3648
3649 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3650
3651 if (offset != 0)
3652 {
3653 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3654
3655 if (GET_CODE (to_rtx) != MEM)
3656 abort ();
3657
3658 if (GET_MODE (offset_rtx) != ptr_mode)
3659 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3660
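	  /* On targets where Pmode is wider than ptr_mode, the offset must
	     additionally be extended to Pmode before it can be used to form
	     an address.  */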
3661 #ifdef POINTERS_EXTEND_UNSIGNED
3662 if (GET_MODE (offset_rtx) != Pmode)
3663 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3664 #endif
3665
3666 /* A constant address in TO_RTX can have VOIDmode; we must not try
3667 to call force_reg in that case, so avoid it. */
3668 if (GET_CODE (to_rtx) == MEM
3669 && GET_MODE (to_rtx) == BLKmode
3670 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3671 && bitsize > 0
3672 && (bitpos % bitsize) == 0
3673 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3674 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3675 {
3676 rtx temp
3677 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3678
3679 if (GET_CODE (XEXP (temp, 0)) == REG)
3680 to_rtx = temp;
3681 else
3682 to_rtx = (replace_equiv_address
3683 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3684 XEXP (temp, 0))));
3685 bitpos = 0;
3686 }
3687
3688 to_rtx = offset_address (to_rtx, offset_rtx,
3689 highest_pow2_factor (offset));
3690 }
3691
3692 if (GET_CODE (to_rtx) == MEM)
3693 {
3694 tree old_expr = MEM_EXPR (to_rtx);
3695
3696 /* If the field is at offset zero, we could have been given the
3697 DECL_RTX of the parent struct. Don't munge it. */
3698 to_rtx = shallow_copy_rtx (to_rtx);
3699
3700 set_mem_attributes (to_rtx, to, 0);
3701
3702 /* If we changed MEM_EXPR, that means we're now referencing
3703 the COMPONENT_REF, which means that MEM_OFFSET must be
3704 relative to that field. But we've not yet reflected BITPOS
3705 in TO_RTX. This will be done in store_field. Adjust for
3706 that by biasing MEM_OFFSET by -bitpos. */
3707 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3708 && (bitpos / BITS_PER_UNIT) != 0)
3709 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3710 - (bitpos / BITS_PER_UNIT)));
3711 }
3712
3713 /* Deal with volatile and readonly fields. The former is only done
3714 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3715 if (volatilep && GET_CODE (to_rtx) == MEM)
3716 {
3717 if (to_rtx == orig_to_rtx)
3718 to_rtx = copy_rtx (to_rtx);
3719 MEM_VOLATILE_P (to_rtx) = 1;
3720 }
3721
3722 if (TREE_CODE (to) == COMPONENT_REF
3723 && TREE_READONLY (TREE_OPERAND (to, 1)))
3724 {
3725 if (to_rtx == orig_to_rtx)
3726 to_rtx = copy_rtx (to_rtx);
3727 RTX_UNCHANGING_P (to_rtx) = 1;
3728 }
3729
3730 if (! can_address_p (to))
3731 {
3732 if (to_rtx == orig_to_rtx)
3733 to_rtx = copy_rtx (to_rtx);
3734 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3735 }
3736
3737 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3738 (want_value
3739 /* Spurious cast for HPUX compiler. */
3740 ? ((enum machine_mode)
3741 TYPE_MODE (TREE_TYPE (to)))
3742 : VOIDmode),
3743 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3744
3745 preserve_temp_slots (result);
3746 free_temp_slots ();
3747 pop_temp_slots ();
3748
3749 /* If the value is meaningful, convert RESULT to the proper mode.
3750 Otherwise, return nothing. */
3751 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3752 TYPE_MODE (TREE_TYPE (from)),
3753 result,
3754 TREE_UNSIGNED (TREE_TYPE (to)))
3755 : NULL_RTX);
3756 }
3757
3758 /* If the rhs is a function call and its value is not an aggregate,
3759 call the function before we start to compute the lhs.
3760 This is needed for correct code for cases such as
3761 val = setjmp (buf) on machines where reference to val
3762 requires loading up part of an address in a separate insn.
3763
3764 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
3765 since it might be a promoted variable where the zero- or sign-extension
3766 needs to be done. Handling this in the normal way is safe because no
3767 computation is done before the call. */
3768 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3769 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3770 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3771 && GET_CODE (DECL_RTL (to)) == REG))
3772 {
3773 rtx value;
3774
3775 push_temp_slots ();
3776 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3777 if (to_rtx == 0)
3778 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3779
3780 /* Handle calls that return values in multiple non-contiguous locations.
3781 The Irix 6 ABI has examples of this. */
3782 if (GET_CODE (to_rtx) == PARALLEL)
3783 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3784 else if (GET_MODE (to_rtx) == BLKmode)
3785 emit_block_move (to_rtx, value, expr_size (from));
3786 else
3787 {
3788 #ifdef POINTERS_EXTEND_UNSIGNED
3789 if (POINTER_TYPE_P (TREE_TYPE (to))
3790 && GET_MODE (to_rtx) != GET_MODE (value))
3791 value = convert_memory_address (GET_MODE (to_rtx), value);
3792 #endif
3793 emit_move_insn (to_rtx, value);
3794 }
3795 preserve_temp_slots (to_rtx);
3796 free_temp_slots ();
3797 pop_temp_slots ();
3798 return want_value ? to_rtx : NULL_RTX;
3799 }
3800
3801 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3802 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3803
3804 if (to_rtx == 0)
3805 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3806
3807 /* Don't move directly into a return register. */
3808 if (TREE_CODE (to) == RESULT_DECL
3809 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3810 {
3811 rtx temp;
3812
3813 push_temp_slots ();
3814 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3815
3816 if (GET_CODE (to_rtx) == PARALLEL)
3817 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3818 else
3819 emit_move_insn (to_rtx, temp);
3820
3821 preserve_temp_slots (to_rtx);
3822 free_temp_slots ();
3823 pop_temp_slots ();
3824 return want_value ? to_rtx : NULL_RTX;
3825 }
3826
3827 /* In case we are returning the contents of an object which overlaps
3828 the place the value is being stored, use a safe function when copying
3829 a value through a pointer into a structure value return block. */
3830 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3831 && current_function_returns_struct
3832 && !current_function_returns_pcc_struct)
3833 {
3834 rtx from_rtx, size;
3835
3836 push_temp_slots ();
3837 size = expr_size (from);
3838 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3839
3840 #ifdef TARGET_MEM_FUNCTIONS
3841 emit_library_call (memmove_libfunc, LCT_NORMAL,
3842 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3843 XEXP (from_rtx, 0), Pmode,
3844 convert_to_mode (TYPE_MODE (sizetype),
3845 size, TREE_UNSIGNED (sizetype)),
3846 TYPE_MODE (sizetype));
3847 #else
3848 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3849 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3850 XEXP (to_rtx, 0), Pmode,
3851 convert_to_mode (TYPE_MODE (integer_type_node),
3852 size, TREE_UNSIGNED (integer_type_node)),
3853 TYPE_MODE (integer_type_node));
3854 #endif
3855
3856 preserve_temp_slots (to_rtx);
3857 free_temp_slots ();
3858 pop_temp_slots ();
3859 return want_value ? to_rtx : NULL_RTX;
3860 }
3861
3862 /* Compute FROM and store the value in the rtx we got. */
3863
3864 push_temp_slots ();
3865 result = store_expr (from, to_rtx, want_value);
3866 preserve_temp_slots (result);
3867 free_temp_slots ();
3868 pop_temp_slots ();
3869 return want_value ? result : NULL_RTX;
3870 }
3871
3872 /* Generate code for computing expression EXP,
3873 and storing the value into TARGET.
3874 TARGET may contain a QUEUED rtx.
3875
3876 If WANT_VALUE is nonzero, return a copy of the value
3877 not in TARGET, so that we can be sure to use the proper
3878 value in a containing expression even if TARGET has something
3879 else stored in it. If possible, we copy the value through a pseudo
3880 and return that pseudo. Or, if the value is constant, we try to
3881 return the constant. In some cases, we return a pseudo
3882 copied *from* TARGET.
3883
3884 If the mode is BLKmode then we may return TARGET itself.
3885 It turns out that in BLKmode this doesn't cause a problem,
3886 because C has no operators that could combine two different
3887 assignments into the same BLKmode object with different values
3888 and no intervening sequence point. Will other languages need this to
3889 be more thorough?
3890
3891 If WANT_VALUE is 0, we return NULL, to make sure
3892 to catch quickly any cases where the caller uses the value
3893 and fails to set WANT_VALUE. */
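/* For example, in `a = b = c' the inner assignment is itself used as a
   value, so its store_expr call passes a nonzero WANT_VALUE and uses the
   returned rtx; a statement-level assignment passes WANT_VALUE of zero
   and gets NULL_RTX back.  (Illustrative example only.)  */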
3894
3895 rtx
3896 store_expr (exp, target, want_value)
3897 tree exp;
3898 rtx target;
3899 int want_value;
3900 {
3901 rtx temp;
3902 int dont_return_target = 0;
3903 int dont_store_target = 0;
3904
3905 if (TREE_CODE (exp) == COMPOUND_EXPR)
3906 {
3907 /* Perform first part of compound expression, then assign from second
3908 part. */
3909 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3910 emit_queue ();
3911 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3912 }
3913 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3914 {
3915 /* For a conditional expression, get a safe form of the target. Then
3916 test the condition, doing the appropriate assignment on either
3917 side. This avoids the creation of unnecessary temporaries.
3918 For non-BLKmode, it is more efficient not to do this. */
3919
3920 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3921
3922 emit_queue ();
3923 target = protect_from_queue (target, 1);
3924
3925 do_pending_stack_adjust ();
3926 NO_DEFER_POP;
3927 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3928 start_cleanup_deferral ();
3929 store_expr (TREE_OPERAND (exp, 1), target, 0);
3930 end_cleanup_deferral ();
3931 emit_queue ();
3932 emit_jump_insn (gen_jump (lab2));
3933 emit_barrier ();
3934 emit_label (lab1);
3935 start_cleanup_deferral ();
3936 store_expr (TREE_OPERAND (exp, 2), target, 0);
3937 end_cleanup_deferral ();
3938 emit_queue ();
3939 emit_label (lab2);
3940 OK_DEFER_POP;
3941
3942 return want_value ? target : NULL_RTX;
3943 }
3944 else if (queued_subexp_p (target))
3945 /* If target contains a postincrement, let's not risk
3946 using it as the place to generate the rhs. */
3947 {
3948 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3949 {
3950 /* Expand EXP into a new pseudo. */
3951 temp = gen_reg_rtx (GET_MODE (target));
3952 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3953 }
3954 else
3955 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3956
3957 /* If target is volatile, ANSI requires accessing the value
3958 *from* the target, if it is accessed. So make that happen.
3959 In no case return the target itself. */
3960 if (! MEM_VOLATILE_P (target) && want_value)
3961 dont_return_target = 1;
3962 }
3963 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3964 && GET_MODE (target) != BLKmode)
3965 /* If target is in memory and caller wants value in a register instead,
3966 arrange that. Pass TARGET as target for expand_expr so that,
3967 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3968 We know expand_expr will not use the target in that case.
3969 Don't do this if TARGET is volatile because we are supposed
3970 to write it and then read it. */
3971 {
3972 temp = expand_expr (exp, target, GET_MODE (target), 0);
3973 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3974 {
3975 /* If TEMP is already in the desired TARGET, only copy it from
3976 memory and don't store it there again. */
3977 if (temp == target
3978 || (rtx_equal_p (temp, target)
3979 && ! side_effects_p (temp) && ! side_effects_p (target)))
3980 dont_store_target = 1;
3981 temp = copy_to_reg (temp);
3982 }
3983 dont_return_target = 1;
3984 }
3985 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3986 /* If this is a scalar in a register that is stored in a wider mode
3987 than the declared mode, compute the result into its declared mode
3988 and then convert to the wider mode. Our value is the computed
3989 expression. */
3990 {
3991 /* If we don't want a value, we can do the conversion inside EXP,
3992 which will often result in some optimizations. Do the conversion
3993 in two steps: first change the signedness, if needed, then
3994 the extend. But don't do this if the type of EXP is a subtype
3995 of something else since then the conversion might involve
3996 more than just converting modes. */
3997 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3998 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3999 {
4000 if (TREE_UNSIGNED (TREE_TYPE (exp))
4001 != SUBREG_PROMOTED_UNSIGNED_P (target))
4002 exp
4003 = convert
4004 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4005 TREE_TYPE (exp)),
4006 exp);
4007
4008 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4009 SUBREG_PROMOTED_UNSIGNED_P (target)),
4010 exp);
4011 }
4012
4013 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4014
4015 /* If TEMP is a volatile MEM and we want a result value, make
4016 the access now so it gets done only once. Likewise if
4017 it contains TARGET. */
4018 if (GET_CODE (temp) == MEM && want_value
4019 && (MEM_VOLATILE_P (temp)
4020 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4021 temp = copy_to_reg (temp);
4022
4023 /* If TEMP is a VOIDmode constant, use convert_modes to make
4024 sure that we properly convert it. */
4025 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4026 {
4027 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4028 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4029 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4030 GET_MODE (target), temp,
4031 SUBREG_PROMOTED_UNSIGNED_P (target));
4032 }
4033
4034 convert_move (SUBREG_REG (target), temp,
4035 SUBREG_PROMOTED_UNSIGNED_P (target));
4036
4037 /* If we promoted a constant, change the mode back down to match
4038 target. Otherwise, the caller might get confused by a result whose
4039 mode is larger than expected. */
4040
4041 if (want_value && GET_MODE (temp) != GET_MODE (target)
4042 && GET_MODE (temp) != VOIDmode)
4043 {
4044 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4045 SUBREG_PROMOTED_VAR_P (temp) = 1;
4046 SUBREG_PROMOTED_UNSIGNED_P (temp)
4047 = SUBREG_PROMOTED_UNSIGNED_P (target);
4048 }
4049
4050 return want_value ? temp : NULL_RTX;
4051 }
4052 else
4053 {
4054 temp = expand_expr (exp, target, GET_MODE (target), 0);
4055 /* Return TARGET if it's a specified hardware register.
4056 If TARGET is a volatile mem ref, either return TARGET
4057 or return a reg copied *from* TARGET; ANSI requires this.
4058
4059 Otherwise, if TEMP is not TARGET, return TEMP
4060 if it is constant (for efficiency),
4061 or if we really want the correct value. */
4062 if (!(target && GET_CODE (target) == REG
4063 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4064 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4065 && ! rtx_equal_p (temp, target)
4066 && (CONSTANT_P (temp) || want_value))
4067 dont_return_target = 1;
4068 }
4069
4070 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4071 the same as that of TARGET, adjust the constant. This is needed, for
4072 example, in case it is a CONST_DOUBLE and we want only a word-sized
4073 value. */
4074 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4075 && TREE_CODE (exp) != ERROR_MARK
4076 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4077 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4078 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4079
4080 /* If value was not generated in the target, store it there.
4081 Convert the value to TARGET's type first if necessary.
4082 If TEMP and TARGET compare equal according to rtx_equal_p, but
4083 one or both of them are volatile memory refs, we have to distinguish
4084 two cases:
4085 - expand_expr has used TARGET. In this case, we must not generate
4086 another copy. This can be detected by TARGET being equal according
4087 to == .
4088 - expand_expr has not used TARGET - that means that the source just
4089 happens to have the same RTX form. Since temp will have been created
4090 by expand_expr, it will compare unequal according to == .
4091 We must generate a copy in this case, to reach the correct number
4092 of volatile memory references. */
4093
4094 if ((! rtx_equal_p (temp, target)
4095 || (temp != target && (side_effects_p (temp)
4096 || side_effects_p (target))))
4097 && TREE_CODE (exp) != ERROR_MARK
4098 && ! dont_store_target)
4099 {
4100 target = protect_from_queue (target, 1);
4101 if (GET_MODE (temp) != GET_MODE (target)
4102 && GET_MODE (temp) != VOIDmode)
4103 {
4104 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4105 if (dont_return_target)
4106 {
4107 /* In this case, we will return TEMP,
4108 so make sure it has the proper mode.
4109 But don't forget to store the value into TARGET. */
4110 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4111 emit_move_insn (target, temp);
4112 }
4113 else
4114 convert_move (target, temp, unsignedp);
4115 }
4116
4117 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4118 {
4119 /* Handle copying a string constant into an array. The string
4120 constant may be shorter than the array. So copy just the string's
4121 actual length, and clear the rest. First get the size of the data
4122 type of the string, which is actually the size of the target. */
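      /* Roughly: for `char buf[8] = "hi";' the string's bytes are copied
         and the remaining bytes of BUF are cleared below (illustrative
         example).  */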
4123 rtx size = expr_size (exp);
4124
4125 if (GET_CODE (size) == CONST_INT
4126 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4127 emit_block_move (target, temp, size);
4128 else
4129 {
4130 /* Compute the size of the data to copy from the string. */
4131 tree copy_size
4132 = size_binop (MIN_EXPR,
4133 make_tree (sizetype, size),
4134 size_int (TREE_STRING_LENGTH (exp)));
4135 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4136 VOIDmode, 0);
4137 rtx label = 0;
4138
4139 /* Copy that much. */
4140 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4141 emit_block_move (target, temp, copy_size_rtx);
4142
4143 /* Figure out how much is left in TARGET that we have to clear.
4144 Do all calculations in ptr_mode. */
4145 if (GET_CODE (copy_size_rtx) == CONST_INT)
4146 {
4147 size = plus_constant (size, -INTVAL (copy_size_rtx));
4148 target = adjust_address (target, BLKmode,
4149 INTVAL (copy_size_rtx));
4150 }
4151 else
4152 {
4153 size = expand_binop (ptr_mode, sub_optab, size,
4154 copy_size_rtx, NULL_RTX, 0,
4155 OPTAB_LIB_WIDEN);
4156
4157 #ifdef POINTERS_EXTEND_UNSIGNED
4158 if (GET_MODE (copy_size_rtx) != Pmode)
4159 copy_size_rtx = convert_memory_address (Pmode,
4160 copy_size_rtx);
4161 #endif
4162
4163 target = offset_address (target, copy_size_rtx,
4164 highest_pow2_factor (copy_size));
4165 label = gen_label_rtx ();
4166 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4167 GET_MODE (size), 0, label);
4168 }
4169
4170 if (size != const0_rtx)
4171 clear_storage (target, size);
4172
4173 if (label)
4174 emit_label (label);
4175 }
4176 }
4177 /* Handle calls that return values in multiple non-contiguous locations.
4178 The Irix 6 ABI has examples of this. */
4179 else if (GET_CODE (target) == PARALLEL)
4180 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4181 else if (GET_MODE (temp) == BLKmode)
4182 emit_block_move (target, temp, expr_size (exp));
4183 else
4184 emit_move_insn (target, temp);
4185 }
4186
4187 /* If we don't want a value, return NULL_RTX. */
4188 if (! want_value)
4189 return NULL_RTX;
4190
4191 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4192 ??? The latter test doesn't seem to make sense. */
4193 else if (dont_return_target && GET_CODE (temp) != MEM)
4194 return temp;
4195
4196 /* Return TARGET itself if it is a hard register. */
4197 else if (want_value && GET_MODE (target) != BLKmode
4198 && ! (GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4200 return copy_to_reg (target);
4201
4202 else
4203 return target;
4204 }
4205 \f
4206 /* Return 1 if EXP just contains zeros. */
4207
4208 static int
4209 is_zeros_p (exp)
4210 tree exp;
4211 {
4212 tree elt;
4213
4214 switch (TREE_CODE (exp))
4215 {
4216 case CONVERT_EXPR:
4217 case NOP_EXPR:
4218 case NON_LVALUE_EXPR:
4219 case VIEW_CONVERT_EXPR:
4220 return is_zeros_p (TREE_OPERAND (exp, 0));
4221
4222 case INTEGER_CST:
4223 return integer_zerop (exp);
4224
4225 case COMPLEX_CST:
4226 return
4227 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4228
4229 case REAL_CST:
4230 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4231
4232 case CONSTRUCTOR:
4233 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4234 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4235 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4236 if (! is_zeros_p (TREE_VALUE (elt)))
4237 return 0;
4238
4239 return 1;
4240
4241 default:
4242 return 0;
4243 }
4244 }
4245
4246 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4247
4248 static int
4249 mostly_zeros_p (exp)
4250 tree exp;
4251 {
4252 if (TREE_CODE (exp) == CONSTRUCTOR)
4253 {
4254 int elts = 0, zeros = 0;
4255 tree elt = CONSTRUCTOR_ELTS (exp);
4256 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4257 {
4258 /* If there are no ranges of true bits, it is all zero. */
4259 return elt == NULL_TREE;
4260 }
4261 for (; elt; elt = TREE_CHAIN (elt))
4262 {
4263 /* We do not handle the case where the index is a RANGE_EXPR,
4264 so the statistic will be somewhat inaccurate.
4265 We do make a more accurate count in store_constructor itself,
4266 and since this function is only used for nested array elements,
4267 this should be close enough. */
4268 if (mostly_zeros_p (TREE_VALUE (elt)))
4269 zeros++;
4270 elts++;
4271 }
4272
4273 return 4 * zeros >= 3 * elts;
4274 }
4275
4276 return is_zeros_p (exp);
4277 }
4278 \f
4279 /* Helper function for store_constructor.
4280 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4281 TYPE is the type of the CONSTRUCTOR, not the element type.
4282 CLEARED is as for store_constructor.
4283 ALIAS_SET is the alias set to use for any stores.
4284
4285 This provides a recursive shortcut back to store_constructor when it isn't
4286 necessary to go through store_field. This is so that we can pass through
4287 the cleared field to let store_constructor know that we may not have to
4288 clear a substructure if the outer structure has already been cleared. */
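/* For example, when storing a nested constructor such as `{ { 0, 0 } }',
   the outer store_constructor may already have cleared TARGET; passing
   CLEARED through lets the recursive call skip clearing, and skip storing
   zero-valued elements, in the inner aggregate.  (Illustrative note.)  */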
4289
4290 static void
4291 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4292 alias_set)
4293 rtx target;
4294 unsigned HOST_WIDE_INT bitsize;
4295 HOST_WIDE_INT bitpos;
4296 enum machine_mode mode;
4297 tree exp, type;
4298 int cleared;
4299 int alias_set;
4300 {
4301 if (TREE_CODE (exp) == CONSTRUCTOR
4302 && bitpos % BITS_PER_UNIT == 0
4303 /* If we have a non-zero bitpos for a register target, then we just
4304 let store_field do the bitfield handling. This is unlikely to
4305 generate unnecessary clear instructions anyway. */
4306 && (bitpos == 0 || GET_CODE (target) == MEM))
4307 {
4308 if (GET_CODE (target) == MEM)
4309 target
4310 = adjust_address (target,
4311 GET_MODE (target) == BLKmode
4312 || 0 != (bitpos
4313 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4314 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4315
4316
4317 /* Update the alias set, if required. */
4318 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4319 && MEM_ALIAS_SET (target) != 0)
4320 {
4321 target = copy_rtx (target);
4322 set_mem_alias_set (target, alias_set);
4323 }
4324
4325 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4326 }
4327 else
4328 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4329 alias_set);
4330 }
4331
4332 /* Store the value of constructor EXP into the rtx TARGET.
4333 TARGET is either a REG or a MEM; we know it cannot conflict, since
4334 safe_from_p has been called.
4335 CLEARED is true if TARGET is known to have been zero'd.
4336 SIZE is the number of bytes of TARGET we are allowed to modify: this
4337 may not be the same as the size of EXP if we are assigning to a field
4338 which has been packed to exclude padding bits. */
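/* Illustrative example: for `struct { int a, b, c; } x = { 1 };' the
   constructor has fewer elements than the type, so the code below clears
   all of TARGET first and then stores 1 into the field for A; the
   remaining fields are simply left zero.  */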
4339
4340 static void
4341 store_constructor (exp, target, cleared, size)
4342 tree exp;
4343 rtx target;
4344 int cleared;
4345 HOST_WIDE_INT size;
4346 {
4347 tree type = TREE_TYPE (exp);
4348 #ifdef WORD_REGISTER_OPERATIONS
4349 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4350 #endif
4351
4352 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4353 || TREE_CODE (type) == QUAL_UNION_TYPE)
4354 {
4355 tree elt;
4356
4357 /* We either clear the aggregate or indicate the value is dead. */
4358 if ((TREE_CODE (type) == UNION_TYPE
4359 || TREE_CODE (type) == QUAL_UNION_TYPE)
4360 && ! cleared
4361 && ! CONSTRUCTOR_ELTS (exp))
4362 /* If the constructor is empty, clear the union. */
4363 {
4364 clear_storage (target, expr_size (exp));
4365 cleared = 1;
4366 }
4367
4368 /* If we are building a static constructor into a register,
4369 set the initial value as zero so we can fold the value into
4370 a constant. But if more than one register is involved,
4371 this probably loses. */
4372 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4373 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4374 {
4375 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4376 cleared = 1;
4377 }
4378
4379 /* If the constructor has fewer fields than the structure
4380 or if we are initializing the structure to mostly zeros,
4381 clear the whole structure first. Don't do this if TARGET is a
4382 register whose mode size isn't equal to SIZE since clear_storage
4383 can't handle this case. */
4384 else if (! cleared && size > 0
4385 && ((list_length (CONSTRUCTOR_ELTS (exp))
4386 != fields_length (type))
4387 || mostly_zeros_p (exp))
4388 && (GET_CODE (target) != REG
4389 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4390 == size)))
4391 {
4392 clear_storage (target, GEN_INT (size));
4393 cleared = 1;
4394 }
4395
4396 if (! cleared)
4397 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4398
4399 /* Store each element of the constructor into
4400 the corresponding field of TARGET. */
4401
4402 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4403 {
4404 tree field = TREE_PURPOSE (elt);
4405 tree value = TREE_VALUE (elt);
4406 enum machine_mode mode;
4407 HOST_WIDE_INT bitsize;
4408 HOST_WIDE_INT bitpos = 0;
4409 int unsignedp;
4410 tree offset;
4411 rtx to_rtx = target;
4412
4413 /* Just ignore missing fields.
4414 We cleared the whole structure, above,
4415 if any fields are missing. */
4416 if (field == 0)
4417 continue;
4418
4419 if (cleared && is_zeros_p (value))
4420 continue;
4421
4422 if (host_integerp (DECL_SIZE (field), 1))
4423 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4424 else
4425 bitsize = -1;
4426
4427 unsignedp = TREE_UNSIGNED (field);
4428 mode = DECL_MODE (field);
4429 if (DECL_BIT_FIELD (field))
4430 mode = VOIDmode;
4431
4432 offset = DECL_FIELD_OFFSET (field);
4433 if (host_integerp (offset, 0)
4434 && host_integerp (bit_position (field), 0))
4435 {
4436 bitpos = int_bit_position (field);
4437 offset = 0;
4438 }
4439 else
4440 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4441
4442 if (offset)
4443 {
4444 rtx offset_rtx;
4445
4446 if (contains_placeholder_p (offset))
4447 offset = build (WITH_RECORD_EXPR, sizetype,
4448 offset, make_tree (TREE_TYPE (exp), target));
4449
4450 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4451 if (GET_CODE (to_rtx) != MEM)
4452 abort ();
4453
4454 if (GET_MODE (offset_rtx) != ptr_mode)
4455 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4456
4457 #ifdef POINTERS_EXTEND_UNSIGNED
4458 if (GET_MODE (offset_rtx) != Pmode)
4459 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4460 #endif
4461
4462 to_rtx = offset_address (to_rtx, offset_rtx,
4463 highest_pow2_factor (offset));
4464 }
4465
4466 if (TREE_READONLY (field))
4467 {
4468 if (GET_CODE (to_rtx) == MEM)
4469 to_rtx = copy_rtx (to_rtx);
4470
4471 RTX_UNCHANGING_P (to_rtx) = 1;
4472 }
4473
4474 #ifdef WORD_REGISTER_OPERATIONS
4475 /* If this initializes a field that is smaller than a word, at the
4476 start of a word, try to widen it to a full word.
4477 This special case allows us to output C++ member function
4478 initializations in a form that the optimizers can understand. */
4479 if (GET_CODE (target) == REG
4480 && bitsize < BITS_PER_WORD
4481 && bitpos % BITS_PER_WORD == 0
4482 && GET_MODE_CLASS (mode) == MODE_INT
4483 && TREE_CODE (value) == INTEGER_CST
4484 && exp_size >= 0
4485 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4486 {
4487 tree type = TREE_TYPE (value);
4488
4489 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4490 {
4491 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4492 value = convert (type, value);
4493 }
4494
4495 if (BYTES_BIG_ENDIAN)
4496 value
4497 = fold (build (LSHIFT_EXPR, type, value,
4498 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4499 bitsize = BITS_PER_WORD;
4500 mode = word_mode;
4501 }
4502 #endif
4503
4504 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4505 && DECL_NONADDRESSABLE_P (field))
4506 {
4507 to_rtx = copy_rtx (to_rtx);
4508 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4509 }
4510
4511 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4512 value, type, cleared,
4513 get_alias_set (TREE_TYPE (field)));
4514 }
4515 }
4516 else if (TREE_CODE (type) == ARRAY_TYPE)
4517 {
4518 tree elt;
4519 int i;
4520 int need_to_clear;
4521 tree domain = TYPE_DOMAIN (type);
4522 tree elttype = TREE_TYPE (type);
4523 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4524 && TYPE_MAX_VALUE (domain)
4525 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4526 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4527 HOST_WIDE_INT minelt = 0;
4528 HOST_WIDE_INT maxelt = 0;
4529
4530 /* If we have constant bounds for the range of the type, get them. */
4531 if (const_bounds_p)
4532 {
4533 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4534 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4535 }
4536
4537 /* If the constructor has fewer elements than the array,
4538 clear the whole array first. Similarly if this is
4539 a static constructor of a non-BLKmode object. */
4540 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4541 need_to_clear = 1;
4542 else
4543 {
4544 HOST_WIDE_INT count = 0, zero_count = 0;
4545 need_to_clear = ! const_bounds_p;
4546
4547 /* This loop is a more accurate version of the loop in
4548 mostly_zeros_p (it handles RANGE_EXPR in an index).
4549 It is also needed to check for missing elements. */
4550 for (elt = CONSTRUCTOR_ELTS (exp);
4551 elt != NULL_TREE && ! need_to_clear;
4552 elt = TREE_CHAIN (elt))
4553 {
4554 tree index = TREE_PURPOSE (elt);
4555 HOST_WIDE_INT this_node_count;
4556
4557 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4558 {
4559 tree lo_index = TREE_OPERAND (index, 0);
4560 tree hi_index = TREE_OPERAND (index, 1);
4561
4562 if (! host_integerp (lo_index, 1)
4563 || ! host_integerp (hi_index, 1))
4564 {
4565 need_to_clear = 1;
4566 break;
4567 }
4568
4569 this_node_count = (tree_low_cst (hi_index, 1)
4570 - tree_low_cst (lo_index, 1) + 1);
4571 }
4572 else
4573 this_node_count = 1;
4574
4575 count += this_node_count;
4576 if (mostly_zeros_p (TREE_VALUE (elt)))
4577 zero_count += this_node_count;
4578 }
4579
4580 /* Clear the entire array first if there are any missing elements,
4581 or if the incidence of zero elements is >= 75%. */
4582 if (! need_to_clear
4583 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4584 need_to_clear = 1;
4585 }
4586
4587 if (need_to_clear && size > 0)
4588 {
4589 if (! cleared)
4590 clear_storage (target, GEN_INT (size));
4591 cleared = 1;
4592 }
4593 else if (REG_P (target))
4594 /* Inform later passes that the old value is dead. */
4595 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4596
4597 /* Store each element of the constructor into
4598 the corresponding element of TARGET, determined
4599 by counting the elements. */
4600 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4601 elt;
4602 elt = TREE_CHAIN (elt), i++)
4603 {
4604 enum machine_mode mode;
4605 HOST_WIDE_INT bitsize;
4606 HOST_WIDE_INT bitpos;
4607 int unsignedp;
4608 tree value = TREE_VALUE (elt);
4609 tree index = TREE_PURPOSE (elt);
4610 rtx xtarget = target;
4611
4612 if (cleared && is_zeros_p (value))
4613 continue;
4614
4615 unsignedp = TREE_UNSIGNED (elttype);
4616 mode = TYPE_MODE (elttype);
4617 if (mode == BLKmode)
4618 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4619 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4620 : -1);
4621 else
4622 bitsize = GET_MODE_BITSIZE (mode);
4623
4624 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4625 {
4626 tree lo_index = TREE_OPERAND (index, 0);
4627 tree hi_index = TREE_OPERAND (index, 1);
4628 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4629 struct nesting *loop;
4630 HOST_WIDE_INT lo, hi, count;
4631 tree position;
4632
4633 /* If the range is constant and "small", unroll the loop. */
4634 if (const_bounds_p
4635 && host_integerp (lo_index, 0)
4636 && host_integerp (hi_index, 0)
4637 && (lo = tree_low_cst (lo_index, 0),
4638 hi = tree_low_cst (hi_index, 0),
4639 count = hi - lo + 1,
4640 (GET_CODE (target) != MEM
4641 || count <= 2
4642 || (host_integerp (TYPE_SIZE (elttype), 1)
4643 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4644 <= 40 * 8)))))
4645 {
4646 lo -= minelt; hi -= minelt;
4647 for (; lo <= hi; lo++)
4648 {
4649 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4650
4651 if (GET_CODE (target) == MEM
4652 && !MEM_KEEP_ALIAS_SET_P (target)
4653 && TYPE_NONALIASED_COMPONENT (type))
4654 {
4655 target = copy_rtx (target);
4656 MEM_KEEP_ALIAS_SET_P (target) = 1;
4657 }
4658
4659 store_constructor_field
4660 (target, bitsize, bitpos, mode, value, type, cleared,
4661 get_alias_set (elttype));
4662 }
4663 }
4664 else
4665 {
4666 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4667 loop_top = gen_label_rtx ();
4668 loop_end = gen_label_rtx ();
4669
4670 unsignedp = TREE_UNSIGNED (domain);
4671
4672 index = build_decl (VAR_DECL, NULL_TREE, domain);
4673
4674 index_r
4675 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4676 &unsignedp, 0));
4677 SET_DECL_RTL (index, index_r);
4678 if (TREE_CODE (value) == SAVE_EXPR
4679 && SAVE_EXPR_RTL (value) == 0)
4680 {
4681 /* Make sure value gets expanded once before the
4682 loop. */
4683 expand_expr (value, const0_rtx, VOIDmode, 0);
4684 emit_queue ();
4685 }
4686 store_expr (lo_index, index_r, 0);
4687 loop = expand_start_loop (0);
4688
4689 /* Assign value to element index. */
4690 position
4691 = convert (ssizetype,
4692 fold (build (MINUS_EXPR, TREE_TYPE (index),
4693 index, TYPE_MIN_VALUE (domain))));
4694 position = size_binop (MULT_EXPR, position,
4695 convert (ssizetype,
4696 TYPE_SIZE_UNIT (elttype)));
4697
4698 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4699 xtarget = offset_address (target, pos_rtx,
4700 highest_pow2_factor (position));
4701 xtarget = adjust_address (xtarget, mode, 0);
4702 if (TREE_CODE (value) == CONSTRUCTOR)
4703 store_constructor (value, xtarget, cleared,
4704 bitsize / BITS_PER_UNIT);
4705 else
4706 store_expr (value, xtarget, 0);
4707
4708 expand_exit_loop_if_false (loop,
4709 build (LT_EXPR, integer_type_node,
4710 index, hi_index));
4711
4712 expand_increment (build (PREINCREMENT_EXPR,
4713 TREE_TYPE (index),
4714 index, integer_one_node), 0, 0);
4715 expand_end_loop ();
4716 emit_label (loop_end);
4717 }
4718 }
4719 else if ((index != 0 && ! host_integerp (index, 0))
4720 || ! host_integerp (TYPE_SIZE (elttype), 1))
4721 {
4722 tree position;
4723
4724 if (index == 0)
4725 index = ssize_int (1);
4726
4727 if (minelt)
4728 index = convert (ssizetype,
4729 fold (build (MINUS_EXPR, index,
4730 TYPE_MIN_VALUE (domain))));
4731
4732 position = size_binop (MULT_EXPR, index,
4733 convert (ssizetype,
4734 TYPE_SIZE_UNIT (elttype)));
4735 xtarget = offset_address (target,
4736 expand_expr (position, 0, VOIDmode, 0),
4737 highest_pow2_factor (position));
4738 xtarget = adjust_address (xtarget, mode, 0);
4739 store_expr (value, xtarget, 0);
4740 }
4741 else
4742 {
4743 if (index != 0)
4744 bitpos = ((tree_low_cst (index, 0) - minelt)
4745 * tree_low_cst (TYPE_SIZE (elttype), 1));
4746 else
4747 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4748
4749 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4750 && TYPE_NONALIASED_COMPONENT (type))
4751 {
4752 target = copy_rtx (target);
4753 MEM_KEEP_ALIAS_SET_P (target) = 1;
4754 }
4755
4756 store_constructor_field (target, bitsize, bitpos, mode, value,
4757 type, cleared, get_alias_set (elttype));
4758
4759 }
4760 }
4761 }
4762
4763 /* Set constructor assignments. */
4764 else if (TREE_CODE (type) == SET_TYPE)
4765 {
4766 tree elt = CONSTRUCTOR_ELTS (exp);
4767 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4768 tree domain = TYPE_DOMAIN (type);
4769 tree domain_min, domain_max, bitlength;
4770
4771 /* The default implementation strategy is to extract the constant
4772 parts of the constructor, use that to initialize the target,
4773 and then "or" in whatever non-constant ranges we need in addition.
4774
4775 If a large set is all zero or all ones, it is
4776 probably better to set it using memset (if available) or bzero.
4777 Also, if a large set has just a single range, it may be better
4778 to first clear the set (using bzero/memset) and then set the
4779 bits we want. */
4780
4781 /* Check for all zeros. */
4782 if (elt == NULL_TREE && size > 0)
4783 {
4784 if (!cleared)
4785 clear_storage (target, GEN_INT (size));
4786 return;
4787 }
4788
4789 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4790 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4791 bitlength = size_binop (PLUS_EXPR,
4792 size_diffop (domain_max, domain_min),
4793 ssize_int (1));
4794
4795 nbits = tree_low_cst (bitlength, 1);
4796
4797 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4798 are "complicated" (more than one range), initialize (the
4799 constant parts) by copying from a constant. */
4800 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4801 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4802 {
4803 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4804 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4805 char *bit_buffer = (char *) alloca (nbits);
4806 HOST_WIDE_INT word = 0;
4807 unsigned int bit_pos = 0;
4808 unsigned int ibit = 0;
4809 unsigned int offset = 0; /* In bytes from beginning of set. */
4810
4811 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4812 for (;;)
4813 {
4814 if (bit_buffer[ibit])
4815 {
4816 if (BYTES_BIG_ENDIAN)
4817 word |= (1 << (set_word_size - 1 - bit_pos));
4818 else
4819 word |= 1 << bit_pos;
4820 }
4821
4822 bit_pos++; ibit++;
4823 if (bit_pos >= set_word_size || ibit == nbits)
4824 {
4825 if (word != 0 || ! cleared)
4826 {
4827 rtx datum = GEN_INT (word);
4828 rtx to_rtx;
4829
4830 /* The assumption here is that it is safe to use
4831 XEXP if the set is multi-word, but not if
4832 it's single-word. */
4833 if (GET_CODE (target) == MEM)
4834 to_rtx = adjust_address (target, mode, offset);
4835 else if (offset == 0)
4836 to_rtx = target;
4837 else
4838 abort ();
4839 emit_move_insn (to_rtx, datum);
4840 }
4841
4842 if (ibit == nbits)
4843 break;
4844 word = 0;
4845 bit_pos = 0;
4846 offset += set_word_size / BITS_PER_UNIT;
4847 }
4848 }
4849 }
4850 else if (!cleared)
4851 /* Don't bother clearing storage if the set is all ones. */
4852 if (TREE_CHAIN (elt) != NULL_TREE
4853 || (TREE_PURPOSE (elt) == NULL_TREE
4854 ? nbits != 1
4855 : ( ! host_integerp (TREE_VALUE (elt), 0)
4856 || ! host_integerp (TREE_PURPOSE (elt), 0)
4857 || (tree_low_cst (TREE_VALUE (elt), 0)
4858 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4859 != (HOST_WIDE_INT) nbits))))
4860 clear_storage (target, expr_size (exp));
4861
4862 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4863 {
4864 /* Start of range of element or NULL. */
4865 tree startbit = TREE_PURPOSE (elt);
4866 /* End of range of element, or element value. */
4867 tree endbit = TREE_VALUE (elt);
4868 #ifdef TARGET_MEM_FUNCTIONS
4869 HOST_WIDE_INT startb, endb;
4870 #endif
4871 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4872
4873 bitlength_rtx = expand_expr (bitlength,
4874 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4875
4876 /* Handle a non-range tuple element like [ expr ]. */
4877 if (startbit == NULL_TREE)
4878 {
4879 startbit = save_expr (endbit);
4880 endbit = startbit;
4881 }
4882
4883 startbit = convert (sizetype, startbit);
4884 endbit = convert (sizetype, endbit);
4885 if (! integer_zerop (domain_min))
4886 {
4887 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4888 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4889 }
4890 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4891 EXPAND_CONST_ADDRESS);
4892 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4893 EXPAND_CONST_ADDRESS);
4894
4895 if (REG_P (target))
4896 {
4897 targetx
4898 = assign_temp
4899 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4900 TYPE_QUAL_CONST)),
4901 0, 1, 1);
4902 emit_move_insn (targetx, target);
4903 }
4904
4905 else if (GET_CODE (target) == MEM)
4906 targetx = target;
4907 else
4908 abort ();
4909
4910 #ifdef TARGET_MEM_FUNCTIONS
4911 /* Optimization: If startbit and endbit are
4912 constants divisible by BITS_PER_UNIT,
4913 call memset instead. */
4914 if (TREE_CODE (startbit) == INTEGER_CST
4915 && TREE_CODE (endbit) == INTEGER_CST
4916 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4917 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4918 {
4919 emit_library_call (memset_libfunc, LCT_NORMAL,
4920 VOIDmode, 3,
4921 plus_constant (XEXP (targetx, 0),
4922 startb / BITS_PER_UNIT),
4923 Pmode,
4924 constm1_rtx, TYPE_MODE (integer_type_node),
4925 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4926 TYPE_MODE (sizetype));
4927 }
4928 else
4929 #endif
4930 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4931 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4932 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
4933 startbit_rtx, TYPE_MODE (sizetype),
4934 endbit_rtx, TYPE_MODE (sizetype));
4935
4936 if (REG_P (target))
4937 emit_move_insn (target, targetx);
4938 }
4939 }
4940
4941 else
4942 abort ();
4943 }
4944
4945 /* Store the value of EXP (an expression tree)
4946 into a subfield of TARGET which has mode MODE and occupies
4947 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4948 If MODE is VOIDmode, it means that we are storing into a bit-field.
4949
4950 If VALUE_MODE is VOIDmode, return nothing in particular.
4951 UNSIGNEDP is not used in this case.
4952
4953 Otherwise, return an rtx for the value stored. This rtx
4954 has mode VALUE_MODE if that is convenient to do.
4955 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4956
4957 TYPE is the type of the underlying object.
4958
4959 ALIAS_SET is the alias set for the destination. This value will
4960 (in general) be different from that for TARGET, since TARGET is a
4961 reference to the containing structure. */
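/* Illustrative note: storing into a 3-bit field at bit position 5 of a
   structure in memory arrives here with MODE == VOIDmode and goes through
   store_bit_field below; a byte-aligned field with a machine mode is
   instead stored with an ordinary move through an adjusted MEM.  */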
4962
4963 static rtx
4964 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
4965 alias_set)
4966 rtx target;
4967 HOST_WIDE_INT bitsize;
4968 HOST_WIDE_INT bitpos;
4969 enum machine_mode mode;
4970 tree exp;
4971 enum machine_mode value_mode;
4972 int unsignedp;
4973 tree type;
4974 int alias_set;
4975 {
4976 HOST_WIDE_INT width_mask = 0;
4977
4978 if (TREE_CODE (exp) == ERROR_MARK)
4979 return const0_rtx;
4980
4981 /* If we have nothing to store, do nothing unless the expression has
4982 side-effects. */
4983 if (bitsize == 0)
4984 return expand_expr (exp, const0_rtx, VOIDmode, 0);
4985 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
4986 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
4987
4988 /* If we are storing into an unaligned field of an aligned union that is
4989 in a register, we may have the mode of TARGET being an integer mode but
4990 MODE == BLKmode. In that case, get an aligned object whose size and
4991 alignment are the same as TARGET and store TARGET into it (we can avoid
4992 the store if the field being stored is the entire width of TARGET). Then
4993 call ourselves recursively to store the field into a BLKmode version of
4994 that object. Finally, load from the object into TARGET. This is not
4995 very efficient in general, but should only be slightly more expensive
4996 than the otherwise-required unaligned accesses. Perhaps this can be
4997 cleaned up later. */
4998
4999 if (mode == BLKmode
5000 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5001 {
5002 rtx object
5003 = assign_temp
5004 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5005 0, 1, 1);
5006 rtx blk_object = copy_rtx (object);
5007
5008 PUT_MODE (blk_object, BLKmode);
5009
5010 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5011 emit_move_insn (object, target);
5012
5013 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5014 alias_set);
5015
5016 emit_move_insn (target, object);
5017
5018 /* We want to return the BLKmode version of the data. */
5019 return blk_object;
5020 }
5021
5022 if (GET_CODE (target) == CONCAT)
5023 {
5024 /* We're storing into a struct containing a single __complex. */
5025
5026 if (bitpos != 0)
5027 abort ();
5028 return store_expr (exp, target, 0);
5029 }
5030
5031 /* If the structure is in a register or if the component
5032 is a bit field, we cannot use addressing to access it.
5033 Use bit-field techniques or SUBREG to store in it. */
5034
5035 if (mode == VOIDmode
5036 || (mode != BLKmode && ! direct_store[(int) mode]
5037 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5038 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5039 || GET_CODE (target) == REG
5040 || GET_CODE (target) == SUBREG
5041 /* If the field isn't aligned enough to store as an ordinary memref,
5042 store it as a bit field. */
5043 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5044 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5045 || bitpos % GET_MODE_ALIGNMENT (mode)))
5046 /* If the RHS and field are a constant size and the size of the
5047 RHS isn't the same size as the bitfield, we must use bitfield
5048 operations. */
5049 || (bitsize >= 0
5050 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5051 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5052 {
5053 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5054
5055 /* If BITSIZE is narrower than the size of the type of EXP
5056 we will be narrowing TEMP. Normally, what's wanted are the
5057 low-order bits. However, if EXP's type is a record and this is
5058 big-endian machine, we want the upper BITSIZE bits. */
5059 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5060 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5061 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5062 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5063 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5064 - bitsize),
5065 temp, 1);
5066
5067 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5068 MODE. */
5069 if (mode != VOIDmode && mode != BLKmode
5070 && mode != TYPE_MODE (TREE_TYPE (exp)))
5071 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5072
5073 /* If the modes of TARGET and TEMP are both BLKmode, both
5074 must be in memory and BITPOS must be aligned on a byte
5075 boundary. If so, we simply do a block copy. */
5076 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5077 {
5078 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5079 || bitpos % BITS_PER_UNIT != 0)
5080 abort ();
5081
5082 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5083 emit_block_move (target, temp,
5084 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5085 / BITS_PER_UNIT));
5086
5087 return value_mode == VOIDmode ? const0_rtx : target;
5088 }
5089
5090 /* Store the value in the bitfield. */
5091 store_bit_field (target, bitsize, bitpos, mode, temp,
5092 int_size_in_bytes (type));
5093
5094 if (value_mode != VOIDmode)
5095 {
5096 /* The caller wants an rtx for the value.
5097 If possible, avoid refetching from the bitfield itself. */
5098 if (width_mask != 0
5099 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5100 {
5101 tree count;
5102 enum machine_mode tmode;
5103
5104 if (unsignedp)
5105 return expand_and (temp,
5106 GEN_INT
5107 (trunc_int_for_mode
5108 (width_mask,
5109 GET_MODE (temp) == VOIDmode
5110 ? value_mode
5111 : GET_MODE (temp))), NULL_RTX);
5112
5113 tmode = GET_MODE (temp);
5114 if (tmode == VOIDmode)
5115 tmode = value_mode;
5116 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5117 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5118 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5119 }
5120
5121 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5122 NULL_RTX, value_mode, VOIDmode,
5123 int_size_in_bytes (type));
5124 }
5125 return const0_rtx;
5126 }
5127 else
5128 {
5129 rtx addr = XEXP (target, 0);
5130 rtx to_rtx = target;
5131
5132 /* If a value is wanted, it must be the lhs;
5133 so make the address stable for multiple use. */
5134
5135 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5136 && ! CONSTANT_ADDRESS_P (addr)
5137 /* A frame-pointer reference is already stable. */
5138 && ! (GET_CODE (addr) == PLUS
5139 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5140 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5141 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5142 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5143
5144 /* Now build a reference to just the desired component. */
5145
5146 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5147
5148 if (to_rtx == target)
5149 to_rtx = copy_rtx (to_rtx);
5150
5151 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5152 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5153 set_mem_alias_set (to_rtx, alias_set);
5154
5155 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5156 }
5157 }
5158 \f
5159 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5160 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5161 codes and find the ultimate containing object, which we return.
5162
5163 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5164 bit position, and *PUNSIGNEDP to the signedness of the field.
5165 If the position of the field is variable, we store a tree
5166 giving the variable offset (in units) in *POFFSET.
5167 This offset is in addition to the bit position.
5168 If the position is not variable, we store 0 in *POFFSET.
5169
5170 If any of the extraction expressions is volatile,
5171 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5172
5173 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5174 is a mode that can be used to access the field. In that case, *PBITSIZE
5175 is redundant.
5176
5177 If the field describes a variable-sized object, *PMODE is set to
5178 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5179 this case, but the address of the object can be found. */
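/* Rough example: for `s.a[i].b' this walks the COMPONENT_REF and
   ARRAY_REF nodes and returns the node for S; the sub-byte bit offsets
   accumulate into *PBITPOS, while the byte offsets, including the
   variable `i * sizeof (s.a[0])' term, end up as a tree in *POFFSET.  */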
5180
5181 tree
5182 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5183 punsignedp, pvolatilep)
5184 tree exp;
5185 HOST_WIDE_INT *pbitsize;
5186 HOST_WIDE_INT *pbitpos;
5187 tree *poffset;
5188 enum machine_mode *pmode;
5189 int *punsignedp;
5190 int *pvolatilep;
5191 {
5192 tree size_tree = 0;
5193 enum machine_mode mode = VOIDmode;
5194 tree offset = size_zero_node;
5195 tree bit_offset = bitsize_zero_node;
5196 tree placeholder_ptr = 0;
5197 tree tem;
5198
5199 /* First get the mode, signedness, and size. We do this from just the
5200 outermost expression. */
5201 if (TREE_CODE (exp) == COMPONENT_REF)
5202 {
5203 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5204 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5205 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5206
5207 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5208 }
5209 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5210 {
5211 size_tree = TREE_OPERAND (exp, 1);
5212 *punsignedp = TREE_UNSIGNED (exp);
5213 }
5214 else
5215 {
5216 mode = TYPE_MODE (TREE_TYPE (exp));
5217 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5218
5219 if (mode == BLKmode)
5220 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5221 else
5222 *pbitsize = GET_MODE_BITSIZE (mode);
5223 }
5224
5225 if (size_tree != 0)
5226 {
5227 if (! host_integerp (size_tree, 1))
5228 mode = BLKmode, *pbitsize = -1;
5229 else
5230 *pbitsize = tree_low_cst (size_tree, 1);
5231 }
5232
5233 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5234 and find the ultimate containing object. */
5235 while (1)
5236 {
5237 if (TREE_CODE (exp) == BIT_FIELD_REF)
5238 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5239 else if (TREE_CODE (exp) == COMPONENT_REF)
5240 {
5241 tree field = TREE_OPERAND (exp, 1);
5242 tree this_offset = DECL_FIELD_OFFSET (field);
5243
5244 /* If this field hasn't been filled in yet, don't go
5245 past it. This should only happen when folding expressions
5246 made during type construction. */
5247 if (this_offset == 0)
5248 break;
5249 else if (! TREE_CONSTANT (this_offset)
5250 && contains_placeholder_p (this_offset))
5251 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5252
5253 offset = size_binop (PLUS_EXPR, offset, this_offset);
5254 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5255 DECL_FIELD_BIT_OFFSET (field));
5256
5257 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5258 }
5259
5260 else if (TREE_CODE (exp) == ARRAY_REF
5261 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5262 {
5263 tree index = TREE_OPERAND (exp, 1);
5264 tree array = TREE_OPERAND (exp, 0);
5265 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5266 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5267 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5268
5269 /* We assume all arrays have sizes that are a multiple of a byte.
5270 First subtract the lower bound, if any, in the type of the
5271 index, then convert to sizetype and multiply by the size of the
5272 array element. */
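      /* E.g. for `a[i]' with a lower bound of 1, the contribution is
         (i - 1), converted to sizetype, times the element size
         (illustrative).  */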
5273 if (low_bound != 0 && ! integer_zerop (low_bound))
5274 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5275 index, low_bound));
5276
5277 /* If the index has a self-referential type, pass it to a
5278 WITH_RECORD_EXPR; if the component size is self-referential,
5279 pass our component to one. */
5280 if (! TREE_CONSTANT (index)
5281 && contains_placeholder_p (index))
5282 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5283 if (! TREE_CONSTANT (unit_size)
5284 && contains_placeholder_p (unit_size))
5285 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5286
5287 offset = size_binop (PLUS_EXPR, offset,
5288 size_binop (MULT_EXPR,
5289 convert (sizetype, index),
5290 unit_size));
5291 }
5292
5293 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5294 {
5295 tree new = find_placeholder (exp, &placeholder_ptr);
5296
5297 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5298 We might have been called from tree optimization where we
5299 haven't set up an object yet. */
5300 if (new == 0)
5301 break;
5302 else
5303 exp = new;
5304
5305 continue;
5306 }
5307 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5308 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5309 && ! ((TREE_CODE (exp) == NOP_EXPR
5310 || TREE_CODE (exp) == CONVERT_EXPR)
5311 && (TYPE_MODE (TREE_TYPE (exp))
5312 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5313 break;
5314
5315 /* If any reference in the chain is volatile, the effect is volatile. */
5316 if (TREE_THIS_VOLATILE (exp))
5317 *pvolatilep = 1;
5318
5319 exp = TREE_OPERAND (exp, 0);
5320 }
5321
5322 /* If OFFSET is constant, see if we can return the whole thing as a
5323 constant bit position. Otherwise, split it up. */
5324 if (host_integerp (offset, 0)
5325 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5326 bitsize_unit_node))
5327 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5328 && host_integerp (tem, 0))
5329 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5330 else
5331 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5332
5333 *pmode = mode;
5334 return exp;
5335 }
5336
5337 /* Return 1 if T is an expression that get_inner_reference handles. */
5338
5339 int
5340 handled_component_p (t)
5341 tree t;
5342 {
5343 switch (TREE_CODE (t))
5344 {
5345 case BIT_FIELD_REF:
5346 case COMPONENT_REF:
5347 case ARRAY_REF:
5348 case ARRAY_RANGE_REF:
5349 case NON_LVALUE_EXPR:
5350 case VIEW_CONVERT_EXPR:
5351 return 1;
5352
5353 case NOP_EXPR:
5354 case CONVERT_EXPR:
5355 return (TYPE_MODE (TREE_TYPE (t))
5356 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5357
5358 default:
5359 return 0;
5360 }
5361 }
5362 \f
5363 /* Given an rtx VALUE that may contain additions and multiplications, return
5364 an equivalent value that just refers to a register, memory, or constant.
5365 This is done by generating instructions to perform the arithmetic and
5366 returning a pseudo-register containing the value.
5367
5368 The returned value may be a REG, SUBREG, MEM or constant. */
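/* For illustration only (assumed register numbers, not from the original
   sources): given the address-like value

     (plus:SI (reg:SI 100) (mult:SI (reg:SI 101) (const_int 4)))

   force_operand emits insns for the multiply and the add and returns a fresh
   pseudo holding the sum, while a plain register, MEM or constant comes back
   unchanged. */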
5369
5370 rtx
5371 force_operand (value, target)
5372 rtx value, target;
5373 {
5374 optab binoptab = 0;
5375 /* Use a temporary to force order of execution of calls to
5376 `force_operand'. */
5377 rtx tmp;
5378 rtx op2;
5379 /* Use subtarget as the target for operand 0 of a binary operation. */
5380 rtx subtarget = get_subtarget (target);
5381
5382 /* Check for a PIC address load. */
5383 if (flag_pic
5384 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5385 && XEXP (value, 0) == pic_offset_table_rtx
5386 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5387 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5388 || GET_CODE (XEXP (value, 1)) == CONST))
5389 {
5390 if (!subtarget)
5391 subtarget = gen_reg_rtx (GET_MODE (value));
5392 emit_move_insn (subtarget, value);
5393 return subtarget;
5394 }
5395
5396 if (GET_CODE (value) == PLUS)
5397 binoptab = add_optab;
5398 else if (GET_CODE (value) == MINUS)
5399 binoptab = sub_optab;
5400 else if (GET_CODE (value) == MULT)
5401 {
5402 op2 = XEXP (value, 1);
5403 if (!CONSTANT_P (op2)
5404 && !(GET_CODE (op2) == REG && op2 != subtarget))
5405 subtarget = 0;
5406 tmp = force_operand (XEXP (value, 0), subtarget);
5407 return expand_mult (GET_MODE (value), tmp,
5408 force_operand (op2, NULL_RTX),
5409 target, 1);
5410 }
5411
5412 if (binoptab)
5413 {
5414 op2 = XEXP (value, 1);
5415 if (!CONSTANT_P (op2)
5416 && !(GET_CODE (op2) == REG && op2 != subtarget))
5417 subtarget = 0;
5418 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5419 {
5420 binoptab = add_optab;
5421 op2 = negate_rtx (GET_MODE (value), op2);
5422 }
5423
5424 /* Check for an addition with OP2 a constant integer and our first
5425 operand a PLUS of a virtual register and something else. In that
5426 case, we want to emit the sum of the virtual register and the
5427 constant first and then add the other value. This allows virtual
5428 register instantiation to simply modify the constant rather than
5429 creating another one around this addition. */
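      /* Illustrative sketch (assumed register numbers): for
           (plus (plus (reg virtual-stack-vars) (reg 66)) (const_int 8))
         we first form virtual-stack-vars + 8, which instantiation can later
         fold into a single frame-pointer offset, and only then add (reg 66). */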
5430 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5431 && GET_CODE (XEXP (value, 0)) == PLUS
5432 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5433 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5434 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5435 {
5436 rtx temp = expand_binop (GET_MODE (value), binoptab,
5437 XEXP (XEXP (value, 0), 0), op2,
5438 subtarget, 0, OPTAB_LIB_WIDEN);
5439 return expand_binop (GET_MODE (value), binoptab, temp,
5440 force_operand (XEXP (XEXP (value, 0), 1), 0),
5441 target, 0, OPTAB_LIB_WIDEN);
5442 }
5443
5444 tmp = force_operand (XEXP (value, 0), subtarget);
5445 return expand_binop (GET_MODE (value), binoptab, tmp,
5446 force_operand (op2, NULL_RTX),
5447 target, 0, OPTAB_LIB_WIDEN);
5448 /* We give UNSIGNEDP = 0 to expand_binop
5449 because the only operations we are expanding here are signed ones. */
5450 }
5451
5452 #ifdef INSN_SCHEDULING
5453 /* On machines that have insn scheduling, we want all memory references to be
5454 explicit, so we need to deal with a paradoxical SUBREG of a MEM here. */
5455 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5456 && (GET_MODE_SIZE (GET_MODE (value))
5457 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5458 value
5459 = simplify_gen_subreg (GET_MODE (value),
5460 force_reg (GET_MODE (SUBREG_REG (value)),
5461 force_operand (SUBREG_REG (value),
5462 NULL_RTX)),
5463 GET_MODE (SUBREG_REG (value)),
5464 SUBREG_BYTE (value));
5465 #endif
5466
5467 return value;
5468 }
5469 \f
5470 /* Subroutine of expand_expr: return nonzero iff there is no way that
5471 EXP can reference X, which is being modified. TOP_P is nonzero if this
5472 call is going to be used to determine whether we need a temporary
5473 for EXP, as opposed to a recursive call to this function.
5474
5475 It is always safe for this routine to return zero since it merely
5476 searches for optimization opportunities. */
5477
5478 int
5479 safe_from_p (x, exp, top_p)
5480 rtx x;
5481 tree exp;
5482 int top_p;
5483 {
5484 rtx exp_rtl = 0;
5485 int i, nops;
5486 static tree save_expr_list;
5487
5488 if (x == 0
5489 /* If EXP has varying size, we MUST use a target since we currently
5490 have no way of allocating temporaries of variable size
5491 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5492 So we assume here that something at a higher level has prevented a
5493 clash. This is somewhat bogus, but the best we can do. Only
5494 do this when X is BLKmode and when we are at the top level. */
5495 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5496 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5497 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5498 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5499 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5500 != INTEGER_CST)
5501 && GET_MODE (x) == BLKmode)
5502 /* If X is in the outgoing argument area, it is always safe. */
5503 || (GET_CODE (x) == MEM
5504 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5505 || (GET_CODE (XEXP (x, 0)) == PLUS
5506 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5507 return 1;
5508
5509 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5510 find the underlying pseudo. */
5511 if (GET_CODE (x) == SUBREG)
5512 {
5513 x = SUBREG_REG (x);
5514 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5515 return 0;
5516 }
5517
5518 /* A SAVE_EXPR might appear many times in the expression passed to the
5519 top-level safe_from_p call, and if it has a complex subexpression,
5520 examining it multiple times could result in a combinatorial explosion.
5521 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5522 with optimization took about 28 minutes to compile -- even though it was
5523 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5524 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5525 we have processed. Note that the only test of top_p was above. */
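  /* For instance (a hypothetical worst case): a tree in which the same
     SAVE_EXPR appears as both operands at each of N levels would be walked
     O(2**N) times without this marking, but only once with it. */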
5526
5527 if (top_p)
5528 {
5529 int rtn;
5530 tree t;
5531
5532 save_expr_list = 0;
5533
5534 rtn = safe_from_p (x, exp, 0);
5535
5536 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5537 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5538
5539 return rtn;
5540 }
5541
5542 /* Now look at our tree code and possibly recurse. */
5543 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5544 {
5545 case 'd':
5546 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5547 break;
5548
5549 case 'c':
5550 return 1;
5551
5552 case 'x':
5553 if (TREE_CODE (exp) == TREE_LIST)
5554 return ((TREE_VALUE (exp) == 0
5555 || safe_from_p (x, TREE_VALUE (exp), 0))
5556 && (TREE_CHAIN (exp) == 0
5557 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5558 else if (TREE_CODE (exp) == ERROR_MARK)
5559 return 1; /* An already-visited SAVE_EXPR? */
5560 else
5561 return 0;
5562
5563 case '1':
5564 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5565
5566 case '2':
5567 case '<':
5568 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5569 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5570
5571 case 'e':
5572 case 'r':
5573 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5574 the expression. If it is set, we conflict iff we are that rtx or
5575 both are in memory. Otherwise, we check all operands of the
5576 expression recursively. */
5577
5578 switch (TREE_CODE (exp))
5579 {
5580 case ADDR_EXPR:
5581 /* If the operand is static or we are static, we can't conflict.
5582 Likewise if we don't conflict with the operand at all. */
5583 if (staticp (TREE_OPERAND (exp, 0))
5584 || TREE_STATIC (exp)
5585 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5586 return 1;
5587
5588 /* Otherwise, the only way this can conflict is if we are taking
5589 the address of a DECL whose address is part of X, which is
5590 very rare. */
5591 exp = TREE_OPERAND (exp, 0);
5592 if (DECL_P (exp))
5593 {
5594 if (!DECL_RTL_SET_P (exp)
5595 || GET_CODE (DECL_RTL (exp)) != MEM)
5596 return 0;
5597 else
5598 exp_rtl = XEXP (DECL_RTL (exp), 0);
5599 }
5600 break;
5601
5602 case INDIRECT_REF:
5603 if (GET_CODE (x) == MEM
5604 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5605 get_alias_set (exp)))
5606 return 0;
5607 break;
5608
5609 case CALL_EXPR:
5610 /* Assume that the call will clobber all hard registers and
5611 all of memory. */
5612 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5613 || GET_CODE (x) == MEM)
5614 return 0;
5615 break;
5616
5617 case RTL_EXPR:
5618 /* If a sequence exists, we would have to scan every instruction
5619 in the sequence to see if it was safe. This is probably not
5620 worthwhile. */
5621 if (RTL_EXPR_SEQUENCE (exp))
5622 return 0;
5623
5624 exp_rtl = RTL_EXPR_RTL (exp);
5625 break;
5626
5627 case WITH_CLEANUP_EXPR:
5628 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5629 break;
5630
5631 case CLEANUP_POINT_EXPR:
5632 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5633
5634 case SAVE_EXPR:
5635 exp_rtl = SAVE_EXPR_RTL (exp);
5636 if (exp_rtl)
5637 break;
5638
5639 /* If we've already scanned this, don't do it again. Otherwise,
5640 show we've scanned it and, if we are going on, record it so that
5641 the flag can be cleared when we are done. */
5642 if (TREE_PRIVATE (exp))
5643 return 1;
5644
5645 TREE_PRIVATE (exp) = 1;
5646 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5647 {
5648 TREE_PRIVATE (exp) = 0;
5649 return 0;
5650 }
5651
5652 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5653 return 1;
5654
5655 case BIND_EXPR:
5656 /* The only operand we look at is operand 1. The rest aren't
5657 part of the expression. */
5658 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5659
5660 case METHOD_CALL_EXPR:
5661 /* This takes an rtx argument, but shouldn't appear here. */
5662 abort ();
5663
5664 default:
5665 break;
5666 }
5667
5668 /* If we have an rtx, we do not need to scan our operands. */
5669 if (exp_rtl)
5670 break;
5671
5672 nops = first_rtl_op (TREE_CODE (exp));
5673 for (i = 0; i < nops; i++)
5674 if (TREE_OPERAND (exp, i) != 0
5675 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5676 return 0;
5677
5678 /* If this is a language-specific tree code, it may require
5679 special handling. */
5680 if ((unsigned int) TREE_CODE (exp)
5681 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5682 && !(*lang_hooks.safe_from_p) (x, exp))
5683 return 0;
5684 }
5685
5686 /* If we have an rtl, find any enclosed object. Then see if we conflict
5687 with it. */
5688 if (exp_rtl)
5689 {
5690 if (GET_CODE (exp_rtl) == SUBREG)
5691 {
5692 exp_rtl = SUBREG_REG (exp_rtl);
5693 if (GET_CODE (exp_rtl) == REG
5694 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5695 return 0;
5696 }
5697
5698 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5699 are memory and they conflict. */
5700 return ! (rtx_equal_p (x, exp_rtl)
5701 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5702 && true_dependence (exp_rtl, GET_MODE (x), x,
5703 rtx_addr_varies_p)));
5704 }
5705
5706 /* If we reach here, it is safe. */
5707 return 1;
5708 }
5709
5710 /* Subroutine of expand_expr: return rtx if EXP is a
5711 variable or parameter; else return 0. */
5712
5713 static rtx
5714 var_rtx (exp)
5715 tree exp;
5716 {
5717 STRIP_NOPS (exp);
5718 switch (TREE_CODE (exp))
5719 {
5720 case PARM_DECL:
5721 case VAR_DECL:
5722 return DECL_RTL (exp);
5723 default:
5724 return 0;
5725 }
5726 }
5727
5728 #ifdef MAX_INTEGER_COMPUTATION_MODE
5729
5730 void
5731 check_max_integer_computation_mode (exp)
5732 tree exp;
5733 {
5734 enum tree_code code;
5735 enum machine_mode mode;
5736
5737 /* Strip any NOPs that don't change the mode. */
5738 STRIP_NOPS (exp);
5739 code = TREE_CODE (exp);
5740
5741 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5742 if (code == NOP_EXPR
5743 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5744 return;
5745
5746 /* First check the type of the overall operation. We need only look at
5747 unary, binary and relational operations. */
5748 if (TREE_CODE_CLASS (code) == '1'
5749 || TREE_CODE_CLASS (code) == '2'
5750 || TREE_CODE_CLASS (code) == '<')
5751 {
5752 mode = TYPE_MODE (TREE_TYPE (exp));
5753 if (GET_MODE_CLASS (mode) == MODE_INT
5754 && mode > MAX_INTEGER_COMPUTATION_MODE)
5755 internal_error ("unsupported wide integer operation");
5756 }
5757
5758 /* Check operand of a unary op. */
5759 if (TREE_CODE_CLASS (code) == '1')
5760 {
5761 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5762 if (GET_MODE_CLASS (mode) == MODE_INT
5763 && mode > MAX_INTEGER_COMPUTATION_MODE)
5764 internal_error ("unsupported wide integer operation");
5765 }
5766
5767 /* Check operands of a binary/comparison op. */
5768 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5769 {
5770 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5771 if (GET_MODE_CLASS (mode) == MODE_INT
5772 && mode > MAX_INTEGER_COMPUTATION_MODE)
5773 internal_error ("unsupported wide integer operation");
5774
5775 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5776 if (GET_MODE_CLASS (mode) == MODE_INT
5777 && mode > MAX_INTEGER_COMPUTATION_MODE)
5778 internal_error ("unsupported wide integer operation");
5779 }
5780 }
5781 #endif
5782 \f
5783 /* Return the highest power of two that EXP is known to be a multiple of.
5784 This is used in updating alignment of MEMs in array references. */
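/* Worked example (illustrative): for an offset expression I * 12 + 8 this
   returns MIN (1 * (12 & -12), 8 & -8) == MIN (4, 8) == 4, since a variable
   factor such as I contributes only the default of 1. */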
5785
5786 static HOST_WIDE_INT
5787 highest_pow2_factor (exp)
5788 tree exp;
5789 {
5790 HOST_WIDE_INT c0, c1;
5791
5792 switch (TREE_CODE (exp))
5793 {
5794 case INTEGER_CST:
5795 /* If the integer is expressible in a HOST_WIDE_INT, we can find the
5796 lowest bit that's a one. If the result is zero, pessimize by
5797 returning 1. This is overly-conservative, but such things should not
5798 happen in the offset expressions that we are called with. If
5799 the constant overflows, we have some erroneous program, so return
5800 BIGGEST_ALIGNMENT to avoid any later ICE. */
5801 if (TREE_CONSTANT_OVERFLOW (exp))
5802 return BIGGEST_ALIGNMENT;
5803 else if (host_integerp (exp, 0))
5804 {
5805 c0 = tree_low_cst (exp, 0);
5806 c0 = c0 < 0 ? - c0 : c0;
5807 return c0 != 0 ? c0 & -c0 : 1;
5808 }
5809 break;
5810
5811 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5812 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5813 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5814 return MIN (c0, c1);
5815
5816 case MULT_EXPR:
5817 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5818 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5819 return c0 * c1;
5820
5821 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5822 case CEIL_DIV_EXPR:
5823 if (integer_pow2p (TREE_OPERAND (exp, 1))
5824 && host_integerp (TREE_OPERAND (exp, 1), 1))
5825 {
5826 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5827 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5828 return MAX (1, c0 / c1);
5829 }
5830 break;
5831
5832 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5833 case SAVE_EXPR: case WITH_RECORD_EXPR:
5834 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5835
5836 case COMPOUND_EXPR:
5837 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5838
5839 case COND_EXPR:
5840 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5841 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5842 return MIN (c0, c1);
5843
5844 default:
5845 break;
5846 }
5847
5848 return 1;
5849 }
5850 \f
5851 /* Return an object on the placeholder list that matches EXP, a
5852 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5853 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5854 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5855 is a location which initially points to a starting location in the
5856 placeholder list (zero means start of the list) and where a pointer into
5857 the placeholder list at which the object is found is placed. */
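/* As a hedged illustration: when expanding the size of an Ada record whose
   layout depends on a discriminant, the size tree contains a PLACEHOLDER_EXPR
   standing for "the object at hand"; a WITH_RECORD_EXPR around the use pushes
   that object onto placeholder_list, and this routine digs it back out. */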
5858
5859 tree
5860 find_placeholder (exp, plist)
5861 tree exp;
5862 tree *plist;
5863 {
5864 tree type = TREE_TYPE (exp);
5865 tree placeholder_expr;
5866
5867 for (placeholder_expr
5868 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5869 placeholder_expr != 0;
5870 placeholder_expr = TREE_CHAIN (placeholder_expr))
5871 {
5872 tree need_type = TYPE_MAIN_VARIANT (type);
5873 tree elt;
5874
5875 /* Find the outermost reference that is of the type we want. If none,
5876 see if any object has a type that is a pointer to the type we
5877 want. */
5878 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5879 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5880 || TREE_CODE (elt) == COND_EXPR)
5881 ? TREE_OPERAND (elt, 1)
5882 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5883 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5884 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5885 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5886 ? TREE_OPERAND (elt, 0) : 0))
5887 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5888 {
5889 if (plist)
5890 *plist = placeholder_expr;
5891 return elt;
5892 }
5893
5894 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5895 elt
5896 = ((TREE_CODE (elt) == COMPOUND_EXPR
5897 || TREE_CODE (elt) == COND_EXPR)
5898 ? TREE_OPERAND (elt, 1)
5899 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5900 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5901 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5902 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5903 ? TREE_OPERAND (elt, 0) : 0))
5904 if (POINTER_TYPE_P (TREE_TYPE (elt))
5905 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
5906 == need_type))
5907 {
5908 if (plist)
5909 *plist = placeholder_expr;
5910 return build1 (INDIRECT_REF, need_type, elt);
5911 }
5912 }
5913
5914 return 0;
5915 }
5916 \f
5917 /* expand_expr: generate code for computing expression EXP.
5918 An rtx for the computed value is returned. The value is never null.
5919 In the case of a void EXP, const0_rtx is returned.
5920
5921 The value may be stored in TARGET if TARGET is nonzero.
5922 TARGET is just a suggestion; callers must assume that
5923 the rtx returned may not be the same as TARGET.
5924
5925 If TARGET is CONST0_RTX, it means that the value will be ignored.
5926
5927 If TMODE is not VOIDmode, it suggests generating the
5928 result in mode TMODE. But this is done only when convenient.
5929 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5930 TMODE is just a suggestion; callers must assume that
5931 the rtx returned may not have mode TMODE.
5932
5933 Note that TARGET may have neither TMODE nor MODE. In that case, it
5934 probably will not be used.
5935
5936 If MODIFIER is EXPAND_SUM then when EXP is an addition
5937 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5938 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5939 products as above, or REG or MEM, or constant.
5940 Ordinarily in such cases we would output mul or add instructions
5941 and then return a pseudo reg containing the sum.
5942
5943 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5944 it also marks a label as absolutely required (it can't be dead).
5945 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5946 This is used for outputting expressions used in initializers.
5947
5948 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5949 with a constant address even if that address is not normally legitimate.
5950 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
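/* Typical usage (a sketch, not exhaustive): most callers simply do

     rtx r = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   and let the value land wherever is convenient. With EXPAND_SUM, an address
   computation such as A + I*4 may legitimately come back as
   (plus (reg ...) (mult (reg ...) (const_int 4))) with no add insn emitted
   yet. */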
5951
5952 rtx
5953 expand_expr (exp, target, tmode, modifier)
5954 tree exp;
5955 rtx target;
5956 enum machine_mode tmode;
5957 enum expand_modifier modifier;
5958 {
5959 rtx op0, op1, temp;
5960 tree type = TREE_TYPE (exp);
5961 int unsignedp = TREE_UNSIGNED (type);
5962 enum machine_mode mode;
5963 enum tree_code code = TREE_CODE (exp);
5964 optab this_optab;
5965 rtx subtarget, original_target;
5966 int ignore;
5967 tree context;
5968
5969 /* Handle ERROR_MARK before anybody tries to access its type. */
5970 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
5971 {
5972 op0 = CONST0_RTX (tmode);
5973 if (op0 != 0)
5974 return op0;
5975 return const0_rtx;
5976 }
5977
5978 mode = TYPE_MODE (type);
5979 /* Use subtarget as the target for operand 0 of a binary operation. */
5980 subtarget = get_subtarget (target);
5981 original_target = target;
5982 ignore = (target == const0_rtx
5983 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5984 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5985 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
5986 && TREE_CODE (type) == VOID_TYPE));
5987
5988 /* If we are going to ignore this result, we need only do something
5989 if there is a side-effect somewhere in the expression. If there
5990 is, short-circuit the most common cases here. Note that we must
5991 not call expand_expr with anything but const0_rtx in case this
5992 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
5993
5994 if (ignore)
5995 {
5996 if (! TREE_SIDE_EFFECTS (exp))
5997 return const0_rtx;
5998
5999 /* Ensure we reference a volatile object even if value is ignored, but
6000 don't do this if all we are doing is taking its address. */
6001 if (TREE_THIS_VOLATILE (exp)
6002 && TREE_CODE (exp) != FUNCTION_DECL
6003 && mode != VOIDmode && mode != BLKmode
6004 && modifier != EXPAND_CONST_ADDRESS)
6005 {
6006 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6007 if (GET_CODE (temp) == MEM)
6008 temp = copy_to_reg (temp);
6009 return const0_rtx;
6010 }
6011
6012 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6013 || code == INDIRECT_REF || code == BUFFER_REF)
6014 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6015 modifier);
6016
6017 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6018 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6019 {
6020 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6021 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6022 return const0_rtx;
6023 }
6024 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6025 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6026 /* If the second operand has no side effects, just evaluate
6027 the first. */
6028 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6029 modifier);
6030 else if (code == BIT_FIELD_REF)
6031 {
6032 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6033 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6034 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6035 return const0_rtx;
6036 }
6037
6038 target = 0;
6039 }
6040
6041 #ifdef MAX_INTEGER_COMPUTATION_MODE
6042 /* Only check stuff here if the mode we want is different from the mode
6043 of the expression; if it's the same, check_max_integer_computation_mode
6044 will handle it. Do we really need to check this stuff at all? */
6045
6046 if (target
6047 && GET_MODE (target) != mode
6048 && TREE_CODE (exp) != INTEGER_CST
6049 && TREE_CODE (exp) != PARM_DECL
6050 && TREE_CODE (exp) != ARRAY_REF
6051 && TREE_CODE (exp) != ARRAY_RANGE_REF
6052 && TREE_CODE (exp) != COMPONENT_REF
6053 && TREE_CODE (exp) != BIT_FIELD_REF
6054 && TREE_CODE (exp) != INDIRECT_REF
6055 && TREE_CODE (exp) != CALL_EXPR
6056 && TREE_CODE (exp) != VAR_DECL
6057 && TREE_CODE (exp) != RTL_EXPR)
6058 {
6059 enum machine_mode mode = GET_MODE (target);
6060
6061 if (GET_MODE_CLASS (mode) == MODE_INT
6062 && mode > MAX_INTEGER_COMPUTATION_MODE)
6063 internal_error ("unsupported wide integer operation");
6064 }
6065
6066 if (tmode != mode
6067 && TREE_CODE (exp) != INTEGER_CST
6068 && TREE_CODE (exp) != PARM_DECL
6069 && TREE_CODE (exp) != ARRAY_REF
6070 && TREE_CODE (exp) != ARRAY_RANGE_REF
6071 && TREE_CODE (exp) != COMPONENT_REF
6072 && TREE_CODE (exp) != BIT_FIELD_REF
6073 && TREE_CODE (exp) != INDIRECT_REF
6074 && TREE_CODE (exp) != VAR_DECL
6075 && TREE_CODE (exp) != CALL_EXPR
6076 && TREE_CODE (exp) != RTL_EXPR
6077 && GET_MODE_CLASS (tmode) == MODE_INT
6078 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6079 internal_error ("unsupported wide integer operation");
6080
6081 check_max_integer_computation_mode (exp);
6082 #endif
6083
6084 /* If we will do cse, generate all results into pseudo registers
6085 since 1) that allows cse to find more things
6086 and 2) otherwise cse could produce an insn the machine
6087 cannot support. An exception is a CONSTRUCTOR into a multi-word
6088 MEM: that is much more likely to be done most efficiently straight into the MEM. */
6089
6090 if (! cse_not_expected && mode != BLKmode && target
6091 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6092 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6093 target = subtarget;
6094
6095 switch (code)
6096 {
6097 case LABEL_DECL:
6098 {
6099 tree function = decl_function_context (exp);
6100 /* Handle using a label in a containing function. */
6101 if (function != current_function_decl
6102 && function != inline_function_decl && function != 0)
6103 {
6104 struct function *p = find_function_data (function);
6105 p->expr->x_forced_labels
6106 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6107 p->expr->x_forced_labels);
6108 }
6109 else
6110 {
6111 if (modifier == EXPAND_INITIALIZER)
6112 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6113 label_rtx (exp),
6114 forced_labels);
6115 }
6116
6117 temp = gen_rtx_MEM (FUNCTION_MODE,
6118 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6119 if (function != current_function_decl
6120 && function != inline_function_decl && function != 0)
6121 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6122 return temp;
6123 }
6124
6125 case PARM_DECL:
6126 if (DECL_RTL (exp) == 0)
6127 {
6128 error_with_decl (exp, "prior parameter's size depends on `%s'");
6129 return CONST0_RTX (mode);
6130 }
6131
6132 /* ... fall through ... */
6133
6134 case VAR_DECL:
6135 /* If a static var's type was incomplete when the decl was written,
6136 but the type is complete now, lay out the decl now. */
6137 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6138 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6139 {
6140 rtx value = DECL_RTL_IF_SET (exp);
6141
6142 layout_decl (exp, 0);
6143
6144 /* If the RTL was already set, update its mode and memory
6145 attributes. */
6146 if (value != 0)
6147 {
6148 PUT_MODE (value, DECL_MODE (exp));
6149 SET_DECL_RTL (exp, 0);
6150 set_mem_attributes (value, exp, 1);
6151 SET_DECL_RTL (exp, value);
6152 }
6153 }
6154
6155 /* ... fall through ... */
6156
6157 case FUNCTION_DECL:
6158 case RESULT_DECL:
6159 if (DECL_RTL (exp) == 0)
6160 abort ();
6161
6162 /* Ensure the variable is marked as used even if it doesn't go through
6163 a parser. If it hasn't been used yet, write out an external
6164 definition. */
6165 if (! TREE_USED (exp))
6166 {
6167 assemble_external (exp);
6168 TREE_USED (exp) = 1;
6169 }
6170
6171 /* Show we haven't gotten RTL for this yet. */
6172 temp = 0;
6173
6174 /* Handle variables inherited from containing functions. */
6175 context = decl_function_context (exp);
6176
6177 /* We treat inline_function_decl as an alias for the current function
6178 because that is the inline function whose vars, types, etc.
6179 are being merged into the current function.
6180 See expand_inline_function. */
6181
6182 if (context != 0 && context != current_function_decl
6183 && context != inline_function_decl
6184 /* If var is static, we don't need a static chain to access it. */
6185 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6186 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6187 {
6188 rtx addr;
6189
6190 /* Mark as non-local and addressable. */
6191 DECL_NONLOCAL (exp) = 1;
6192 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6193 abort ();
6194 mark_addressable (exp);
6195 if (GET_CODE (DECL_RTL (exp)) != MEM)
6196 abort ();
6197 addr = XEXP (DECL_RTL (exp), 0);
6198 if (GET_CODE (addr) == MEM)
6199 addr
6200 = replace_equiv_address (addr,
6201 fix_lexical_addr (XEXP (addr, 0), exp));
6202 else
6203 addr = fix_lexical_addr (addr, exp);
6204
6205 temp = replace_equiv_address (DECL_RTL (exp), addr);
6206 }
6207
6208 /* This is the case of an array whose size is to be determined
6209 from its initializer, while the initializer is still being parsed.
6210 See expand_decl. */
6211
6212 else if (GET_CODE (DECL_RTL (exp)) == MEM
6213 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6214 temp = validize_mem (DECL_RTL (exp));
6215
6216 /* If DECL_RTL is memory, we are in the normal case; if either
6217 the address is not valid, or it is not a register and -fforce-addr
6218 is specified, get the address into a register. */
6219
6220 else if (GET_CODE (DECL_RTL (exp)) == MEM
6221 && modifier != EXPAND_CONST_ADDRESS
6222 && modifier != EXPAND_SUM
6223 && modifier != EXPAND_INITIALIZER
6224 && (! memory_address_p (DECL_MODE (exp),
6225 XEXP (DECL_RTL (exp), 0))
6226 || (flag_force_addr
6227 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6228 temp = replace_equiv_address (DECL_RTL (exp),
6229 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6230
6231 /* If we got something, return it. But first, set the alignment
6232 if the address is a register. */
6233 if (temp != 0)
6234 {
6235 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6236 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6237
6238 return temp;
6239 }
6240
6241 /* If the mode of DECL_RTL does not match that of the decl, it
6242 must be a promoted value. We return a SUBREG of the wanted mode,
6243 but mark it so that we know that it was already extended. */
6244
6245 if (GET_CODE (DECL_RTL (exp)) == REG
6246 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6247 {
6248 /* Get the signedness used for this variable. Ensure we get the
6249 same mode we got when the variable was declared. */
6250 if (GET_MODE (DECL_RTL (exp))
6251 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6252 abort ();
6253
6254 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6255 SUBREG_PROMOTED_VAR_P (temp) = 1;
6256 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6257 return temp;
6258 }
6259
6260 return DECL_RTL (exp);
6261
6262 case INTEGER_CST:
6263 return immed_double_const (TREE_INT_CST_LOW (exp),
6264 TREE_INT_CST_HIGH (exp), mode);
6265
6266 case CONST_DECL:
6267 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6268
6269 case REAL_CST:
6270 /* If optimized, generate immediate CONST_DOUBLE
6271 which will be turned into memory by reload if necessary.
6272
6273 We used to force a register so that loop.c could see it. But
6274 this does not allow gen_* patterns to perform optimizations with
6275 the constants. It also produces two insns in cases like "x = 1.0;".
6276 On most machines, floating-point constants are not permitted in
6277 many insns, so we'd end up copying it to a register in any case.
6278
6279 Now, we do the copying in expand_binop, if appropriate. */
6280 return immed_real_const (exp);
6281
6282 case COMPLEX_CST:
6283 case STRING_CST:
6284 if (! TREE_CST_RTL (exp))
6285 output_constant_def (exp, 1);
6286
6287 /* TREE_CST_RTL probably contains a constant address.
6288 On RISC machines where a constant address isn't valid,
6289 make some insns to get that address into a register. */
6290 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6291 && modifier != EXPAND_CONST_ADDRESS
6292 && modifier != EXPAND_INITIALIZER
6293 && modifier != EXPAND_SUM
6294 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6295 || (flag_force_addr
6296 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6297 return replace_equiv_address (TREE_CST_RTL (exp),
6298 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6299 return TREE_CST_RTL (exp);
6300
6301 case EXPR_WITH_FILE_LOCATION:
6302 {
6303 rtx to_return;
6304 const char *saved_input_filename = input_filename;
6305 int saved_lineno = lineno;
6306 input_filename = EXPR_WFL_FILENAME (exp);
6307 lineno = EXPR_WFL_LINENO (exp);
6308 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6309 emit_line_note (input_filename, lineno);
6310 /* Possibly avoid switching back and forth here. */
6311 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6312 input_filename = saved_input_filename;
6313 lineno = saved_lineno;
6314 return to_return;
6315 }
6316
6317 case SAVE_EXPR:
6318 context = decl_function_context (exp);
6319
6320 /* If this SAVE_EXPR was at global context, assume we are an
6321 initialization function and move it into our context. */
6322 if (context == 0)
6323 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6324
6325 /* We treat inline_function_decl as an alias for the current function
6326 because that is the inline function whose vars, types, etc.
6327 are being merged into the current function.
6328 See expand_inline_function. */
6329 if (context == current_function_decl || context == inline_function_decl)
6330 context = 0;
6331
6332 /* If this is non-local, handle it. */
6333 if (context)
6334 {
6335 /* The following call just exists to abort if the context is
6336 not of a containing function. */
6337 find_function_data (context);
6338
6339 temp = SAVE_EXPR_RTL (exp);
6340 if (temp && GET_CODE (temp) == REG)
6341 {
6342 put_var_into_stack (exp);
6343 temp = SAVE_EXPR_RTL (exp);
6344 }
6345 if (temp == 0 || GET_CODE (temp) != MEM)
6346 abort ();
6347 return
6348 replace_equiv_address (temp,
6349 fix_lexical_addr (XEXP (temp, 0), exp));
6350 }
6351 if (SAVE_EXPR_RTL (exp) == 0)
6352 {
6353 if (mode == VOIDmode)
6354 temp = const0_rtx;
6355 else
6356 temp = assign_temp (build_qualified_type (type,
6357 (TYPE_QUALS (type)
6358 | TYPE_QUAL_CONST)),
6359 3, 0, 0);
6360
6361 SAVE_EXPR_RTL (exp) = temp;
6362 if (!optimize && GET_CODE (temp) == REG)
6363 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6364 save_expr_regs);
6365
6366 /* If the mode of TEMP does not match that of the expression, it
6367 must be a promoted value. We pass store_expr a SUBREG of the
6368 wanted mode but mark it so that we know that it was already
6369 extended. Note that `unsignedp' was modified above in
6370 this case. */
6371
6372 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6373 {
6374 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6375 SUBREG_PROMOTED_VAR_P (temp) = 1;
6376 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6377 }
6378
6379 if (temp == const0_rtx)
6380 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6381 else
6382 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6383
6384 TREE_USED (exp) = 1;
6385 }
6386
6387 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6388 must be a promoted value. We return a SUBREG of the wanted mode,
6389 but mark it so that we know that it was already extended. */
6390
6391 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6392 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6393 {
6394 /* Compute the signedness and make the proper SUBREG. */
6395 promote_mode (type, mode, &unsignedp, 0);
6396 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6397 SUBREG_PROMOTED_VAR_P (temp) = 1;
6398 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6399 return temp;
6400 }
6401
6402 return SAVE_EXPR_RTL (exp);
6403
6404 case UNSAVE_EXPR:
6405 {
6406 rtx temp;
6407 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6408 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6409 return temp;
6410 }
6411
6412 case PLACEHOLDER_EXPR:
6413 {
6414 tree old_list = placeholder_list;
6415 tree placeholder_expr = 0;
6416
6417 exp = find_placeholder (exp, &placeholder_expr);
6418 if (exp == 0)
6419 abort ();
6420
6421 placeholder_list = TREE_CHAIN (placeholder_expr);
6422 temp = expand_expr (exp, original_target, tmode, modifier);
6423 placeholder_list = old_list;
6424 return temp;
6425 }
6426
6427 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6428 abort ();
6429
6430 case WITH_RECORD_EXPR:
6431 /* Put the object on the placeholder list, expand our first operand,
6432 and pop the list. */
6433 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6434 placeholder_list);
6435 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6436 modifier);
6437 placeholder_list = TREE_CHAIN (placeholder_list);
6438 return target;
6439
6440 case GOTO_EXPR:
6441 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6442 expand_goto (TREE_OPERAND (exp, 0));
6443 else
6444 expand_computed_goto (TREE_OPERAND (exp, 0));
6445 return const0_rtx;
6446
6447 case EXIT_EXPR:
6448 expand_exit_loop_if_false (NULL,
6449 invert_truthvalue (TREE_OPERAND (exp, 0)));
6450 return const0_rtx;
6451
6452 case LABELED_BLOCK_EXPR:
6453 if (LABELED_BLOCK_BODY (exp))
6454 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6455 /* Should perhaps use expand_label, but this is simpler and safer. */
6456 do_pending_stack_adjust ();
6457 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6458 return const0_rtx;
6459
6460 case EXIT_BLOCK_EXPR:
6461 if (EXIT_BLOCK_RETURN (exp))
6462 sorry ("returned value in block_exit_expr");
6463 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6464 return const0_rtx;
6465
6466 case LOOP_EXPR:
6467 push_temp_slots ();
6468 expand_start_loop (1);
6469 expand_expr_stmt (TREE_OPERAND (exp, 0));
6470 expand_end_loop ();
6471 pop_temp_slots ();
6472
6473 return const0_rtx;
6474
6475 case BIND_EXPR:
6476 {
6477 tree vars = TREE_OPERAND (exp, 0);
6478 int vars_need_expansion = 0;
6479
6480 /* Need to open a binding contour here because
6481 if there are any cleanups they must be contained here. */
6482 expand_start_bindings (2);
6483
6484 /* Mark the corresponding BLOCK for output in its proper place. */
6485 if (TREE_OPERAND (exp, 2) != 0
6486 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6487 insert_block (TREE_OPERAND (exp, 2));
6488
6489 /* If VARS have not yet been expanded, expand them now. */
6490 while (vars)
6491 {
6492 if (!DECL_RTL_SET_P (vars))
6493 {
6494 vars_need_expansion = 1;
6495 expand_decl (vars);
6496 }
6497 expand_decl_init (vars);
6498 vars = TREE_CHAIN (vars);
6499 }
6500
6501 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6502
6503 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6504
6505 return temp;
6506 }
6507
6508 case RTL_EXPR:
6509 if (RTL_EXPR_SEQUENCE (exp))
6510 {
6511 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6512 abort ();
6513 emit_insns (RTL_EXPR_SEQUENCE (exp));
6514 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6515 }
6516 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6517 free_temps_for_rtl_expr (exp);
6518 return RTL_EXPR_RTL (exp);
6519
6520 case CONSTRUCTOR:
6521 /* If we don't need the result, just ensure we evaluate any
6522 subexpressions. */
6523 if (ignore)
6524 {
6525 tree elt;
6526
6527 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6528 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6529
6530 return const0_rtx;
6531 }
6532
6533 /* All elts simple constants => refer to a constant in memory. But
6534 if this is a non-BLKmode mode, let it store a field at a time
6535 since that should make a CONST_INT or CONST_DOUBLE when we
6536 fold. Likewise, if we have a target we can use, it is best to
6537 store directly into the target unless the type is large enough
6538 that memcpy will be used. If we are making an initializer and
6539 all operands are constant, put it in memory as well. */
6540 else if ((TREE_STATIC (exp)
6541 && ((mode == BLKmode
6542 && ! (target != 0 && safe_from_p (target, exp, 1)))
6543 || TREE_ADDRESSABLE (exp)
6544 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6545 && (! MOVE_BY_PIECES_P
6546 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6547 TYPE_ALIGN (type)))
6548 && ! mostly_zeros_p (exp))))
6549 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6550 {
6551 rtx constructor = output_constant_def (exp, 1);
6552
6553 if (modifier != EXPAND_CONST_ADDRESS
6554 && modifier != EXPAND_INITIALIZER
6555 && modifier != EXPAND_SUM)
6556 constructor = validize_mem (constructor);
6557
6558 return constructor;
6559 }
6560 else
6561 {
6562 /* Handle calls that pass values in multiple non-contiguous
6563 locations. The Irix 6 ABI has examples of this. */
6564 if (target == 0 || ! safe_from_p (target, exp, 1)
6565 || GET_CODE (target) == PARALLEL)
6566 target
6567 = assign_temp (build_qualified_type (type,
6568 (TYPE_QUALS (type)
6569 | (TREE_READONLY (exp)
6570 * TYPE_QUAL_CONST))),
6571 0, TREE_ADDRESSABLE (exp), 1);
6572
6573 store_constructor (exp, target, 0,
6574 int_size_in_bytes (TREE_TYPE (exp)));
6575 return target;
6576 }
6577
6578 case INDIRECT_REF:
6579 {
6580 tree exp1 = TREE_OPERAND (exp, 0);
6581 tree index;
6582 tree string = string_constant (exp1, &index);
6583
6584 /* Try to optimize reads from const strings. */
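        /* E.g. (illustrative): a read through a pointer into the string
           literal "abc" at offset 1 folds here to the character constant 'b',
           with no memory reference emitted. */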
6585 if (string
6586 && TREE_CODE (string) == STRING_CST
6587 && TREE_CODE (index) == INTEGER_CST
6588 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6589 && GET_MODE_CLASS (mode) == MODE_INT
6590 && GET_MODE_SIZE (mode) == 1
6591 && modifier != EXPAND_WRITE)
6592 return
6593 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6594
6595 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6596 op0 = memory_address (mode, op0);
6597 temp = gen_rtx_MEM (mode, op0);
6598 set_mem_attributes (temp, exp, 0);
6599
6600 /* If we are writing to this object and its type is a record with
6601 readonly fields, we must mark it as readonly so it will
6602 conflict with readonly references to those fields. */
6603 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6604 RTX_UNCHANGING_P (temp) = 1;
6605
6606 return temp;
6607 }
6608
6609 case ARRAY_REF:
6610 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6611 abort ();
6612
6613 {
6614 tree array = TREE_OPERAND (exp, 0);
6615 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6616 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6617 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6618 HOST_WIDE_INT i;
6619
6620 /* Optimize the special-case of a zero lower bound.
6621
6622 We convert the low_bound to sizetype to avoid some problems
6623 with constant folding. (E.g. suppose the lower bound is 1,
6624 and its mode is QI. Without the conversion, (ARRAY
6625 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6626 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6627
6628 if (! integer_zerop (low_bound))
6629 index = size_diffop (index, convert (sizetype, low_bound));
6630
6631 /* Fold an expression like: "foo"[2].
6632 This is not done in fold so it won't happen inside &.
6633 Don't fold if this is for wide characters since it's too
6634 difficult to do correctly and this is a very rare case. */
6635
6636 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6637 && TREE_CODE (array) == STRING_CST
6638 && TREE_CODE (index) == INTEGER_CST
6639 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6640 && GET_MODE_CLASS (mode) == MODE_INT
6641 && GET_MODE_SIZE (mode) == 1)
6642 return
6643 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6644
6645 /* If this is a constant index into a constant array,
6646 just get the value from the array. Handle both the cases when
6647 we have an explicit constructor and when our operand is a variable
6648 that was declared const. */
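        /* Illustrative example (assumed declaration): given
             static const int t[3] = {10, 20, 30};
           a use of t[2] can expand directly to (const_int 30) when optimizing,
           instead of loading from memory. */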
6649
6650 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6651 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6652 && TREE_CODE (index) == INTEGER_CST
6653 && 0 > compare_tree_int (index,
6654 list_length (CONSTRUCTOR_ELTS
6655 (TREE_OPERAND (exp, 0)))))
6656 {
6657 tree elem;
6658
6659 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6660 i = TREE_INT_CST_LOW (index);
6661 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6662 ;
6663
6664 if (elem)
6665 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6666 modifier);
6667 }
6668
6669 else if (optimize >= 1
6670 && modifier != EXPAND_CONST_ADDRESS
6671 && modifier != EXPAND_INITIALIZER
6672 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6673 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6674 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6675 {
6676 if (TREE_CODE (index) == INTEGER_CST)
6677 {
6678 tree init = DECL_INITIAL (array);
6679
6680 if (TREE_CODE (init) == CONSTRUCTOR)
6681 {
6682 tree elem;
6683
6684 for (elem = CONSTRUCTOR_ELTS (init);
6685 (elem
6686 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6687 elem = TREE_CHAIN (elem))
6688 ;
6689
6690 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6691 return expand_expr (fold (TREE_VALUE (elem)), target,
6692 tmode, modifier);
6693 }
6694 else if (TREE_CODE (init) == STRING_CST
6695 && 0 > compare_tree_int (index,
6696 TREE_STRING_LENGTH (init)))
6697 {
6698 tree type = TREE_TYPE (TREE_TYPE (init));
6699 enum machine_mode mode = TYPE_MODE (type);
6700
6701 if (GET_MODE_CLASS (mode) == MODE_INT
6702 && GET_MODE_SIZE (mode) == 1)
6703 return (GEN_INT
6704 (TREE_STRING_POINTER
6705 (init)[TREE_INT_CST_LOW (index)]));
6706 }
6707 }
6708 }
6709 }
6710 /* Fall through. */
6711
6712 case COMPONENT_REF:
6713 case BIT_FIELD_REF:
6714 case ARRAY_RANGE_REF:
6715 /* If the operand is a CONSTRUCTOR, we can just extract the
6716 appropriate field if it is present. Don't do this if we have
6717 already written the data since we want to refer to that copy
6718 and varasm.c assumes that's what we'll do. */
6719 if (code == COMPONENT_REF
6720 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6721 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6722 {
6723 tree elt;
6724
6725 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6726 elt = TREE_CHAIN (elt))
6727 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6728 /* We can normally use the value of the field in the
6729 CONSTRUCTOR. However, if this is a bitfield in
6730 an integral mode that we can fit in a HOST_WIDE_INT,
6731 we must mask only the number of bits in the bitfield,
6732 since this is done implicitly by the constructor. If
6733 the bitfield does not meet either of those conditions,
6734 we can't do this optimization. */
6735 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6736 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6737 == MODE_INT)
6738 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6739 <= HOST_BITS_PER_WIDE_INT))))
6740 {
6741 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6742 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6743 {
6744 HOST_WIDE_INT bitsize
6745 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6746
6747 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6748 {
6749 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6750 op0 = expand_and (op0, op1, target);
6751 }
6752 else
6753 {
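                  /* Sign-extend the field: shift it to the top of IMODE and
                     arithmetic-shift it back down. E.g. (illustrative) a
                     3-bit signed bitfield extracted in 32-bit SImode is
                     shifted left and then right by 29. */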
6754 enum machine_mode imode
6755 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6756 tree count
6757 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6758 0);
6759
6760 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6761 target, 0);
6762 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6763 target, 0);
6764 }
6765 }
6766
6767 return op0;
6768 }
6769 }
6770
6771 {
6772 enum machine_mode mode1;
6773 HOST_WIDE_INT bitsize, bitpos;
6774 tree offset;
6775 int volatilep = 0;
6776 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6777 &mode1, &unsignedp, &volatilep);
6778 rtx orig_op0;
6779
6780 /* If we got back the original object, something is wrong. Perhaps
6781 we are evaluating an expression too early. In any event, don't
6782 infinitely recurse. */
6783 if (tem == exp)
6784 abort ();
6785
6786 /* If TEM's type is a union of variable size, pass TARGET to the inner
6787 computation, since it will need a temporary and TARGET is known
6788 to suffice. This occurs in unchecked conversion in Ada. */
6789
6790 orig_op0 = op0
6791 = expand_expr (tem,
6792 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6793 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6794 != INTEGER_CST)
6795 ? target : NULL_RTX),
6796 VOIDmode,
6797 (modifier == EXPAND_INITIALIZER
6798 || modifier == EXPAND_CONST_ADDRESS)
6799 ? modifier : EXPAND_NORMAL);
6800
6801 /* If this is a constant, put it into a register if it is a
6802 legitimate constant and OFFSET is 0 and memory if it isn't. */
6803 if (CONSTANT_P (op0))
6804 {
6805 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6806 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6807 && offset == 0)
6808 op0 = force_reg (mode, op0);
6809 else
6810 op0 = validize_mem (force_const_mem (mode, op0));
6811 }
6812
6813 if (offset != 0)
6814 {
6815 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6816
6817 /* If this object is in a register, put it into memory.
6818 This case can't occur in C, but can in Ada if we have
6819 unchecked conversion of an expression from a scalar type to
6820 an array or record type. */
6821 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6822 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6823 {
6824 /* If the operand is a SAVE_EXPR, we can deal with this by
6825 forcing the SAVE_EXPR into memory. */
6826 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6827 {
6828 put_var_into_stack (TREE_OPERAND (exp, 0));
6829 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6830 }
6831 else
6832 {
6833 tree nt
6834 = build_qualified_type (TREE_TYPE (tem),
6835 (TYPE_QUALS (TREE_TYPE (tem))
6836 | TYPE_QUAL_CONST));
6837 rtx memloc = assign_temp (nt, 1, 1, 1);
6838
6839 emit_move_insn (memloc, op0);
6840 op0 = memloc;
6841 }
6842 }
6843
6844 if (GET_CODE (op0) != MEM)
6845 abort ();
6846
6847 if (GET_MODE (offset_rtx) != ptr_mode)
6848 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6849
6850 #ifdef POINTERS_EXTEND_UNSIGNED
6851 if (GET_MODE (offset_rtx) != Pmode)
6852 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6853 #endif
6854
6855 /* A constant address in OP0 can have VOIDmode; we must not try
6856 to call force_reg in that case, so avoid it. */
6857 if (GET_CODE (op0) == MEM
6858 && GET_MODE (op0) == BLKmode
6859 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6860 && bitsize != 0
6861 && (bitpos % bitsize) == 0
6862 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6863 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6864 {
6865 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6866
6867 if (GET_CODE (XEXP (temp, 0)) == REG)
6868 op0 = temp;
6869 else
6870 op0 = (replace_equiv_address
6871 (op0,
6872 force_reg (GET_MODE (XEXP (temp, 0)),
6873 XEXP (temp, 0))));
6874 bitpos = 0;
6875 }
6876
6877 op0 = offset_address (op0, offset_rtx,
6878 highest_pow2_factor (offset));
6879 }
6880
6881 /* Don't forget about volatility even if this is a bitfield. */
6882 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6883 {
6884 if (op0 == orig_op0)
6885 op0 = copy_rtx (op0);
6886
6887 MEM_VOLATILE_P (op0) = 1;
6888 }
6889
6890 /* In cases where an aligned union has an unaligned object
6891 as a field, we might be extracting a BLKmode value from
6892 an integer-mode (e.g., SImode) object. Handle this case
6893 by doing the extract into an object as wide as the field
6894 (which we know to be the width of a basic mode), then
6895 storing into memory, and changing the mode to BLKmode. */
6896 if (mode1 == VOIDmode
6897 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6898 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6899 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6900 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6901 && modifier != EXPAND_CONST_ADDRESS
6902 && modifier != EXPAND_INITIALIZER)
6903 /* If the field isn't aligned enough to fetch as a memref,
6904 fetch it as a bit field. */
6905 || (mode1 != BLKmode
6906 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
6907 && ((TYPE_ALIGN (TREE_TYPE (tem))
6908 < GET_MODE_ALIGNMENT (mode))
6909 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6910 /* If the type and the field are a constant size and the
6911 size of the type isn't the same size as the bitfield,
6912 we must use bitfield operations. */
6913 || (bitsize >= 0
6914 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6915 == INTEGER_CST)
6916 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6917 bitsize)))
6918 {
6919 enum machine_mode ext_mode = mode;
6920
6921 if (ext_mode == BLKmode
6922 && ! (target != 0 && GET_CODE (op0) == MEM
6923 && GET_CODE (target) == MEM
6924 && bitpos % BITS_PER_UNIT == 0))
6925 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6926
6927 if (ext_mode == BLKmode)
6928 {
6929 /* In this case, BITPOS must start at a byte boundary and
6930 TARGET, if specified, must be a MEM. */
6931 if (GET_CODE (op0) != MEM
6932 || (target != 0 && GET_CODE (target) != MEM)
6933 || bitpos % BITS_PER_UNIT != 0)
6934 abort ();
6935
6936 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
6937 if (target == 0)
6938 target = assign_temp (type, 0, 1, 1);
6939
6940 emit_block_move (target, op0,
6941 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6942 / BITS_PER_UNIT));
6943
6944 return target;
6945 }
6946
6947 op0 = validize_mem (op0);
6948
6949 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6950 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
6951
6952 op0 = extract_bit_field (op0, bitsize, bitpos,
6953 unsignedp, target, ext_mode, ext_mode,
6954 int_size_in_bytes (TREE_TYPE (tem)));
6955
6956 /* If the result is a record type and BITSIZE is narrower than
6957 the mode of OP0, an integral mode, and this is a big endian
6958 machine, we must put the field into the high-order bits. */
6959 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6960 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6961 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
6962 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6963 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6964 - bitsize),
6965 op0, 1);
6966
6967 if (mode == BLKmode)
6968 {
6969 rtx new = assign_temp (build_qualified_type
6970 (type_for_mode (ext_mode, 0),
6971 TYPE_QUAL_CONST), 0, 1, 1);
6972
6973 emit_move_insn (new, op0);
6974 op0 = copy_rtx (new);
6975 PUT_MODE (op0, BLKmode);
6976 set_mem_attributes (op0, exp, 1);
6977 }
6978
6979 return op0;
6980 }
6981
6982 /* If the result is BLKmode, use that to access the object
6983 now as well. */
6984 if (mode == BLKmode)
6985 mode1 = BLKmode;
6986
6987 /* Get a reference to just this component. */
6988 if (modifier == EXPAND_CONST_ADDRESS
6989 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6990 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
6991 else
6992 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6993
6994 if (op0 == orig_op0)
6995 op0 = copy_rtx (op0);
6996
6997 set_mem_attributes (op0, exp, 0);
6998 if (GET_CODE (XEXP (op0, 0)) == REG)
6999 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7000
7001 MEM_VOLATILE_P (op0) |= volatilep;
7002 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7003 || modifier == EXPAND_CONST_ADDRESS
7004 || modifier == EXPAND_INITIALIZER)
7005 return op0;
7006 else if (target == 0)
7007 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7008
7009 convert_move (target, op0, unsignedp);
7010 return target;
7011 }
7012
7013 case VTABLE_REF:
7014 {
7015 rtx insn, before = get_last_insn (), vtbl_ref;
7016
7017 /* Evaluate the interior expression. */
7018 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7019 tmode, modifier);
7020
7021 /* Get or create an instruction off which to hang a note. */
7022 if (REG_P (subtarget))
7023 {
7024 target = subtarget;
7025 insn = get_last_insn ();
7026 if (insn == before)
7027 abort ();
7028 if (! INSN_P (insn))
7029 insn = prev_nonnote_insn (insn);
7030 }
7031 else
7032 {
7033 target = gen_reg_rtx (GET_MODE (subtarget));
7034 insn = emit_move_insn (target, subtarget);
7035 }
7036
7037 /* Collect the data for the note. */
7038 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7039 vtbl_ref = plus_constant (vtbl_ref,
7040 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7041 /* Discard the initial CONST that was added. */
7042 vtbl_ref = XEXP (vtbl_ref, 0);
7043
7044 REG_NOTES (insn)
7045 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7046
7047 return target;
7048 }
7049
7050 /* Intended for a reference to a buffer of a file-object in Pascal.
7051 But it's not certain that a special tree code will really be
7052 necessary for these. INDIRECT_REF might work for them. */
7053 case BUFFER_REF:
7054 abort ();
7055
7056 case IN_EXPR:
7057 {
7058 /* Pascal set IN expression.
7059
7060 Algorithm:
7061 rlo = set_low - (set_low%bits_per_word);
7062 the_word = set [ (index - rlo)/bits_per_word ];
7063 bit_index = index % bits_per_word;
7064 bitmask = 1 << bit_index;
7065 return !!(the_word & bitmask); */
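        /* In plain C the same test might read (an illustrative sketch,
           treating the set as an array of words):

             rlo = set_low - set_low % bits_per_word;
             the_word = set[(index - rlo) / bits_per_word];
             return (the_word >> (index % bits_per_word)) & 1;

           The code below does this in RTL, with bits_per_word being
           BITS_PER_UNIT here, i.e. the set is addressed a byte at a time. */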
7066
7067 tree set = TREE_OPERAND (exp, 0);
7068 tree index = TREE_OPERAND (exp, 1);
7069 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7070 tree set_type = TREE_TYPE (set);
7071 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7072 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7073 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7074 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7075 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7076 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7077 rtx setaddr = XEXP (setval, 0);
7078 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7079 rtx rlow;
7080 rtx diff, quo, rem, addr, bit, result;
7081
7082 /* If domain is empty, answer is no. Likewise if index is constant
7083 and out of bounds. */
7084 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7085 && TREE_CODE (set_low_bound) == INTEGER_CST
7086 && tree_int_cst_lt (set_high_bound, set_low_bound))
7087 || (TREE_CODE (index) == INTEGER_CST
7088 && TREE_CODE (set_low_bound) == INTEGER_CST
7089 && tree_int_cst_lt (index, set_low_bound))
7090 || (TREE_CODE (set_high_bound) == INTEGER_CST
7091 && TREE_CODE (index) == INTEGER_CST
7092 && tree_int_cst_lt (set_high_bound, index))))
7093 return const0_rtx;
7094
7095 if (target == 0)
7096 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7097
7098 /* If we get here, we have to generate the code for both cases
7099 (in range and out of range). */
7100
7101 op0 = gen_label_rtx ();
7102 op1 = gen_label_rtx ();
7103
7104 if (! (GET_CODE (index_val) == CONST_INT
7105 && GET_CODE (lo_r) == CONST_INT))
7106 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7107 GET_MODE (index_val), iunsignedp, op1);
7108
7109 if (! (GET_CODE (index_val) == CONST_INT
7110 && GET_CODE (hi_r) == CONST_INT))
7111 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7112 GET_MODE (index_val), iunsignedp, op1);
7113
7114 /* Calculate the element number of bit zero in the first word
7115 of the set. */
7116 if (GET_CODE (lo_r) == CONST_INT)
7117 rlow = GEN_INT (INTVAL (lo_r)
7118 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7119 else
7120 rlow = expand_binop (index_mode, and_optab, lo_r,
7121 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7122 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7123
7124 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7125 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7126
7127 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7128 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7129 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7130 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7131
7132 addr = memory_address (byte_mode,
7133 expand_binop (index_mode, add_optab, diff,
7134 setaddr, NULL_RTX, iunsignedp,
7135 OPTAB_LIB_WIDEN));
7136
7137 /* Extract the bit we want to examine. */
7138 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7139 gen_rtx_MEM (byte_mode, addr),
7140 make_tree (TREE_TYPE (index), rem),
7141 NULL_RTX, 1);
7142 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7143 GET_MODE (target) == byte_mode ? target : 0,
7144 1, OPTAB_LIB_WIDEN);
7145
7146 if (result != target)
7147 convert_move (target, result, 1);
7148
7149 /* Output the code to handle the out-of-range case. */
7150 emit_jump (op0);
7151 emit_label (op1);
7152 emit_move_insn (target, const0_rtx);
7153 emit_label (op0);
7154 return target;
7155 }
7156
7157 case WITH_CLEANUP_EXPR:
7158 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7159 {
7160 WITH_CLEANUP_EXPR_RTL (exp)
7161 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7162 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7163
7164 /* That's it for this cleanup. */
7165 TREE_OPERAND (exp, 1) = 0;
7166 }
7167 return WITH_CLEANUP_EXPR_RTL (exp);
7168
7169 case CLEANUP_POINT_EXPR:
7170 {
7171 /* Start a new binding layer that will keep track of all cleanup
7172 actions to be performed. */
7173 expand_start_bindings (2);
7174
7175 target_temp_slot_level = temp_slot_level;
7176
7177 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7178 /* If we're going to use this value, load it up now. */
7179 if (! ignore)
7180 op0 = force_not_mem (op0);
7181 preserve_temp_slots (op0);
7182 expand_end_bindings (NULL_TREE, 0, 0);
7183 }
7184 return op0;
7185
7186 case CALL_EXPR:
7187 /* Check for a built-in function. */
7188 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7189 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7190 == FUNCTION_DECL)
7191 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7192 {
7193 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7194 == BUILT_IN_FRONTEND)
7195 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7196 else
7197 return expand_builtin (exp, target, subtarget, tmode, ignore);
7198 }
7199
7200 return expand_call (exp, target, ignore);
7201
7202 case NON_LVALUE_EXPR:
7203 case NOP_EXPR:
7204 case CONVERT_EXPR:
7205 case REFERENCE_EXPR:
7206 if (TREE_OPERAND (exp, 0) == error_mark_node)
7207 return const0_rtx;
7208
7209 if (TREE_CODE (type) == UNION_TYPE)
7210 {
7211 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7212
7213 /* If both input and output are BLKmode, this conversion isn't doing
7214 anything except possibly changing memory attributes. */
7215 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7216 {
7217 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7218 modifier);
7219
7220 result = copy_rtx (result);
7221 set_mem_attributes (result, exp, 0);
7222 return result;
7223 }
7224
7225 if (target == 0)
7226 target = assign_temp (type, 0, 1, 1);
7227
7228 if (GET_CODE (target) == MEM)
7229 /* Store data into beginning of memory target. */
7230 store_expr (TREE_OPERAND (exp, 0),
7231 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7232
7233 else if (GET_CODE (target) == REG)
7234 /* Store this field into a union of the proper type. */
7235 store_field (target,
7236 MIN ((int_size_in_bytes (TREE_TYPE
7237 (TREE_OPERAND (exp, 0)))
7238 * BITS_PER_UNIT),
7239 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7240 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7241 VOIDmode, 0, type, 0);
7242 else
7243 abort ();
7244
7245 /* Return the entire union. */
7246 return target;
7247 }
7248
7249 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7250 {
7251 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7252 modifier);
7253
7254 /* If the signedness of the conversion differs and OP0 is
7255 a promoted SUBREG, clear that indication since we now
7256 have to do the proper extension. */
7257 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7258 && GET_CODE (op0) == SUBREG)
7259 SUBREG_PROMOTED_VAR_P (op0) = 0;
7260
7261 return op0;
7262 }
7263
7264 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7265 if (GET_MODE (op0) == mode)
7266 return op0;
7267
7268 /* If OP0 is a constant, just convert it into the proper mode. */
7269 if (CONSTANT_P (op0))
7270 return
7271 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7272 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7273
7274 if (modifier == EXPAND_INITIALIZER)
7275 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7276
7277 if (target == 0)
7278 return
7279 convert_to_mode (mode, op0,
7280 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7281 else
7282 convert_move (target, op0,
7283 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7284 return target;
7285
7286 case VIEW_CONVERT_EXPR:
7287 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7288
7289 /* If the input and output modes are both the same, we are done.
7290 Otherwise, if neither mode is BLKmode and both are within a word, we
7291 can use gen_lowpart. If neither is true, make sure the operand is
7292 in memory and convert the MEM to the new mode. */
7293 if (TYPE_MODE (type) == GET_MODE (op0))
7294 ;
7295 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7296 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7297 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7298 op0 = gen_lowpart (TYPE_MODE (type), op0);
7299 else if (GET_CODE (op0) != MEM)
7300 {
7301 /* If the operand is not a MEM, force it into memory. Since we
7302 are going to be changing the mode of the MEM, don't call
7303 force_const_mem for constants because we don't allow pool
7304 constants to change mode. */
7305 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7306
7307 if (TREE_ADDRESSABLE (exp))
7308 abort ();
7309
7310 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7311 target
7312 = assign_stack_temp_for_type
7313 (TYPE_MODE (inner_type),
7314 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7315
7316 emit_move_insn (target, op0);
7317 op0 = target;
7318 }
7319
7320 /* At this point, OP0 is in the correct mode. If the output type is such
7321 that the operand is known to be aligned, indicate that it is.
7322 Otherwise, we need only be concerned about alignment for non-BLKmode
7323 results. */
7324 if (GET_CODE (op0) == MEM)
7325 {
7326 op0 = copy_rtx (op0);
7327
7328 if (TYPE_ALIGN_OK (type))
7329 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7330 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7331 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7332 {
7333 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7334 HOST_WIDE_INT temp_size
7335 = MAX (int_size_in_bytes (inner_type),
7336 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7337 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7338 temp_size, 0, type);
7339 rtx new_with_op0_mode = copy_rtx (new);
7340
7341 if (TREE_ADDRESSABLE (exp))
7342 abort ();
7343
7344 PUT_MODE (new_with_op0_mode, GET_MODE (op0));
7345 if (GET_MODE (op0) == BLKmode)
7346 emit_block_move (new_with_op0_mode, op0,
7347 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7348 else
7349 emit_move_insn (new_with_op0_mode, op0);
7350
7351 op0 = new;
7352 }
7353
7354 PUT_MODE (op0, TYPE_MODE (type));
7355 }
7356
7357 return op0;
7358
7359 case PLUS_EXPR:
7360 /* We come here from MINUS_EXPR when the second operand is a
7361 constant. */
7362 plus_expr:
7363 this_optab = ! unsignedp && flag_trapv
7364 && (GET_MODE_CLASS (mode) == MODE_INT)
7365 ? addv_optab : add_optab;
7366
7367 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7368 something else, make sure we add the register to the constant and
7369 then to the other thing. This case can occur during strength
7370 reduction and doing it this way will produce better code if the
7371 frame pointer or argument pointer is eliminated.
7372
7373 fold-const.c will ensure that the constant is always in the inner
7374 PLUS_EXPR, so the only case we need to do anything about is if
7375 sp, ap, or fp is our second argument, in which case we must swap
7376 the innermost first argument and our second argument. */
7377
7378 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7379 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7380 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7381 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7382 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7383 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7384 {
7385 tree t = TREE_OPERAND (exp, 1);
7386
7387 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7388 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7389 }
7390
7391 /* If the result is to be ptr_mode and we are adding an integer to
7392 something, we might be forming a constant. So try to use
7393 plus_constant. If it produces a sum and we can't accept it,
7394 use force_operand. This allows P = &ARR[const] to generate
7395 efficient code on machines where a SYMBOL_REF is not a valid
7396 address.
7397
7398 If this is an EXPAND_SUM call, always return the sum. */
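/* Illustrative sketch (hypothetical source, assuming a 4-byte int):

     static int arr[10];
     int *p = &arr[3];

   is ideally expanded via plus_constant into
   (const (plus (symbol_ref "arr") (const_int 12)))
   instead of an addition performed at run time.  */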
7399 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7400 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7401 {
7402 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7403 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7404 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7405 {
7406 rtx constant_part;
7407
7408 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7409 EXPAND_SUM);
7410 /* Use immed_double_const to ensure that the constant is
7411 truncated according to the mode of OP1, then sign extended
7412 to a HOST_WIDE_INT. Using the constant directly can result
7413 in non-canonical RTL in a 64x32 cross compile. */
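/* E.g. (illustration) an SImode constant with bit 31 set must appear
   as a sign-extended, negative HOST_WIDE_INT on a 64-bit host; a
   zero-extended CONST_INT would not be canonical RTL.  */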
7414 constant_part
7415 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7416 (HOST_WIDE_INT) 0,
7417 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7418 op1 = plus_constant (op1, INTVAL (constant_part));
7419 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7420 op1 = force_operand (op1, target);
7421 return op1;
7422 }
7423
7424 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7425 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7426 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7427 {
7428 rtx constant_part;
7429
7430 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7431 EXPAND_SUM);
7432 if (! CONSTANT_P (op0))
7433 {
7434 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7435 VOIDmode, modifier);
7436 /* Don't go to both_summands if modifier
7437 says it's not right to return a PLUS. */
7438 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7439 goto binop2;
7440 goto both_summands;
7441 }
7442 /* Use immed_double_const to ensure that the constant is
7443 truncated according to the mode of OP0, then sign extended
7444 to a HOST_WIDE_INT. Using the constant directly can result
7445 in non-canonical RTL in a 64x32 cross compile. */
7446 constant_part
7447 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7448 (HOST_WIDE_INT) 0,
7449 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7450 op0 = plus_constant (op0, INTVAL (constant_part));
7451 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7452 op0 = force_operand (op0, target);
7453 return op0;
7454 }
7455 }
7456
7457 /* No sense saving up arithmetic to be done
7458 if it's all in the wrong mode to form part of an address.
7459 And force_operand won't know whether to sign-extend or
7460 zero-extend. */
7461 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7462 || mode != ptr_mode)
7463 goto binop;
7464
7465 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7466 subtarget = 0;
7467
7468 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7469 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7470
7471 both_summands:
7472 /* Make sure any term that's a sum with a constant comes last. */
7473 if (GET_CODE (op0) == PLUS
7474 && CONSTANT_P (XEXP (op0, 1)))
7475 {
7476 temp = op0;
7477 op0 = op1;
7478 op1 = temp;
7479 }
7480 /* If adding to a sum including a constant,
7481 associate it to put the constant outside. */
7482 if (GET_CODE (op1) == PLUS
7483 && CONSTANT_P (XEXP (op1, 1)))
7484 {
7485 rtx constant_term = const0_rtx;
7486
7487 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7488 if (temp != 0)
7489 op0 = temp;
7490 /* Ensure that MULT comes first if there is one. */
7491 else if (GET_CODE (op0) == MULT)
7492 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7493 else
7494 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7495
7496 /* Let's also eliminate constants from op0 if possible. */
7497 op0 = eliminate_constant_term (op0, &constant_term);
7498
7499 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7500 their sum should be a constant. Form it into OP1, since the
7501 result we want will then be OP0 + OP1. */
7502
7503 temp = simplify_binary_operation (PLUS, mode, constant_term,
7504 XEXP (op1, 1));
7505 if (temp != 0)
7506 op1 = temp;
7507 else
7508 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7509 }
7510
7511 /* Put a constant term last and put a multiplication first. */
7512 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7513 temp = op1, op1 = op0, op0 = temp;
7514
7515 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7516 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7517
7518 case MINUS_EXPR:
7519 /* For initializers, we are allowed to return a MINUS of two
7520 symbolic constants. Here we handle all cases when both operands
7521 are constant. */
7522 /* Handle difference of two symbolic constants,
7523 for the sake of an initializer. */
7524 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7525 && really_constant_p (TREE_OPERAND (exp, 0))
7526 && really_constant_p (TREE_OPERAND (exp, 1)))
7527 {
7528 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7529 modifier);
7530 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7531 modifier);
7532
7533 /* If the last operand is a CONST_INT, use plus_constant of
7534 the negated constant. Else make the MINUS. */
7535 if (GET_CODE (op1) == CONST_INT)
7536 return plus_constant (op0, - INTVAL (op1));
7537 else
7538 return gen_rtx_MINUS (mode, op0, op1);
7539 }
7540 /* Convert A - const to A + (-const). */
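/* E.g. (illustration) X - 5 is rewritten as X + (-5) so the PLUS_EXPR
   code above can fold the constant into an address where possible.  */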
7541 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7542 {
7543 tree negated = fold (build1 (NEGATE_EXPR, type,
7544 TREE_OPERAND (exp, 1)));
7545
7546 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7547 /* If we can't negate the constant in TYPE, leave it alone and
7548 expand_binop will negate it for us. We used to try to do it
7549 here in the signed version of TYPE, but that doesn't work
7550 on POINTER_TYPEs. */;
7551 else
7552 {
7553 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7554 goto plus_expr;
7555 }
7556 }
7557 this_optab = ! unsignedp && flag_trapv
7558 && (GET_MODE_CLASS(mode) == MODE_INT)
7559 ? subv_optab : sub_optab;
7560 goto binop;
7561
7562 case MULT_EXPR:
7563 /* If first operand is constant, swap them.
7564 Thus the following special case checks need only
7565 check the second operand. */
7566 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7567 {
7568 tree t1 = TREE_OPERAND (exp, 0);
7569 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7570 TREE_OPERAND (exp, 1) = t1;
7571 }
7572
7573 /* Attempt to return something suitable for generating an
7574 indexed address, for machines that support that. */
7575
7576 if (modifier == EXPAND_SUM && mode == ptr_mode
7577 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7578 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7579 {
7580 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7581 EXPAND_SUM);
7582
7583 /* Apply distributive law if OP0 is x+c. */
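/* E.g. (illustration) (X + 4) * 3 is returned as
   (plus (mult X 3) (const_int 12)).  */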
7584 if (GET_CODE (op0) == PLUS
7585 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7586 return
7587 gen_rtx_PLUS
7588 (mode,
7589 gen_rtx_MULT
7590 (mode, XEXP (op0, 0),
7591 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7592 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7593 * INTVAL (XEXP (op0, 1))));
7594
7595 if (GET_CODE (op0) != REG)
7596 op0 = force_operand (op0, NULL_RTX);
7597 if (GET_CODE (op0) != REG)
7598 op0 = copy_to_mode_reg (mode, op0);
7599
7600 return
7601 gen_rtx_MULT (mode, op0,
7602 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7603 }
7604
7605 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7606 subtarget = 0;
7607
7608 /* Check for multiplying things that have been extended
7609 from a narrower type. If this machine supports multiplying
7610 in that narrower type with a result in the desired type,
7611 do it that way, and avoid the explicit type-conversion. */
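/* Illustrative case (target-dependent sketch): on a 32-bit machine,
   multiplying two values just extended from SImode to DImode can use a
   widening 32x32->64 multiply rather than a full DImode multiply.  */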
7612 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7613 && TREE_CODE (type) == INTEGER_TYPE
7614 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7615 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7616 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7617 && int_fits_type_p (TREE_OPERAND (exp, 1),
7618 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7619 /* Don't use a widening multiply if a shift will do. */
7620 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7621 > HOST_BITS_PER_WIDE_INT)
7622 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7623 ||
7624 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7625 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7626 ==
7627 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7628 /* If both operands are extended, they must either both
7629 be zero-extended or both be sign-extended. */
7630 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7631 ==
7632 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7633 {
7634 enum machine_mode innermode
7635 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7636 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7637 ? smul_widen_optab : umul_widen_optab);
7638 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7639 ? umul_widen_optab : smul_widen_optab);
7640 if (mode == GET_MODE_WIDER_MODE (innermode))
7641 {
7642 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7643 {
7644 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7645 NULL_RTX, VOIDmode, 0);
7646 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7647 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7648 VOIDmode, 0);
7649 else
7650 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7651 NULL_RTX, VOIDmode, 0);
7652 goto binop2;
7653 }
7654 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7655 && innermode == word_mode)
7656 {
7657 rtx htem;
7658 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7659 NULL_RTX, VOIDmode, 0);
7660 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7661 op1 = convert_modes (innermode, mode,
7662 expand_expr (TREE_OPERAND (exp, 1),
7663 NULL_RTX, VOIDmode, 0),
7664 unsignedp);
7665 else
7666 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7667 NULL_RTX, VOIDmode, 0);
7668 temp = expand_binop (mode, other_optab, op0, op1, target,
7669 unsignedp, OPTAB_LIB_WIDEN);
7670 htem = expand_mult_highpart_adjust (innermode,
7671 gen_highpart (innermode, temp),
7672 op0, op1,
7673 gen_highpart (innermode, temp),
7674 unsignedp);
7675 emit_move_insn (gen_highpart (innermode, temp), htem);
7676 return temp;
7677 }
7678 }
7679 }
7680 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7681 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7682 return expand_mult (mode, op0, op1, target, unsignedp);
7683
7684 case TRUNC_DIV_EXPR:
7685 case FLOOR_DIV_EXPR:
7686 case CEIL_DIV_EXPR:
7687 case ROUND_DIV_EXPR:
7688 case EXACT_DIV_EXPR:
7689 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7690 subtarget = 0;
7691 /* Possible optimization: compute the dividend with EXPAND_SUM
7692 then, if the divisor is constant, optimize the case
7693 where some terms of the dividend have coeffs divisible by it. */
7694 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7695 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7696 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7697
7698 case RDIV_EXPR:
7699 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7700 saving the expensive divide. If not, combine will rebuild the original
7701 computation. */
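/* E.g. (illustration) in X/D + Y/D both divisions become
   multiplications by the same 1/D, which CSE may then share.  */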
7702 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7703 && !real_onep (TREE_OPERAND (exp, 0)))
7704 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7705 build (RDIV_EXPR, type,
7706 build_real (type, dconst1),
7707 TREE_OPERAND (exp, 1))),
7708 target, tmode, unsignedp);
7709 this_optab = sdiv_optab;
7710 goto binop;
7711
7712 case TRUNC_MOD_EXPR:
7713 case FLOOR_MOD_EXPR:
7714 case CEIL_MOD_EXPR:
7715 case ROUND_MOD_EXPR:
7716 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7717 subtarget = 0;
7718 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7719 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7720 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7721
7722 case FIX_ROUND_EXPR:
7723 case FIX_FLOOR_EXPR:
7724 case FIX_CEIL_EXPR:
7725 abort (); /* Not used for C. */
7726
7727 case FIX_TRUNC_EXPR:
7728 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7729 if (target == 0)
7730 target = gen_reg_rtx (mode);
7731 expand_fix (target, op0, unsignedp);
7732 return target;
7733
7734 case FLOAT_EXPR:
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7736 if (target == 0)
7737 target = gen_reg_rtx (mode);
7738 /* expand_float can't figure out what to do if FROM has VOIDmode.
7739 So give it the correct mode. With -O, cse will optimize this. */
7740 if (GET_MODE (op0) == VOIDmode)
7741 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7742 op0);
7743 expand_float (target, op0,
7744 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7745 return target;
7746
7747 case NEGATE_EXPR:
7748 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7749 temp = expand_unop (mode,
7750 ! unsignedp && flag_trapv
7751 && (GET_MODE_CLASS(mode) == MODE_INT)
7752 ? negv_optab : neg_optab, op0, target, 0);
7753 if (temp == 0)
7754 abort ();
7755 return temp;
7756
7757 case ABS_EXPR:
7758 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7759
7760 /* Handle complex values specially. */
7761 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7762 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7763 return expand_complex_abs (mode, op0, target, unsignedp);
7764
7765 /* Unsigned abs is simply the operand. Testing here means we don't
7766 risk generating incorrect code below. */
7767 if (TREE_UNSIGNED (type))
7768 return op0;
7769
7770 return expand_abs (mode, op0, target, unsignedp,
7771 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7772
7773 case MAX_EXPR:
7774 case MIN_EXPR:
7775 target = original_target;
7776 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7777 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7778 || GET_MODE (target) != mode
7779 || (GET_CODE (target) == REG
7780 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7781 target = gen_reg_rtx (mode);
7782 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7783 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7784
7785 /* First try to do it with a special MIN or MAX instruction.
7786 If that does not win, use a conditional jump to select the proper
7787 value. */
7788 this_optab = (TREE_UNSIGNED (type)
7789 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7790 : (code == MIN_EXPR ? smin_optab : smax_optab));
7791
7792 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7793 OPTAB_WIDEN);
7794 if (temp != 0)
7795 return temp;
7796
7797 /* At this point, a MEM target is no longer useful; we will get better
7798 code without it. */
7799
7800 if (GET_CODE (target) == MEM)
7801 target = gen_reg_rtx (mode);
7802
7803 if (target != op0)
7804 emit_move_insn (target, op0);
7805
7806 op0 = gen_label_rtx ();
7807
7808 /* If this mode is an integer too wide to compare properly,
7809 compare word by word. Rely on cse to optimize constant cases. */
7810 if (GET_MODE_CLASS (mode) == MODE_INT
7811 && ! can_compare_p (GE, mode, ccp_jump))
7812 {
7813 if (code == MAX_EXPR)
7814 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7815 target, op1, NULL_RTX, op0);
7816 else
7817 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7818 op1, target, NULL_RTX, op0);
7819 }
7820 else
7821 {
7822 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7823 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7824 unsignedp, mode, NULL_RTX, NULL_RTX,
7825 op0);
7826 }
7827 emit_move_insn (target, op1);
7828 emit_label (op0);
7829 return target;
7830
7831 case BIT_NOT_EXPR:
7832 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7833 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7834 if (temp == 0)
7835 abort ();
7836 return temp;
7837
7838 case FFS_EXPR:
7839 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7840 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7841 if (temp == 0)
7842 abort ();
7843 return temp;
7844
7845 /* ??? Can optimize bitwise operations with one arg constant.
7846 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7847 and (a bitwise1 b) bitwise2 b (etc)
7848 but that is probably not worth while. */
7849
7850 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7851 boolean values when we want in all cases to compute both of them. In
7852 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7853 as actual zero-or-1 values and then bitwise anding. In cases where
7854 there cannot be any side effects, better code would be made by
7855 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7856 how to recognize those cases. */
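/* E.g. (illustration) a TRUTH_AND_EXPR of (a > 0) and (b > 0) expands
   both comparisons to 0-or-1 values and combines them with a bitwise
   AND, emitting no branches.  */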
7857
7858 case TRUTH_AND_EXPR:
7859 case BIT_AND_EXPR:
7860 this_optab = and_optab;
7861 goto binop;
7862
7863 case TRUTH_OR_EXPR:
7864 case BIT_IOR_EXPR:
7865 this_optab = ior_optab;
7866 goto binop;
7867
7868 case TRUTH_XOR_EXPR:
7869 case BIT_XOR_EXPR:
7870 this_optab = xor_optab;
7871 goto binop;
7872
7873 case LSHIFT_EXPR:
7874 case RSHIFT_EXPR:
7875 case LROTATE_EXPR:
7876 case RROTATE_EXPR:
7877 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7878 subtarget = 0;
7879 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7880 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7881 unsignedp);
7882
7883 /* Could determine the answer when only additive constants differ. Also,
7884 the addition of one can be handled by changing the condition. */
7885 case LT_EXPR:
7886 case LE_EXPR:
7887 case GT_EXPR:
7888 case GE_EXPR:
7889 case EQ_EXPR:
7890 case NE_EXPR:
7891 case UNORDERED_EXPR:
7892 case ORDERED_EXPR:
7893 case UNLT_EXPR:
7894 case UNLE_EXPR:
7895 case UNGT_EXPR:
7896 case UNGE_EXPR:
7897 case UNEQ_EXPR:
7898 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7899 if (temp != 0)
7900 return temp;
7901
7902 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7903 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7904 && original_target
7905 && GET_CODE (original_target) == REG
7906 && (GET_MODE (original_target)
7907 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7908 {
7909 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7910 VOIDmode, 0);
7911
7912 if (temp != original_target)
7913 temp = copy_to_reg (temp);
7914
7915 op1 = gen_label_rtx ();
7916 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7917 GET_MODE (temp), unsignedp, op1);
7918 emit_move_insn (temp, const1_rtx);
7919 emit_label (op1);
7920 return temp;
7921 }
7922
7923 /* If no set-flag instruction, must generate a conditional
7924 store into a temporary variable. Drop through
7925 and handle this like && and ||. */
7926
7927 case TRUTH_ANDIF_EXPR:
7928 case TRUTH_ORIF_EXPR:
7929 if (! ignore
7930 && (target == 0 || ! safe_from_p (target, exp, 1)
7931 /* Make sure we don't have a hard reg (such as function's return
7932 value) live across basic blocks, if not optimizing. */
7933 || (!optimize && GET_CODE (target) == REG
7934 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7935 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7936
7937 if (target)
7938 emit_clr_insn (target);
7939
7940 op1 = gen_label_rtx ();
7941 jumpifnot (exp, op1);
7942
7943 if (target)
7944 emit_0_to_1_insn (target);
7945
7946 emit_label (op1);
7947 return ignore ? const0_rtx : target;
7948
7949 case TRUTH_NOT_EXPR:
7950 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7951 /* The parser is careful to generate TRUTH_NOT_EXPR
7952 only with operands that are always zero or one. */
7953 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7954 target, 1, OPTAB_LIB_WIDEN);
7955 if (temp == 0)
7956 abort ();
7957 return temp;
7958
7959 case COMPOUND_EXPR:
7960 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7961 emit_queue ();
7962 return expand_expr (TREE_OPERAND (exp, 1),
7963 (ignore ? const0_rtx : target),
7964 VOIDmode, 0);
7965
7966 case COND_EXPR:
7967 /* If we would have a "singleton" (see below) were it not for a
7968 conversion in each arm, bring that conversion back out. */
7969 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7970 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7971 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7972 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7973 {
7974 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7975 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7976
7977 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
7978 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7979 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
7980 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
7981 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
7982 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
7983 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
7984 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
7985 return expand_expr (build1 (NOP_EXPR, type,
7986 build (COND_EXPR, TREE_TYPE (iftrue),
7987 TREE_OPERAND (exp, 0),
7988 iftrue, iffalse)),
7989 target, tmode, modifier);
7990 }
7991
7992 {
7993 /* Note that COND_EXPRs whose type is a structure or union
7994 are required to be constructed to contain assignments of
7995 a temporary variable, so that we can evaluate them here
7996 for side effect only. If type is void, we must do likewise. */
7997
7998 /* If an arm of the branch requires a cleanup,
7999 only that cleanup is performed. */
8000
8001 tree singleton = 0;
8002 tree binary_op = 0, unary_op = 0;
8003
8004 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8005 convert it to our mode, if necessary. */
8006 if (integer_onep (TREE_OPERAND (exp, 1))
8007 && integer_zerop (TREE_OPERAND (exp, 2))
8008 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8009 {
8010 if (ignore)
8011 {
8012 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8013 modifier);
8014 return const0_rtx;
8015 }
8016
8017 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8018 if (GET_MODE (op0) == mode)
8019 return op0;
8020
8021 if (target == 0)
8022 target = gen_reg_rtx (mode);
8023 convert_move (target, op0, unsignedp);
8024 return target;
8025 }
8026
8027 /* Check for X ? A + B : A. If we have this, we can copy A to the
8028 output and conditionally add B. Similarly for unary operations.
8029 Don't do this if X has side-effects because those side effects
8030 might affect A or B and the "?" operation is a sequence point in
8031 ANSI. (operand_equal_p tests for side effects.) */
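/* E.g. (illustration) X ? A + B : A copies A to the output and then
   conditionally adds B, instead of evaluating both arms separately.  */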
8032
8033 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8034 && operand_equal_p (TREE_OPERAND (exp, 2),
8035 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8036 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8037 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8038 && operand_equal_p (TREE_OPERAND (exp, 1),
8039 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8040 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8041 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8042 && operand_equal_p (TREE_OPERAND (exp, 2),
8043 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8044 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8045 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8046 && operand_equal_p (TREE_OPERAND (exp, 1),
8047 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8048 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8049
8050 /* If we are not to produce a result, we have no target. Otherwise,
8051 if a target was specified use it; it will not be used as an
8052 intermediate target unless it is safe. If no target, use a
8053 temporary. */
8054
8055 if (ignore)
8056 temp = 0;
8057 else if (original_target
8058 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8059 || (singleton && GET_CODE (original_target) == REG
8060 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8061 && original_target == var_rtx (singleton)))
8062 && GET_MODE (original_target) == mode
8063 #ifdef HAVE_conditional_move
8064 && (! can_conditionally_move_p (mode)
8065 || GET_CODE (original_target) == REG
8066 || TREE_ADDRESSABLE (type))
8067 #endif
8068 && (GET_CODE (original_target) != MEM
8069 || TREE_ADDRESSABLE (type)))
8070 temp = original_target;
8071 else if (TREE_ADDRESSABLE (type))
8072 abort ();
8073 else
8074 temp = assign_temp (type, 0, 0, 1);
8075
8076 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8077 do the test of X as a store-flag operation, do this as
8078 A + ((X != 0) << log C). Similarly for other simple binary
8079 operators. Only do for C == 1 if BRANCH_COST is low. */
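/* E.g. (illustration) with C == 4, X ? A + 4 : A becomes
   A + ((X != 0) << 2), avoiding a branch entirely.  */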
8080 if (temp && singleton && binary_op
8081 && (TREE_CODE (binary_op) == PLUS_EXPR
8082 || TREE_CODE (binary_op) == MINUS_EXPR
8083 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8084 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8085 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8086 : integer_onep (TREE_OPERAND (binary_op, 1)))
8087 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8088 {
8089 rtx result;
8090 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8091 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8092 ? addv_optab : add_optab)
8093 : TREE_CODE (binary_op) == MINUS_EXPR
8094 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8095 ? subv_optab : sub_optab)
8096 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8097 : xor_optab);
8098
8099 /* If we had X ? A : A + 1, do this as A + (X == 0).
8100
8101 We have to invert the truth value here and then put it
8102 back later if do_store_flag fails. We cannot simply copy
8103 TREE_OPERAND (exp, 0) to another variable and modify that
8104 because invert_truthvalue can modify the tree pointed to
8105 by its argument. */
8106 if (singleton == TREE_OPERAND (exp, 1))
8107 TREE_OPERAND (exp, 0)
8108 = invert_truthvalue (TREE_OPERAND (exp, 0));
8109
8110 result = do_store_flag (TREE_OPERAND (exp, 0),
8111 (safe_from_p (temp, singleton, 1)
8112 ? temp : NULL_RTX),
8113 mode, BRANCH_COST <= 1);
8114
8115 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8116 result = expand_shift (LSHIFT_EXPR, mode, result,
8117 build_int_2 (tree_log2
8118 (TREE_OPERAND
8119 (binary_op, 1)),
8120 0),
8121 (safe_from_p (temp, singleton, 1)
8122 ? temp : NULL_RTX), 0);
8123
8124 if (result)
8125 {
8126 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8127 return expand_binop (mode, boptab, op1, result, temp,
8128 unsignedp, OPTAB_LIB_WIDEN);
8129 }
8130 else if (singleton == TREE_OPERAND (exp, 1))
8131 TREE_OPERAND (exp, 0)
8132 = invert_truthvalue (TREE_OPERAND (exp, 0));
8133 }
8134
8135 do_pending_stack_adjust ();
8136 NO_DEFER_POP;
8137 op0 = gen_label_rtx ();
8138
8139 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8140 {
8141 if (temp != 0)
8142 {
8143 /* If the target conflicts with the other operand of the
8144 binary op, we can't use it. Also, we can't use the target
8145 if it is a hard register, because evaluating the condition
8146 might clobber it. */
8147 if ((binary_op
8148 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8149 || (GET_CODE (temp) == REG
8150 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8151 temp = gen_reg_rtx (mode);
8152 store_expr (singleton, temp, 0);
8153 }
8154 else
8155 expand_expr (singleton,
8156 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8157 if (singleton == TREE_OPERAND (exp, 1))
8158 jumpif (TREE_OPERAND (exp, 0), op0);
8159 else
8160 jumpifnot (TREE_OPERAND (exp, 0), op0);
8161
8162 start_cleanup_deferral ();
8163 if (binary_op && temp == 0)
8164 /* Just touch the other operand. */
8165 expand_expr (TREE_OPERAND (binary_op, 1),
8166 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8167 else if (binary_op)
8168 store_expr (build (TREE_CODE (binary_op), type,
8169 make_tree (type, temp),
8170 TREE_OPERAND (binary_op, 1)),
8171 temp, 0);
8172 else
8173 store_expr (build1 (TREE_CODE (unary_op), type,
8174 make_tree (type, temp)),
8175 temp, 0);
8176 op1 = op0;
8177 }
8178 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8179 comparison operator. If we have one of these cases, set the
8180 output to A, branch on A (cse will merge these two references),
8181 then set the output to FOO. */
8182 else if (temp
8183 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8184 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8185 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8186 TREE_OPERAND (exp, 1), 0)
8187 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8188 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8189 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8190 {
8191 if (GET_CODE (temp) == REG
8192 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8193 temp = gen_reg_rtx (mode);
8194 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8195 jumpif (TREE_OPERAND (exp, 0), op0);
8196
8197 start_cleanup_deferral ();
8198 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8199 op1 = op0;
8200 }
8201 else if (temp
8202 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8203 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8204 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8205 TREE_OPERAND (exp, 2), 0)
8206 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8207 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8208 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8209 {
8210 if (GET_CODE (temp) == REG
8211 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8212 temp = gen_reg_rtx (mode);
8213 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8214 jumpifnot (TREE_OPERAND (exp, 0), op0);
8215
8216 start_cleanup_deferral ();
8217 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8218 op1 = op0;
8219 }
8220 else
8221 {
8222 op1 = gen_label_rtx ();
8223 jumpifnot (TREE_OPERAND (exp, 0), op0);
8224
8225 start_cleanup_deferral ();
8226
8227 /* One branch of the cond can be void, if it never returns. For
8228 example, A ? throw : E. */
8229 if (temp != 0
8230 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8231 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8232 else
8233 expand_expr (TREE_OPERAND (exp, 1),
8234 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8235 end_cleanup_deferral ();
8236 emit_queue ();
8237 emit_jump_insn (gen_jump (op1));
8238 emit_barrier ();
8239 emit_label (op0);
8240 start_cleanup_deferral ();
8241 if (temp != 0
8242 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8243 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8244 else
8245 expand_expr (TREE_OPERAND (exp, 2),
8246 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8247 }
8248
8249 end_cleanup_deferral ();
8250
8251 emit_queue ();
8252 emit_label (op1);
8253 OK_DEFER_POP;
8254
8255 return temp;
8256 }
8257
8258 case TARGET_EXPR:
8259 {
8260 /* Something needs to be initialized, but we didn't know
8261 where that thing was when building the tree. For example,
8262 it could be the return value of a function, or a parameter
8263 to a function which is laid out in the stack, or a temporary
8264 variable which must be passed by reference.
8265
8266 We guarantee that the expression will either be constructed
8267 or copied into our original target. */
8268
8269 tree slot = TREE_OPERAND (exp, 0);
8270 tree cleanups = NULL_TREE;
8271 tree exp1;
8272
8273 if (TREE_CODE (slot) != VAR_DECL)
8274 abort ();
8275
8276 if (! ignore)
8277 target = original_target;
8278
8279 /* Set this here so that if we get a target that refers to a
8280 register variable that's already been used, put_reg_into_stack
8281 knows that it should fix up those uses. */
8282 TREE_USED (slot) = 1;
8283
8284 if (target == 0)
8285 {
8286 if (DECL_RTL_SET_P (slot))
8287 {
8288 target = DECL_RTL (slot);
8289 /* If we have already expanded the slot, don't do
8290 it again. (mrs) */
8291 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8292 return target;
8293 }
8294 else
8295 {
8296 target = assign_temp (type, 2, 0, 1);
8297 /* All temp slots at this level must not conflict. */
8298 preserve_temp_slots (target);
8299 SET_DECL_RTL (slot, target);
8300 if (TREE_ADDRESSABLE (slot))
8301 put_var_into_stack (slot);
8302
8303 /* Since SLOT is not known to the called function
8304 to belong to its stack frame, we must build an explicit
8305 cleanup. This case occurs when we must build up a reference
8306 to pass the reference as an argument. In this case,
8307 it is very likely that such a reference need not be
8308 built here. */
8309
8310 if (TREE_OPERAND (exp, 2) == 0)
8311 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8312 cleanups = TREE_OPERAND (exp, 2);
8313 }
8314 }
8315 else
8316 {
8317 /* This case does occur when expanding a parameter which
8318 needs to be constructed on the stack. The target
8319 is the actual stack address that we want to initialize.
8320 The function we call will perform the cleanup in this case. */
8321
8322 /* If we have already assigned it space, use that space,
8323 not the target that we were passed in, as our target
8324 parameter is only a hint. */
8325 if (DECL_RTL_SET_P (slot))
8326 {
8327 target = DECL_RTL (slot);
8328 /* If we have already expanded the slot, don't do
8329 it again. (mrs) */
8330 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8331 return target;
8332 }
8333 else
8334 {
8335 SET_DECL_RTL (slot, target);
8336 /* If we must have an addressable slot, then make sure that
8337 the RTL that we just stored in slot is OK. */
8338 if (TREE_ADDRESSABLE (slot))
8339 put_var_into_stack (slot);
8340 }
8341 }
8342
8343 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8344 /* Mark it as expanded. */
8345 TREE_OPERAND (exp, 1) = NULL_TREE;
8346
8347 store_expr (exp1, target, 0);
8348
8349 expand_decl_cleanup (NULL_TREE, cleanups);
8350
8351 return target;
8352 }
8353
8354 case INIT_EXPR:
8355 {
8356 tree lhs = TREE_OPERAND (exp, 0);
8357 tree rhs = TREE_OPERAND (exp, 1);
8358
8359 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8360 return temp;
8361 }
8362
8363 case MODIFY_EXPR:
8364 {
8365 /* If lhs is complex, expand calls in rhs before computing it.
8366 That's so we don't compute a pointer and save it over a
8367 call. If lhs is simple, compute it first so we can give it
8368 as a target if the rhs is just a call. This avoids an
8369 extra temp and copy and that prevents a partial-subsumption
8370 which makes bad code. Actually we could treat
8371 component_ref's of vars like vars. */
8372
8373 tree lhs = TREE_OPERAND (exp, 0);
8374 tree rhs = TREE_OPERAND (exp, 1);
8375
8376 temp = 0;
8377
8378 /* Check for |= or &= of a bitfield of size one into another bitfield
8379 of size 1. In this case, (unless we need the result of the
8380 assignment) we can do this more efficiently with a
8381 test followed by an assignment, if necessary.
8382
8383 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8384 things change so we do, this code should be enhanced to
8385 support it. */
8386 if (ignore
8387 && TREE_CODE (lhs) == COMPONENT_REF
8388 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8389 || TREE_CODE (rhs) == BIT_AND_EXPR)
8390 && TREE_OPERAND (rhs, 0) == lhs
8391 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8392 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8393 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8394 {
8395 rtx label = gen_label_rtx ();
8396
8397 do_jump (TREE_OPERAND (rhs, 1),
8398 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8399 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8400 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8401 (TREE_CODE (rhs) == BIT_IOR_EXPR
8402 ? integer_one_node
8403 : integer_zero_node)),
8404 0, 0);
8405 do_pending_stack_adjust ();
8406 emit_label (label);
8407 return const0_rtx;
8408 }
8409
8410 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8411
8412 return temp;
8413 }
8414
8415 case RETURN_EXPR:
8416 if (!TREE_OPERAND (exp, 0))
8417 expand_null_return ();
8418 else
8419 expand_return (TREE_OPERAND (exp, 0));
8420 return const0_rtx;
8421
8422 case PREINCREMENT_EXPR:
8423 case PREDECREMENT_EXPR:
8424 return expand_increment (exp, 0, ignore);
8425
8426 case POSTINCREMENT_EXPR:
8427 case POSTDECREMENT_EXPR:
8428 /* Faster to treat as pre-increment if result is not used. */
8429 return expand_increment (exp, ! ignore, ignore);
8430
8431 case ADDR_EXPR:
8432 /* Are we taking the address of a nested function? */
8433 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8434 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8435 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8436 && ! TREE_STATIC (exp))
8437 {
8438 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8439 op0 = force_operand (op0, target);
8440 }
8441 /* If we are taking the address of something erroneous, just
8442 return a zero. */
8443 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8444 return const0_rtx;
8445 /* If we are taking the address of a constant and are at the
8446 top level, we have to use output_constant_def since we can't
8447 call force_const_mem at top level. */
8448 else if (cfun == 0
8449 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8450 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8451 == 'c')))
8452 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8453 else
8454 {
8455 /* We make sure to pass const0_rtx down if we came in with
8456 ignore set, to avoid doing the cleanups twice for something. */
8457 op0 = expand_expr (TREE_OPERAND (exp, 0),
8458 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8459 (modifier == EXPAND_INITIALIZER
8460 ? modifier : EXPAND_CONST_ADDRESS));
8461
8462 /* If we are going to ignore the result, OP0 will have been set
8463 to const0_rtx, so just return it. Don't get confused and
8464 think we are taking the address of the constant. */
8465 if (ignore)
8466 return op0;
8467
8468 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8469 clever and return a REG when given a MEM. */
8470 op0 = protect_from_queue (op0, 1);
8471
8472 /* We would like the object in memory. If it is a constant, we can
8473 have it be statically allocated into memory. For a non-constant,
8474 we need to allocate some memory and store the value into it. */
8475
8476 if (CONSTANT_P (op0))
8477 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8478 op0);
8479 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8480 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8481 || GET_CODE (op0) == PARALLEL)
8482 {
8483 /* If this object is in a register, it can't be BLKmode. */
8484 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8485 tree nt = build_qualified_type (inner_type,
8486 (TYPE_QUALS (inner_type)
8487 | TYPE_QUAL_CONST));
8488 rtx memloc = assign_temp (nt, 1, 1, 1);
8489
8490 if (GET_CODE (op0) == PARALLEL)
8491 /* Handle calls that pass values in multiple non-contiguous
8492 locations. The Irix 6 ABI has examples of this. */
8493 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8494 else
8495 emit_move_insn (memloc, op0);
8496
8497 op0 = memloc;
8498 }
8499
8500 if (GET_CODE (op0) != MEM)
8501 abort ();
8502
8503 mark_temp_addr_taken (op0);
8504 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8505 {
8506 op0 = XEXP (op0, 0);
8507 #ifdef POINTERS_EXTEND_UNSIGNED
8508 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8509 && mode == ptr_mode)
8510 op0 = convert_memory_address (ptr_mode, op0);
8511 #endif
8512 return op0;
8513 }
8514
8515 /* If OP0 is not aligned at least as much as the type requires, we
8516 need to make a temporary, copy OP0 to it, and take the address of
8517 the temporary. We want to use the alignment of the type, not of
8518 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8519 the test for BLKmode means that can't happen. The test for
8520 BLKmode is because we never make mis-aligned MEMs with
8521 non-BLKmode.
8522
8523 We don't need to do this at all if the machine doesn't have
8524 strict alignment. */
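/* E.g. (illustration) on a STRICT_ALIGNMENT target, taking the address
   of a byte-aligned BLKmode object whose type requires word alignment
   copies the object into a suitably aligned temporary and returns the
   address of that temporary instead.  */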
8525 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8526 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8527 > MEM_ALIGN (op0))
8528 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8529 {
8530 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8531 rtx new
8532 = assign_stack_temp_for_type
8533 (TYPE_MODE (inner_type),
8534 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8535 : int_size_in_bytes (inner_type),
8536 1, build_qualified_type (inner_type,
8537 (TYPE_QUALS (inner_type)
8538 | TYPE_QUAL_CONST)));
8539
8540 if (TYPE_ALIGN_OK (inner_type))
8541 abort ();
8542
8543 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8544 op0 = new;
8545 }
8546
8547 op0 = force_operand (XEXP (op0, 0), target);
8548 }
8549
8550 if (flag_force_addr && GET_CODE (op0) != REG)
8551 op0 = force_reg (Pmode, op0);
8552
8553 if (GET_CODE (op0) == REG
8554 && ! REG_USERVAR_P (op0))
8555 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8556
8557 #ifdef POINTERS_EXTEND_UNSIGNED
8558 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8559 && mode == ptr_mode)
8560 op0 = convert_memory_address (ptr_mode, op0);
8561 #endif
8562
8563 return op0;
8564
8565 case ENTRY_VALUE_EXPR:
8566 abort ();
8567
8568 /* COMPLEX type for Extended Pascal & Fortran */
8569 case COMPLEX_EXPR:
8570 {
8571 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8572 rtx insns;
8573
8574 /* Get the rtx code of the operands. */
8575 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8576 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8577
8578 if (! target)
8579 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8580
8581 start_sequence ();
8582
8583 /* Move the real (op0) and imaginary (op1) parts to their location. */
8584 emit_move_insn (gen_realpart (mode, target), op0);
8585 emit_move_insn (gen_imagpart (mode, target), op1);
8586
8587 insns = get_insns ();
8588 end_sequence ();
8589
8590 /* Complex construction should appear as a single unit. */
8591 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8592 each with a separate pseudo as destination.
8593 It's not correct for flow to treat them as a unit. */
8594 if (GET_CODE (target) != CONCAT)
8595 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8596 else
8597 emit_insns (insns);
8598
8599 return target;
8600 }
8601
8602 case REALPART_EXPR:
8603 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8604 return gen_realpart (mode, op0);
8605
8606 case IMAGPART_EXPR:
8607 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8608 return gen_imagpart (mode, op0);
8609
8610 case CONJ_EXPR:
8611 {
8612 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8613 rtx imag_t;
8614 rtx insns;
8615
8616 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8617
8618 if (! target)
8619 target = gen_reg_rtx (mode);
8620
8621 start_sequence ();
8622
8623 /* Store the realpart and the negated imagpart to target. */
8624 emit_move_insn (gen_realpart (partmode, target),
8625 gen_realpart (partmode, op0));
8626
8627 imag_t = gen_imagpart (partmode, target);
8628 temp = expand_unop (partmode,
8629 ! unsignedp && flag_trapv
8630 && (GET_MODE_CLASS(partmode) == MODE_INT)
8631 ? negv_optab : neg_optab,
8632 gen_imagpart (partmode, op0), imag_t, 0);
8633 if (temp != imag_t)
8634 emit_move_insn (imag_t, temp);
8635
8636 insns = get_insns ();
8637 end_sequence ();
8638
8639 /* Conjugate should appear as a single unit.
8640 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8641 each with a separate pseudo as destination.
8642 It's not correct for flow to treat them as a unit. */
8643 if (GET_CODE (target) != CONCAT)
8644 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8645 else
8646 emit_insns (insns);
8647
8648 return target;
8649 }
8650
8651 case TRY_CATCH_EXPR:
8652 {
8653 tree handler = TREE_OPERAND (exp, 1);
8654
8655 expand_eh_region_start ();
8656
8657 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8658
8659 expand_eh_region_end_cleanup (handler);
8660
8661 return op0;
8662 }
8663
8664 case TRY_FINALLY_EXPR:
8665 {
8666 tree try_block = TREE_OPERAND (exp, 0);
8667 tree finally_block = TREE_OPERAND (exp, 1);
8668 rtx finally_label = gen_label_rtx ();
8669 rtx done_label = gen_label_rtx ();
8670 rtx return_link = gen_reg_rtx (Pmode);
8671 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8672 (tree) finally_label, (tree) return_link);
8673 TREE_SIDE_EFFECTS (cleanup) = 1;
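	/* For illustration (label names here are made up): the code emitted
	   for this case should have roughly the shape

		<try-block code>
		return_link = &&resume; goto finally;  resume:
		goto done;
	      finally:
		<finally-block code>
		goto *return_link;
	      done:

	   The finally code is reached on the normal path via the
	   GOTO_SUBROUTINE_EXPR cleanup expanded by expand_end_bindings,
	   and in the same way on any other exit from the bindings.  */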
8674
8675 /* Start a new binding layer that will keep track of all cleanup
8676 actions to be performed. */
8677 expand_start_bindings (2);
8678
8679 target_temp_slot_level = temp_slot_level;
8680
8681 expand_decl_cleanup (NULL_TREE, cleanup);
8682 op0 = expand_expr (try_block, target, tmode, modifier);
8683
8684 preserve_temp_slots (op0);
8685 expand_end_bindings (NULL_TREE, 0, 0);
8686 emit_jump (done_label);
8687 emit_label (finally_label);
8688 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8689 emit_indirect_jump (return_link);
8690 emit_label (done_label);
8691 return op0;
8692 }
8693
8694 case GOTO_SUBROUTINE_EXPR:
8695 {
8696 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8697 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8698 rtx return_address = gen_label_rtx ();
8699 emit_move_insn (return_link,
8700 gen_rtx_LABEL_REF (Pmode, return_address));
8701 emit_jump (subr);
8702 emit_label (return_address);
8703 return const0_rtx;
8704 }
8705
8706 case VA_ARG_EXPR:
8707 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8708
8709 case EXC_PTR_EXPR:
8710 return get_exception_pointer (cfun);
8711
8712 case FDESC_EXPR:
8713       /* Function descriptors are not valid except as
8714 initialization constants, and should not be expanded. */
8715 abort ();
8716
8717 default:
8718 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8719 }
8720
8721 /* Here to do an ordinary binary operator, generating an instruction
8722 from the optab already placed in `this_optab'. */
8723 binop:
8724 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8725 subtarget = 0;
8726 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8727 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8728 binop2:
8729 temp = expand_binop (mode, this_optab, op0, op1, target,
8730 unsignedp, OPTAB_LIB_WIDEN);
8731 if (temp == 0)
8732 abort ();
8733 return temp;
8734 }
8735 \f
8736 /* Return the tree node if ARG corresponds to a string constant or zero
8737 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8738 in bytes within the string that ARG is accessing. The type of the
8739 offset will be `sizetype'. */
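
   /* For example, given an ARG representing "hello" + 3 -- a PLUS_EXPR whose
      first operand is the ADDR_EXPR of the STRING_CST "hello" -- this should
      return that STRING_CST and set *PTR_OFFSET to (sizetype) 3; for a bare
      ADDR_EXPR of a STRING_CST it returns the STRING_CST with *PTR_OFFSET
      equal to size_zero_node.  */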
8740
8741 tree
8742 string_constant (arg, ptr_offset)
8743 tree arg;
8744 tree *ptr_offset;
8745 {
8746 STRIP_NOPS (arg);
8747
8748 if (TREE_CODE (arg) == ADDR_EXPR
8749 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8750 {
8751 *ptr_offset = size_zero_node;
8752 return TREE_OPERAND (arg, 0);
8753 }
8754 else if (TREE_CODE (arg) == PLUS_EXPR)
8755 {
8756 tree arg0 = TREE_OPERAND (arg, 0);
8757 tree arg1 = TREE_OPERAND (arg, 1);
8758
8759 STRIP_NOPS (arg0);
8760 STRIP_NOPS (arg1);
8761
8762 if (TREE_CODE (arg0) == ADDR_EXPR
8763 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8764 {
8765 *ptr_offset = convert (sizetype, arg1);
8766 return TREE_OPERAND (arg0, 0);
8767 }
8768 else if (TREE_CODE (arg1) == ADDR_EXPR
8769 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8770 {
8771 *ptr_offset = convert (sizetype, arg0);
8772 return TREE_OPERAND (arg1, 0);
8773 }
8774 }
8775
8776 return 0;
8777 }
8778 \f
8779 /* Expand code for a post- or pre- increment or decrement
8780 and return the RTX for the result.
8781 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
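
/* For example, for `i++' whose value is used (POST == 1, IGNORE == 0) the
   caller gets an rtx for the old value of `i'; when the target has a
   suitable add pattern the increment itself is only queued with
   enqueue_insn, so it takes effect after the value has been used.  For
   `++i' (POST == 0) the rtx returned already reflects the new value.  */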
8782
8783 static rtx
8784 expand_increment (exp, post, ignore)
8785 tree exp;
8786 int post, ignore;
8787 {
8788 rtx op0, op1;
8789 rtx temp, value;
8790 tree incremented = TREE_OPERAND (exp, 0);
8791 optab this_optab = add_optab;
8792 int icode;
8793 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8794 int op0_is_copy = 0;
8795 int single_insn = 0;
8796 /* 1 means we can't store into OP0 directly,
8797 because it is a subreg narrower than a word,
8798 and we don't dare clobber the rest of the word. */
8799 int bad_subreg = 0;
8800
8801 /* Stabilize any component ref that might need to be
8802 evaluated more than once below. */
8803 if (!post
8804 || TREE_CODE (incremented) == BIT_FIELD_REF
8805 || (TREE_CODE (incremented) == COMPONENT_REF
8806 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8807 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8808 incremented = stabilize_reference (incremented);
8809 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8810 ones into save exprs so that they don't accidentally get evaluated
8811 more than once by the code below. */
8812 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8813 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8814 incremented = save_expr (incremented);
8815
8816 /* Compute the operands as RTX.
8817 Note whether OP0 is the actual lvalue or a copy of it:
8818 I believe it is a copy iff it is a register or subreg
8819 and insns were generated in computing it. */
8820
8821 temp = get_last_insn ();
8822 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8823
8824 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8825 in place but instead must do sign- or zero-extension during assignment,
8826 so we copy it into a new register and let the code below use it as
8827 a copy.
8828
8829      Note that we can safely modify this SUBREG since it is known not to be
8830 shared (it was made by the expand_expr call above). */
8831
8832 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8833 {
8834 if (post)
8835 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8836 else
8837 bad_subreg = 1;
8838 }
8839 else if (GET_CODE (op0) == SUBREG
8840 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8841 {
8842 /* We cannot increment this SUBREG in place. If we are
8843 post-incrementing, get a copy of the old value. Otherwise,
8844 just mark that we cannot increment in place. */
8845 if (post)
8846 op0 = copy_to_reg (op0);
8847 else
8848 bad_subreg = 1;
8849 }
8850
8851 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8852 && temp != get_last_insn ());
8853 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8854
8855 /* Decide whether incrementing or decrementing. */
8856 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8857 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8858 this_optab = sub_optab;
8859
8860 /* Convert decrement by a constant into a negative increment. */
8861 if (this_optab == sub_optab
8862 && GET_CODE (op1) == CONST_INT)
8863 {
8864 op1 = GEN_INT (-INTVAL (op1));
8865 this_optab = add_optab;
8866 }
8867
8868 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
8869 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
8870
8871 /* For a preincrement, see if we can do this with a single instruction. */
8872 if (!post)
8873 {
8874 icode = (int) this_optab->handlers[(int) mode].insn_code;
8875 if (icode != (int) CODE_FOR_nothing
8876 /* Make sure that OP0 is valid for operands 0 and 1
8877 of the insn we want to queue. */
8878 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8879 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8880 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8881 single_insn = 1;
8882 }
8883
8884 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8885 then we cannot just increment OP0. We must therefore contrive to
8886 increment the original value. Then, for postincrement, we can return
8887 OP0 since it is a copy of the old value. For preincrement, expand here
8888 unless we can do it with a single insn.
8889
8890 Likewise if storing directly into OP0 would clobber high bits
8891 we need to preserve (bad_subreg). */
8892 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8893 {
8894 /* This is the easiest way to increment the value wherever it is.
8895 Problems with multiple evaluation of INCREMENTED are prevented
8896 because either (1) it is a component_ref or preincrement,
8897 in which case it was stabilized above, or (2) it is an array_ref
8898 with constant index in an array in a register, which is
8899 safe to reevaluate. */
8900 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8901 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8902 ? MINUS_EXPR : PLUS_EXPR),
8903 TREE_TYPE (exp),
8904 incremented,
8905 TREE_OPERAND (exp, 1));
8906
8907 while (TREE_CODE (incremented) == NOP_EXPR
8908 || TREE_CODE (incremented) == CONVERT_EXPR)
8909 {
8910 newexp = convert (TREE_TYPE (incremented), newexp);
8911 incremented = TREE_OPERAND (incremented, 0);
8912 }
8913
8914       temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8915 return post ? op0 : temp;
8916 }
8917
8918 if (post)
8919 {
8920 /* We have a true reference to the value in OP0.
8921 If there is an insn to add or subtract in this mode, queue it.
8922 Queueing the increment insn avoids the register shuffling
8923 that often results if we must increment now and first save
8924 the old value for subsequent use. */
8925
8926 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8927 op0 = stabilize (op0);
8928 #endif
8929
8930 icode = (int) this_optab->handlers[(int) mode].insn_code;
8931 if (icode != (int) CODE_FOR_nothing
8932 /* Make sure that OP0 is valid for operands 0 and 1
8933 of the insn we want to queue. */
8934 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8935 && (*insn_data[icode].operand[1].predicate) (op0, mode))
8936 {
8937 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8938 op1 = force_reg (mode, op1);
8939
8940 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8941 }
8942 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8943 {
8944 rtx addr = (general_operand (XEXP (op0, 0), mode)
8945 ? force_reg (Pmode, XEXP (op0, 0))
8946 : copy_to_reg (XEXP (op0, 0)));
8947 rtx temp, result;
8948
8949 op0 = replace_equiv_address (op0, addr);
8950 temp = force_reg (GET_MODE (op0), op0);
8951 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
8952 op1 = force_reg (mode, op1);
8953
8954 /* The increment queue is LIFO, thus we have to `queue'
8955 the instructions in reverse order. */
8956 enqueue_insn (op0, gen_move_insn (op0, temp));
8957 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8958 return result;
8959 }
8960 }
8961
8962 /* Preincrement, or we can't increment with one simple insn. */
8963 if (post)
8964 /* Save a copy of the value before inc or dec, to return it later. */
8965 temp = value = copy_to_reg (op0);
8966 else
8967 /* Arrange to return the incremented value. */
8968 /* Copy the rtx because expand_binop will protect from the queue,
8969 and the results of that would be invalid for us to return
8970 if our caller does emit_queue before using our result. */
8971 temp = copy_rtx (value = op0);
8972
8973 /* Increment however we can. */
8974 op1 = expand_binop (mode, this_optab, value, op1, op0,
8975 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8976
8977 /* Make sure the value is stored into OP0. */
8978 if (op1 != op0)
8979 emit_move_insn (op0, op1);
8980
8981 return temp;
8982 }
8983 \f
8984 /* At the start of a function, record that we have no previously-pushed
8985 arguments waiting to be popped. */
8986
8987 void
8988 init_pending_stack_adjust ()
8989 {
8990 pending_stack_adjust = 0;
8991 }
8992
8993 /* When exiting from a function, if safe, clear out any pending stack adjust
8994 so the adjustment won't get done.
8995
8996 Note, if the current function calls alloca, then it must have a
8997 frame pointer regardless of the value of flag_omit_frame_pointer. */
8998
8999 void
9000 clear_pending_stack_adjust ()
9001 {
9002 #ifdef EXIT_IGNORE_STACK
9003 if (optimize > 0
9004 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9005 && EXIT_IGNORE_STACK
9006 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9007 && ! flag_inline_functions)
9008 {
9009       stack_pointer_delta -= pending_stack_adjust;
9010       pending_stack_adjust = 0;
9011 }
9012 #endif
9013 }
9014
9015 /* Pop any previously-pushed arguments that have not been popped yet. */
9016
9017 void
9018 do_pending_stack_adjust ()
9019 {
9020 if (inhibit_defer_pop == 0)
9021 {
9022 if (pending_stack_adjust != 0)
9023 adjust_stack (GEN_INT (pending_stack_adjust));
9024 pending_stack_adjust = 0;
9025 }
9026 }
9027 \f
9028 /* Expand conditional expressions. */
9029
9030 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9031 LABEL is an rtx of code CODE_LABEL, in this function and all the
9032 functions here. */
9033
9034 void
9035 jumpifnot (exp, label)
9036 tree exp;
9037 rtx label;
9038 {
9039 do_jump (exp, label, NULL_RTX);
9040 }
9041
9042 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9043
9044 void
9045 jumpif (exp, label)
9046 tree exp;
9047 rtx label;
9048 {
9049 do_jump (exp, NULL_RTX, label);
9050 }
9051
9052 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9053 the result is zero, or IF_TRUE_LABEL if the result is one.
9054 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9055 meaning fall through in that case.
9056
9057 do_jump always does any pending stack adjust except when it does not
9058 actually perform a jump. An example where there is no jump
9059 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9060
9061 This function is responsible for optimizing cases such as
9062 &&, || and comparison operators in EXP. */
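
/* For example, for `if (a && b)' the TRUTH_ANDIF_EXPR case below first
   calls do_jump on `a' with only the false label set, then on `b' with both
   labels; the value of `a && b' is never materialized in a register.  */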
9063
9064 void
9065 do_jump (exp, if_false_label, if_true_label)
9066 tree exp;
9067 rtx if_false_label, if_true_label;
9068 {
9069 enum tree_code code = TREE_CODE (exp);
9070 /* Some cases need to create a label to jump to
9071 in order to properly fall through.
9072 These cases set DROP_THROUGH_LABEL nonzero. */
9073 rtx drop_through_label = 0;
9074 rtx temp;
9075 int i;
9076 tree type;
9077 enum machine_mode mode;
9078
9079 #ifdef MAX_INTEGER_COMPUTATION_MODE
9080 check_max_integer_computation_mode (exp);
9081 #endif
9082
9083 emit_queue ();
9084
9085 switch (code)
9086 {
9087 case ERROR_MARK:
9088 break;
9089
9090 case INTEGER_CST:
9091 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9092 if (temp)
9093 emit_jump (temp);
9094 break;
9095
9096 #if 0
9097 /* This is not true with #pragma weak */
9098 case ADDR_EXPR:
9099 /* The address of something can never be zero. */
9100 if (if_true_label)
9101 emit_jump (if_true_label);
9102 break;
9103 #endif
9104
9105 case NOP_EXPR:
9106 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9107 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9108 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9109 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9110 goto normal;
9111 case CONVERT_EXPR:
9112 /* If we are narrowing the operand, we have to do the compare in the
9113 narrower mode. */
9114 if ((TYPE_PRECISION (TREE_TYPE (exp))
9115 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9116 goto normal;
9117 case NON_LVALUE_EXPR:
9118 case REFERENCE_EXPR:
9119 case ABS_EXPR:
9120 case NEGATE_EXPR:
9121 case LROTATE_EXPR:
9122 case RROTATE_EXPR:
9123 /* These cannot change zero->non-zero or vice versa. */
9124 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9125 break;
9126
9127 case WITH_RECORD_EXPR:
9128 /* Put the object on the placeholder list, recurse through our first
9129 operand, and pop the list. */
9130 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9131 placeholder_list);
9132 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9133 placeholder_list = TREE_CHAIN (placeholder_list);
9134 break;
9135
9136 #if 0
9137   /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9138 a test and can be longer if the test is eliminated. */
9139 case PLUS_EXPR:
9140 /* Reduce to minus. */
9141 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9142 TREE_OPERAND (exp, 0),
9143 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9144 TREE_OPERAND (exp, 1))));
9145 /* Process as MINUS. */
9146 #endif
9147
9148 case MINUS_EXPR:
9149 /* Non-zero iff operands of minus differ. */
9150 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9151 TREE_OPERAND (exp, 0),
9152 TREE_OPERAND (exp, 1)),
9153 NE, NE, if_false_label, if_true_label);
9154 break;
9155
9156 case BIT_AND_EXPR:
9157 /* If we are AND'ing with a small constant, do this comparison in the
9158 smallest type that fits. If the machine doesn't have comparisons
9159 that small, it will be converted back to the wider comparison.
9160 This helps if we are testing the sign bit of a narrower object.
9161 combine can't do this for us because it can't know whether a
9162 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
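
      /* For example, testing `x & 0x80' where `x' is a 32-bit int gives
	 i == 7, so the test can be re-expanded in an 8-bit unsigned type
	 and done with a QImode comparison, provided the target has one
	 and SLOW_BYTE_ACCESS is not set.  */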
9163
9164 if (! SLOW_BYTE_ACCESS
9165 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9166 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9167 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9168 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9169 && (type = type_for_mode (mode, 1)) != 0
9170 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9171 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9172 != CODE_FOR_nothing))
9173 {
9174 do_jump (convert (type, exp), if_false_label, if_true_label);
9175 break;
9176 }
9177 goto normal;
9178
9179 case TRUTH_NOT_EXPR:
9180 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9181 break;
9182
9183 case TRUTH_ANDIF_EXPR:
9184 if (if_false_label == 0)
9185 if_false_label = drop_through_label = gen_label_rtx ();
9186 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9187 start_cleanup_deferral ();
9188 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9189 end_cleanup_deferral ();
9190 break;
9191
9192 case TRUTH_ORIF_EXPR:
9193 if (if_true_label == 0)
9194 if_true_label = drop_through_label = gen_label_rtx ();
9195 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9196 start_cleanup_deferral ();
9197 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9198 end_cleanup_deferral ();
9199 break;
9200
9201 case COMPOUND_EXPR:
9202 push_temp_slots ();
9203 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9204 preserve_temp_slots (NULL_RTX);
9205 free_temp_slots ();
9206 pop_temp_slots ();
9207 emit_queue ();
9208 do_pending_stack_adjust ();
9209 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9210 break;
9211
9212 case COMPONENT_REF:
9213 case BIT_FIELD_REF:
9214 case ARRAY_REF:
9215 case ARRAY_RANGE_REF:
9216 {
9217 HOST_WIDE_INT bitsize, bitpos;
9218 int unsignedp;
9219 enum machine_mode mode;
9220 tree type;
9221 tree offset;
9222 int volatilep = 0;
9223
9224 /* Get description of this reference. We don't actually care
9225 about the underlying object here. */
9226 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9227 &unsignedp, &volatilep);
9228
9229 type = type_for_size (bitsize, unsignedp);
9230 if (! SLOW_BYTE_ACCESS
9231 && type != 0 && bitsize >= 0
9232 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9233 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9234 != CODE_FOR_nothing))
9235 {
9236 do_jump (convert (type, exp), if_false_label, if_true_label);
9237 break;
9238 }
9239 goto normal;
9240 }
9241
9242 case COND_EXPR:
9243 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9244 if (integer_onep (TREE_OPERAND (exp, 1))
9245 && integer_zerop (TREE_OPERAND (exp, 2)))
9246 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9247
9248 else if (integer_zerop (TREE_OPERAND (exp, 1))
9249 && integer_onep (TREE_OPERAND (exp, 2)))
9250 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9251
9252 else
9253 {
9254 rtx label1 = gen_label_rtx ();
9255 drop_through_label = gen_label_rtx ();
9256
9257 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9258
9259 start_cleanup_deferral ();
9260 /* Now the THEN-expression. */
9261 do_jump (TREE_OPERAND (exp, 1),
9262 if_false_label ? if_false_label : drop_through_label,
9263 if_true_label ? if_true_label : drop_through_label);
9264 /* In case the do_jump just above never jumps. */
9265 do_pending_stack_adjust ();
9266 emit_label (label1);
9267
9268 /* Now the ELSE-expression. */
9269 do_jump (TREE_OPERAND (exp, 2),
9270 if_false_label ? if_false_label : drop_through_label,
9271 if_true_label ? if_true_label : drop_through_label);
9272 end_cleanup_deferral ();
9273 }
9274 break;
9275
9276 case EQ_EXPR:
9277 {
9278 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9279
9280 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9281 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9282 {
9283 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9284 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9285 do_jump
9286 (fold
9287 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9288 fold (build (EQ_EXPR, TREE_TYPE (exp),
9289 fold (build1 (REALPART_EXPR,
9290 TREE_TYPE (inner_type),
9291 exp0)),
9292 fold (build1 (REALPART_EXPR,
9293 TREE_TYPE (inner_type),
9294 exp1)))),
9295 fold (build (EQ_EXPR, TREE_TYPE (exp),
9296 fold (build1 (IMAGPART_EXPR,
9297 TREE_TYPE (inner_type),
9298 exp0)),
9299 fold (build1 (IMAGPART_EXPR,
9300 TREE_TYPE (inner_type),
9301 exp1)))))),
9302 if_false_label, if_true_label);
9303 }
9304
9305 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9306 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9307
9308 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9309 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9310 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9311 else
9312 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9313 break;
9314 }
9315
9316 case NE_EXPR:
9317 {
9318 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9319
9320 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9321 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9322 {
9323 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9324 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9325 do_jump
9326 (fold
9327 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9328 fold (build (NE_EXPR, TREE_TYPE (exp),
9329 fold (build1 (REALPART_EXPR,
9330 TREE_TYPE (inner_type),
9331 exp0)),
9332 fold (build1 (REALPART_EXPR,
9333 TREE_TYPE (inner_type),
9334 exp1)))),
9335 fold (build (NE_EXPR, TREE_TYPE (exp),
9336 fold (build1 (IMAGPART_EXPR,
9337 TREE_TYPE (inner_type),
9338 exp0)),
9339 fold (build1 (IMAGPART_EXPR,
9340 TREE_TYPE (inner_type),
9341 exp1)))))),
9342 if_false_label, if_true_label);
9343 }
9344
9345 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9346 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9347
9348 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9349 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9350 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9351 else
9352 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9353 break;
9354 }
9355
9356 case LT_EXPR:
9357 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9358 if (GET_MODE_CLASS (mode) == MODE_INT
9359 && ! can_compare_p (LT, mode, ccp_jump))
9360 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9361 else
9362 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9363 break;
9364
9365 case LE_EXPR:
9366 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9367 if (GET_MODE_CLASS (mode) == MODE_INT
9368 && ! can_compare_p (LE, mode, ccp_jump))
9369 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9370 else
9371 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9372 break;
9373
9374 case GT_EXPR:
9375 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9376 if (GET_MODE_CLASS (mode) == MODE_INT
9377 && ! can_compare_p (GT, mode, ccp_jump))
9378 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9379 else
9380 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9381 break;
9382
9383 case GE_EXPR:
9384 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9385 if (GET_MODE_CLASS (mode) == MODE_INT
9386 && ! can_compare_p (GE, mode, ccp_jump))
9387 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9388 else
9389 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9390 break;
9391
9392 case UNORDERED_EXPR:
9393 case ORDERED_EXPR:
9394 {
9395 enum rtx_code cmp, rcmp;
9396 int do_rev;
9397
9398 if (code == UNORDERED_EXPR)
9399 cmp = UNORDERED, rcmp = ORDERED;
9400 else
9401 cmp = ORDERED, rcmp = UNORDERED;
9402 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9403
9404 do_rev = 0;
9405 if (! can_compare_p (cmp, mode, ccp_jump)
9406 && (can_compare_p (rcmp, mode, ccp_jump)
9407 /* If the target doesn't provide either UNORDERED or ORDERED
9408 comparisons, canonicalize on UNORDERED for the library. */
9409 || rcmp == UNORDERED))
9410 do_rev = 1;
9411
9412 if (! do_rev)
9413 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9414 else
9415 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9416 }
9417 break;
9418
9419 {
9420 enum rtx_code rcode1;
9421 enum tree_code tcode2;
9422
9423 case UNLT_EXPR:
9424 rcode1 = UNLT;
9425 tcode2 = LT_EXPR;
9426 goto unordered_bcc;
9427 case UNLE_EXPR:
9428 rcode1 = UNLE;
9429 tcode2 = LE_EXPR;
9430 goto unordered_bcc;
9431 case UNGT_EXPR:
9432 rcode1 = UNGT;
9433 tcode2 = GT_EXPR;
9434 goto unordered_bcc;
9435 case UNGE_EXPR:
9436 rcode1 = UNGE;
9437 tcode2 = GE_EXPR;
9438 goto unordered_bcc;
9439 case UNEQ_EXPR:
9440 rcode1 = UNEQ;
9441 tcode2 = EQ_EXPR;
9442 goto unordered_bcc;
9443
9444 unordered_bcc:
9445 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9446 if (can_compare_p (rcode1, mode, ccp_jump))
9447 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9448 if_true_label);
9449 else
9450 {
9451 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9452 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9453 tree cmp0, cmp1;
9454
9455 /* If the target doesn't support combined unordered
9456 compares, decompose into UNORDERED + comparison. */
9457 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9458 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9459 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9460 do_jump (exp, if_false_label, if_true_label);
9461 }
9462 }
9463 break;
9464
9465 /* Special case:
9466 __builtin_expect (<test>, 0) and
9467 __builtin_expect (<test>, 1)
9468
9469 We need to do this here, so that <test> is not converted to a SCC
9470 operation on machines that use condition code registers and COMPARE
9471 like the PowerPC, and then the jump is done based on whether the SCC
9472 operation produced a 1 or 0. */
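  /* For example, for `if (__builtin_expect (x == 0, 0))' the call to
     expand_builtin_expect_jump below should emit the comparison and the
     jump directly, so the hint can guide branch prediction, instead of
     first turning `x == 0' into an SCC value.  */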
9473 case CALL_EXPR:
9474 /* Check for a built-in function. */
9475 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9476 {
9477 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9478 tree arglist = TREE_OPERAND (exp, 1);
9479
9480 if (TREE_CODE (fndecl) == FUNCTION_DECL
9481 && DECL_BUILT_IN (fndecl)
9482 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9483 && arglist != NULL_TREE
9484 && TREE_CHAIN (arglist) != NULL_TREE)
9485 {
9486 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9487 if_true_label);
9488
9489 if (seq != NULL_RTX)
9490 {
9491 emit_insn (seq);
9492 return;
9493 }
9494 }
9495 }
9496 /* fall through and generate the normal code. */
9497
9498 default:
9499 normal:
9500 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9501 #if 0
9502 /* This is not needed any more and causes poor code since it causes
9503 comparisons and tests from non-SI objects to have different code
9504 sequences. */
9505 /* Copy to register to avoid generating bad insns by cse
9506 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9507 if (!cse_not_expected && GET_CODE (temp) == MEM)
9508 temp = copy_to_reg (temp);
9509 #endif
9510 do_pending_stack_adjust ();
9511 /* Do any postincrements in the expression that was tested. */
9512 emit_queue ();
9513
9514 if (GET_CODE (temp) == CONST_INT
9515 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9516 || GET_CODE (temp) == LABEL_REF)
9517 {
9518 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9519 if (target)
9520 emit_jump (target);
9521 }
9522 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9523 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9524 /* Note swapping the labels gives us not-equal. */
9525 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9526 else if (GET_MODE (temp) != VOIDmode)
9527 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9528 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9529 GET_MODE (temp), NULL_RTX,
9530 if_false_label, if_true_label);
9531 else
9532 abort ();
9533 }
9534
9535 if (drop_through_label)
9536 {
9537 /* If do_jump produces code that might be jumped around,
9538 do any stack adjusts from that code, before the place
9539 where control merges in. */
9540 do_pending_stack_adjust ();
9541 emit_label (drop_through_label);
9542 }
9543 }
9544 \f
9545 /* Given a comparison expression EXP for values too wide to be compared
9546 with one insn, test the comparison and jump to the appropriate label.
9547 The code of EXP is ignored; we always test GT if SWAP is 0,
9548 and LT if SWAP is 1. */
9549
9550 static void
9551 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9552 tree exp;
9553 int swap;
9554 rtx if_false_label, if_true_label;
9555 {
9556 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9557 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9558 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9559 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9560
9561 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9562 }
9563
9564 /* Compare OP0 with OP1, word at a time, in mode MODE.
9565 UNSIGNEDP says to do unsigned comparison.
9566 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
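
/* For illustration, for a two-word comparison (e.g. DImode on a 32-bit
   target) the loop below emits roughly:

	if (high (op0) >  high (op1)) goto if_true_label;
	if (high (op0) != high (op1)) goto if_false_label;
	if (low (op0)  >  low (op1))  goto if_true_label;
	goto if_false_label;

   where the high-word `>' test is signed or unsigned per UNSIGNEDP and the
   low-word test is always unsigned.  */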
9567
9568 void
9569 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9570 enum machine_mode mode;
9571 int unsignedp;
9572 rtx op0, op1;
9573 rtx if_false_label, if_true_label;
9574 {
9575 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9576 rtx drop_through_label = 0;
9577 int i;
9578
9579 if (! if_true_label || ! if_false_label)
9580 drop_through_label = gen_label_rtx ();
9581 if (! if_true_label)
9582 if_true_label = drop_through_label;
9583 if (! if_false_label)
9584 if_false_label = drop_through_label;
9585
9586 /* Compare a word at a time, high order first. */
9587 for (i = 0; i < nwords; i++)
9588 {
9589 rtx op0_word, op1_word;
9590
9591 if (WORDS_BIG_ENDIAN)
9592 {
9593 op0_word = operand_subword_force (op0, i, mode);
9594 op1_word = operand_subword_force (op1, i, mode);
9595 }
9596 else
9597 {
9598 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9599 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9600 }
9601
9602 /* All but high-order word must be compared as unsigned. */
9603 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9604 (unsignedp || i > 0), word_mode, NULL_RTX,
9605 NULL_RTX, if_true_label);
9606
9607 /* Consider lower words only if these are equal. */
9608 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9609 NULL_RTX, NULL_RTX, if_false_label);
9610 }
9611
9612 if (if_false_label)
9613 emit_jump (if_false_label);
9614 if (drop_through_label)
9615 emit_label (drop_through_label);
9616 }
9617
9618 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9619 with one insn, test the comparison and jump to the appropriate label. */
9620
9621 static void
9622 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9623 tree exp;
9624 rtx if_false_label, if_true_label;
9625 {
9626 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9627 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9628 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9629 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9630 int i;
9631 rtx drop_through_label = 0;
9632
9633 if (! if_false_label)
9634 drop_through_label = if_false_label = gen_label_rtx ();
9635
9636 for (i = 0; i < nwords; i++)
9637 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9638 operand_subword_force (op1, i, mode),
9639 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9640 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9641
9642 if (if_true_label)
9643 emit_jump (if_true_label);
9644 if (drop_through_label)
9645 emit_label (drop_through_label);
9646 }
9647 \f
9648 /* Jump according to whether OP0 is 0.
9649 We assume that OP0 has an integer mode that is too wide
9650 for the available compare insns. */
9651
9652 void
9653 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9654 rtx op0;
9655 rtx if_false_label, if_true_label;
9656 {
9657 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9658 rtx part;
9659 int i;
9660 rtx drop_through_label = 0;
9661
9662 /* The fastest way of doing this comparison on almost any machine is to
9663 "or" all the words and compare the result. If all have to be loaded
9664 from memory and this is a very wide item, it's possible this may
9665 be slower, but that's highly unlikely. */
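
  /* For example, for a DImode OP0 on a 32-bit target this computes
     part = low_word | high_word and then a single word_mode test of
     `part == 0' decides which label is taken.  */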
9666
9667 part = gen_reg_rtx (word_mode);
9668 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9669 for (i = 1; i < nwords && part != 0; i++)
9670 part = expand_binop (word_mode, ior_optab, part,
9671 operand_subword_force (op0, i, GET_MODE (op0)),
9672 part, 1, OPTAB_WIDEN);
9673
9674 if (part != 0)
9675 {
9676 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9677 NULL_RTX, if_false_label, if_true_label);
9678
9679 return;
9680 }
9681
9682 /* If we couldn't do the "or" simply, do this with a series of compares. */
9683 if (! if_false_label)
9684 drop_through_label = if_false_label = gen_label_rtx ();
9685
9686 for (i = 0; i < nwords; i++)
9687 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9688 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9689 if_false_label, NULL_RTX);
9690
9691 if (if_true_label)
9692 emit_jump (if_true_label);
9693
9694 if (drop_through_label)
9695 emit_label (drop_through_label);
9696 }
9697 \f
9698 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9699 (including code to compute the values to be compared)
9700 and set (CC0) according to the result.
9701 The decision as to signed or unsigned comparison must be made by the caller.
9702
9703 We force a stack adjustment unless there are currently
9704 things pushed on the stack that aren't yet used.
9705
9706 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9707 compared. */
9708
9709 rtx
9710 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9711 rtx op0, op1;
9712 enum rtx_code code;
9713 int unsignedp;
9714 enum machine_mode mode;
9715 rtx size;
9716 {
9717 rtx tem;
9718
9719 /* If one operand is constant, make it the second one. Only do this
9720 if the other operand is not constant as well. */
9721
9722 if (swap_commutative_operands_p (op0, op1))
9723 {
9724 tem = op0;
9725 op0 = op1;
9726 op1 = tem;
9727 code = swap_condition (code);
9728 }
9729
9730 if (flag_force_mem)
9731 {
9732 op0 = force_not_mem (op0);
9733 op1 = force_not_mem (op1);
9734 }
9735
9736 do_pending_stack_adjust ();
9737
9738 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9739 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9740 return tem;
9741
9742 #if 0
9743 /* There's no need to do this now that combine.c can eliminate lots of
9744 sign extensions. This can be less efficient in certain cases on other
9745 machines. */
9746
9747 /* If this is a signed equality comparison, we can do it as an
9748 unsigned comparison since zero-extension is cheaper than sign
9749 extension and comparisons with zero are done as unsigned. This is
9750 the case even on machines that can do fast sign extension, since
9751 zero-extension is easier to combine with other operations than
9752 sign-extension is. If we are comparing against a constant, we must
9753 convert it to what it would look like unsigned. */
9754 if ((code == EQ || code == NE) && ! unsignedp
9755 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9756 {
9757 if (GET_CODE (op1) == CONST_INT
9758 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9759 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9760 unsignedp = 1;
9761 }
9762 #endif
9763
9764 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9765
9766 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9767 }
9768
9769 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9770 The decision as to signed or unsigned comparison must be made by the caller.
9771
9772 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9773 compared. */
9774
9775 void
9776 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9777 if_false_label, if_true_label)
9778 rtx op0, op1;
9779 enum rtx_code code;
9780 int unsignedp;
9781 enum machine_mode mode;
9782 rtx size;
9783 rtx if_false_label, if_true_label;
9784 {
9785 rtx tem;
9786 int dummy_true_label = 0;
9787
9788 /* Reverse the comparison if that is safe and we want to jump if it is
9789 false. */
9790 if (! if_true_label && ! FLOAT_MODE_P (mode))
9791 {
9792 if_true_label = if_false_label;
9793 if_false_label = 0;
9794 code = reverse_condition (code);
9795 }
9796
9797 /* If one operand is constant, make it the second one. Only do this
9798 if the other operand is not constant as well. */
9799
9800 if (swap_commutative_operands_p (op0, op1))
9801 {
9802 tem = op0;
9803 op0 = op1;
9804 op1 = tem;
9805 code = swap_condition (code);
9806 }
9807
9808 if (flag_force_mem)
9809 {
9810 op0 = force_not_mem (op0);
9811 op1 = force_not_mem (op1);
9812 }
9813
9814 do_pending_stack_adjust ();
9815
9816 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9817 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9818 {
9819 if (tem == const_true_rtx)
9820 {
9821 if (if_true_label)
9822 emit_jump (if_true_label);
9823 }
9824 else
9825 {
9826 if (if_false_label)
9827 emit_jump (if_false_label);
9828 }
9829 return;
9830 }
9831
9832 #if 0
9833 /* There's no need to do this now that combine.c can eliminate lots of
9834 sign extensions. This can be less efficient in certain cases on other
9835 machines. */
9836
9837 /* If this is a signed equality comparison, we can do it as an
9838 unsigned comparison since zero-extension is cheaper than sign
9839 extension and comparisons with zero are done as unsigned. This is
9840 the case even on machines that can do fast sign extension, since
9841 zero-extension is easier to combine with other operations than
9842 sign-extension is. If we are comparing against a constant, we must
9843 convert it to what it would look like unsigned. */
9844 if ((code == EQ || code == NE) && ! unsignedp
9845 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9846 {
9847 if (GET_CODE (op1) == CONST_INT
9848 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9849 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9850 unsignedp = 1;
9851 }
9852 #endif
9853
9854 if (! if_true_label)
9855 {
9856 dummy_true_label = 1;
9857 if_true_label = gen_label_rtx ();
9858 }
9859
9860 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
9861 if_true_label);
9862
9863 if (if_false_label)
9864 emit_jump (if_false_label);
9865 if (dummy_true_label)
9866 emit_label (if_true_label);
9867 }
9868
9869 /* Generate code for a comparison expression EXP (including code to compute
9870 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9871 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9872 generated code will drop through.
9873 SIGNED_CODE should be the rtx operation for this comparison for
9874 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9875
9876 We force a stack adjustment unless there are currently
9877 things pushed on the stack that aren't yet used. */
9878
9879 static void
9880 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9881 if_true_label)
9882 tree exp;
9883 enum rtx_code signed_code, unsigned_code;
9884 rtx if_false_label, if_true_label;
9885 {
9886 rtx op0, op1;
9887 tree type;
9888 enum machine_mode mode;
9889 int unsignedp;
9890 enum rtx_code code;
9891
9892 /* Don't crash if the comparison was erroneous. */
9893 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9894 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9895 return;
9896
9897 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9898 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
9899 return;
9900
9901 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9902 mode = TYPE_MODE (type);
9903 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
9904 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
9905 || (GET_MODE_BITSIZE (mode)
9906 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
9907 1)))))))
9908 {
9909       /* op0 might have been replaced by a promoted constant, in which
9910 	 case the type of the second argument should be used.  */
9911 type = TREE_TYPE (TREE_OPERAND (exp, 1));
9912 mode = TYPE_MODE (type);
9913 }
9914 unsignedp = TREE_UNSIGNED (type);
9915 code = unsignedp ? unsigned_code : signed_code;
9916
9917 #ifdef HAVE_canonicalize_funcptr_for_compare
9918 /* If function pointers need to be "canonicalized" before they can
9919 be reliably compared, then canonicalize them. */
9920 if (HAVE_canonicalize_funcptr_for_compare
9921 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9922 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9923 == FUNCTION_TYPE))
9924 {
9925 rtx new_op0 = gen_reg_rtx (mode);
9926
9927 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9928 op0 = new_op0;
9929 }
9930
9931 if (HAVE_canonicalize_funcptr_for_compare
9932 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9933 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9934 == FUNCTION_TYPE))
9935 {
9936 rtx new_op1 = gen_reg_rtx (mode);
9937
9938 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9939 op1 = new_op1;
9940 }
9941 #endif
9942
9943 /* Do any postincrements in the expression that was tested. */
9944 emit_queue ();
9945
9946 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9947 ((mode == BLKmode)
9948 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9949 if_false_label, if_true_label);
9950 }
9951 \f
9952 /* Generate code to calculate EXP using a store-flag instruction
9953 and return an rtx for the result. EXP is either a comparison
9954 or a TRUTH_NOT_EXPR whose operand is a comparison.
9955
9956 If TARGET is nonzero, store the result there if convenient.
9957
9958 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9959 cheap.
9960
9961 Return zero if there is no suitable set-flag instruction
9962 available on this machine.
9963
9964 Once expand_expr has been called on the arguments of the comparison,
9965 we are committed to doing the store flag, since it is not safe to
9966 re-evaluate the expression. We emit the store-flag insn by calling
9967 emit_store_flag, but only expand the arguments if we have a reason
9968 to believe that emit_store_flag will be successful. If we think that
9969 it will, but it isn't, we have to simulate the store-flag with a
9970 set/jump/set sequence. */
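
/* For example, for `flag = (a < b);' on a target with a suitable scc
   pattern, emit_store_flag below should leave 0 or 1 directly in the
   result register; if it fails, the fallback at the end loads 1, emits the
   comparison, and conditionally jumps over a store of 0 (the constants are
   swapped when INVERT is set).  */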
9971
9972 static rtx
9973 do_store_flag (exp, target, mode, only_cheap)
9974 tree exp;
9975 rtx target;
9976 enum machine_mode mode;
9977 int only_cheap;
9978 {
9979 enum rtx_code code;
9980 tree arg0, arg1, type;
9981 tree tem;
9982 enum machine_mode operand_mode;
9983 int invert = 0;
9984 int unsignedp;
9985 rtx op0, op1;
9986 enum insn_code icode;
9987 rtx subtarget = target;
9988 rtx result, label;
9989
9990 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9991 result at the end. We can't simply invert the test since it would
9992 have already been inverted if it were valid. This case occurs for
9993 some floating-point comparisons. */
9994
9995 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9996 invert = 1, exp = TREE_OPERAND (exp, 0);
9997
9998 arg0 = TREE_OPERAND (exp, 0);
9999 arg1 = TREE_OPERAND (exp, 1);
10000
10001 /* Don't crash if the comparison was erroneous. */
10002 if (arg0 == error_mark_node || arg1 == error_mark_node)
10003 return const0_rtx;
10004
10005 type = TREE_TYPE (arg0);
10006 operand_mode = TYPE_MODE (type);
10007 unsignedp = TREE_UNSIGNED (type);
10008
10009 /* We won't bother with BLKmode store-flag operations because it would mean
10010 passing a lot of information to emit_store_flag. */
10011 if (operand_mode == BLKmode)
10012 return 0;
10013
10014 /* We won't bother with store-flag operations involving function pointers
10015 when function pointers must be canonicalized before comparisons. */
10016 #ifdef HAVE_canonicalize_funcptr_for_compare
10017 if (HAVE_canonicalize_funcptr_for_compare
10018 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10019 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10020 == FUNCTION_TYPE))
10021 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10022 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10023 == FUNCTION_TYPE))))
10024 return 0;
10025 #endif
10026
10027 STRIP_NOPS (arg0);
10028 STRIP_NOPS (arg1);
10029
10030 /* Get the rtx comparison code to use. We know that EXP is a comparison
10031 operation of some type. Some comparisons against 1 and -1 can be
10032 converted to comparisons with zero. Do so here so that the tests
10033 below will be aware that we have a comparison with zero. These
10034 tests will not catch constants in the first operand, but constants
10035 are rarely passed as the first operand. */
10036
10037 switch (TREE_CODE (exp))
10038 {
10039 case EQ_EXPR:
10040 code = EQ;
10041 break;
10042 case NE_EXPR:
10043 code = NE;
10044 break;
10045 case LT_EXPR:
10046 if (integer_onep (arg1))
10047 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10048 else
10049 code = unsignedp ? LTU : LT;
10050 break;
10051 case LE_EXPR:
10052 if (! unsignedp && integer_all_onesp (arg1))
10053 arg1 = integer_zero_node, code = LT;
10054 else
10055 code = unsignedp ? LEU : LE;
10056 break;
10057 case GT_EXPR:
10058 if (! unsignedp && integer_all_onesp (arg1))
10059 arg1 = integer_zero_node, code = GE;
10060 else
10061 code = unsignedp ? GTU : GT;
10062 break;
10063 case GE_EXPR:
10064 if (integer_onep (arg1))
10065 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10066 else
10067 code = unsignedp ? GEU : GE;
10068 break;
10069
10070 case UNORDERED_EXPR:
10071 code = UNORDERED;
10072 break;
10073 case ORDERED_EXPR:
10074 code = ORDERED;
10075 break;
10076 case UNLT_EXPR:
10077 code = UNLT;
10078 break;
10079 case UNLE_EXPR:
10080 code = UNLE;
10081 break;
10082 case UNGT_EXPR:
10083 code = UNGT;
10084 break;
10085 case UNGE_EXPR:
10086 code = UNGE;
10087 break;
10088 case UNEQ_EXPR:
10089 code = UNEQ;
10090 break;
10091
10092 default:
10093 abort ();
10094 }
10095
10096 /* Put a constant second. */
10097 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10098 {
10099 tem = arg0; arg0 = arg1; arg1 = tem;
10100 code = swap_condition (code);
10101 }
10102
10103 /* If this is an equality or inequality test of a single bit, we can
10104 do this by shifting the bit being tested to the low-order bit and
10105 masking the result with the constant 1. If the condition was EQ,
10106 we xor it with 1. This does not require an scc insn and is faster
10107 than an scc insn even if we have it. */
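
  /* For example, `(x & 0x10) != 0' is computed as `(x >> 4) & 1', and
     `(x & 0x10) == 0' as `((x >> 4) ^ 1) & 1'; when the bit tested is the
     sign bit, the trailing AND is omitted.  */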
10108
10109 if ((code == NE || code == EQ)
10110 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10111 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10112 {
10113 tree inner = TREE_OPERAND (arg0, 0);
10114 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10115 int ops_unsignedp;
10116
10117 /* If INNER is a right shift of a constant and it plus BITNUM does
10118 not overflow, adjust BITNUM and INNER. */
10119
10120 if (TREE_CODE (inner) == RSHIFT_EXPR
10121 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10122 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10123 && bitnum < TYPE_PRECISION (type)
10124 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10125 bitnum - TYPE_PRECISION (type)))
10126 {
10127 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10128 inner = TREE_OPERAND (inner, 0);
10129 }
10130
10131 /* If we are going to be able to omit the AND below, we must do our
10132 operations as unsigned. If we must use the AND, we have a choice.
10133 Normally unsigned is faster, but for some machines signed is. */
10134 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10135 #ifdef LOAD_EXTEND_OP
10136 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10137 #else
10138 : 1
10139 #endif
10140 );
10141
10142 if (! get_subtarget (subtarget)
10143 || GET_MODE (subtarget) != operand_mode
10144 || ! safe_from_p (subtarget, inner, 1))
10145 subtarget = 0;
10146
10147 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10148
10149 if (bitnum != 0)
10150 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10151 size_int (bitnum), subtarget, ops_unsignedp);
10152
10153 if (GET_MODE (op0) != mode)
10154 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10155
10156 if ((code == EQ && ! invert) || (code == NE && invert))
10157 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10158 ops_unsignedp, OPTAB_LIB_WIDEN);
10159
10160 /* Put the AND last so it can combine with more things. */
10161 if (bitnum != TYPE_PRECISION (type) - 1)
10162 op0 = expand_and (op0, const1_rtx, subtarget);
10163
10164 return op0;
10165 }
10166
10167 /* Now see if we are likely to be able to do this. Return if not. */
10168 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10169 return 0;
10170
10171 icode = setcc_gen_code[(int) code];
10172 if (icode == CODE_FOR_nothing
10173 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10174 {
10175 /* We can only do this if it is one of the special cases that
10176 can be handled without an scc insn. */
10177 if ((code == LT && integer_zerop (arg1))
10178 || (! only_cheap && code == GE && integer_zerop (arg1)))
10179 ;
10180 else if (BRANCH_COST >= 0
10181 && ! only_cheap && (code == NE || code == EQ)
10182 && TREE_CODE (type) != REAL_TYPE
10183 && ((abs_optab->handlers[(int) operand_mode].insn_code
10184 != CODE_FOR_nothing)
10185 || (ffs_optab->handlers[(int) operand_mode].insn_code
10186 != CODE_FOR_nothing)))
10187 ;
10188 else
10189 return 0;
10190 }
10191
10192 if (! get_subtarget (target)
10193 || GET_MODE (subtarget) != operand_mode
10194 || ! safe_from_p (subtarget, arg1, 1))
10195 subtarget = 0;
10196
10197 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10198 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10199
10200 if (target == 0)
10201 target = gen_reg_rtx (mode);
10202
10203 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10204      because, if emit_store_flag does anything, it will succeed and
10205 OP0 and OP1 will not be used subsequently. */
10206
10207 result = emit_store_flag (target, code,
10208 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10209 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10210 operand_mode, unsignedp, 1);
10211
10212 if (result)
10213 {
10214 if (invert)
10215 result = expand_binop (mode, xor_optab, result, const1_rtx,
10216 result, 0, OPTAB_LIB_WIDEN);
10217 return result;
10218 }
10219
10220 /* If this failed, we have to do this with set/compare/jump/set code. */
10221 if (GET_CODE (target) != REG
10222 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10223 target = gen_reg_rtx (GET_MODE (target));
10224
10225 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10226 result = compare_from_rtx (op0, op1, code, unsignedp,
10227 operand_mode, NULL_RTX);
10228 if (GET_CODE (result) == CONST_INT)
10229 return (((result == const0_rtx && ! invert)
10230 || (result != const0_rtx && invert))
10231 ? const0_rtx : const1_rtx);
10232
10233 label = gen_label_rtx ();
10234 if (bcc_gen_fctn[(int) code] == 0)
10235 abort ();
10236
10237 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10238 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10239 emit_label (label);
10240
10241 return target;
10242 }
10243 \f
10244
10245 /* Stubs in case we haven't got a casesi insn. */
10246 #ifndef HAVE_casesi
10247 # define HAVE_casesi 0
10248 # define gen_casesi(a, b, c, d, e) (0)
10249 # define CODE_FOR_casesi CODE_FOR_nothing
10250 #endif
10251
10252 /* If the machine does not have a case insn that compares the bounds,
10253 this means extra overhead for dispatch tables, which raises the
10254 threshold for using them. */
10255 #ifndef CASE_VALUES_THRESHOLD
10256 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10257 #endif /* CASE_VALUES_THRESHOLD */
10258
10259 unsigned int
10260 case_values_threshold ()
10261 {
10262 return CASE_VALUES_THRESHOLD;
10263 }
10264
10265 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10266 0 otherwise (i.e. if there is no casesi instruction). */
10267 int
10268 try_casesi (index_type, index_expr, minval, range,
10269 table_label, default_label)
10270 tree index_type, index_expr, minval, range;
10271 rtx table_label ATTRIBUTE_UNUSED;
10272 rtx default_label;
10273 {
10274 enum machine_mode index_mode = SImode;
10275 int index_bits = GET_MODE_BITSIZE (index_mode);
10276 rtx op1, op2, index;
10277 enum machine_mode op_mode;
10278
10279 if (! HAVE_casesi)
10280 return 0;
10281
10282 /* Convert the index to SImode. */
10283 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10284 {
10285 enum machine_mode omode = TYPE_MODE (index_type);
10286 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10287
10288 /* We must handle the endpoints in the original mode. */
10289 index_expr = build (MINUS_EXPR, index_type,
10290 index_expr, minval);
10291 minval = integer_zero_node;
10292 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10293 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10294 omode, 1, default_label);
10295 /* Now we can safely truncate. */
10296 index = convert_to_mode (index_mode, index, 0);
10297 }
10298 else
10299 {
10300 if (TYPE_MODE (index_type) != index_mode)
10301 {
10302 index_expr = convert (type_for_size (index_bits, 0),
10303 index_expr);
10304 index_type = TREE_TYPE (index_expr);
10305 }
10306
10307 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10308 }
10309 emit_queue ();
10310 index = protect_from_queue (index, 0);
10311 do_pending_stack_adjust ();
10312
10313 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10314 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10315 (index, op_mode))
10316 index = copy_to_mode_reg (op_mode, index);
10317
10318 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10319
10320 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10321 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10322 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10323 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10324 (op1, op_mode))
10325 op1 = copy_to_mode_reg (op_mode, op1);
10326
10327 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10328
10329 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10330 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10331 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10332 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10333 (op2, op_mode))
10334 op2 = copy_to_mode_reg (op_mode, op2);
10335
10336 emit_jump_insn (gen_casesi (index, op1, op2,
10337 table_label, default_label));
10338 return 1;
10339 }
10340
10341 /* Attempt to generate a tablejump instruction; same concept. */
10342 #ifndef HAVE_tablejump
10343 #define HAVE_tablejump 0
10344 #define gen_tablejump(x, y) (0)
10345 #endif
10346
10347 /* Subroutine of the next function.
10348
10349 INDEX is the value being switched on, with the lowest value
10350 in the table already subtracted.
10351 MODE is its expected mode (needed if INDEX is constant).
10352 RANGE is the length of the jump table.
10353 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10354
10355 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10356 index value is out of range. */
10357
10358 static void
10359 do_tablejump (index, mode, range, table_label, default_label)
10360 rtx index, range, table_label, default_label;
10361 enum machine_mode mode;
10362 {
10363 rtx temp, vector;
10364
10365 /* Do an unsigned comparison (in the proper mode) between the index
10366 expression and the value which represents the length of the range.
10367 Since we just finished subtracting the lower bound of the range
10368 from the index expression, this comparison allows us to simultaneously
10369 check that the original index expression value is both greater than
10370 or equal to the minimum value of the range and less than or equal to
10371 the maximum value of the range. */
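
  /* For example, for a switch whose case values run from 5 to 12, INDEX
     here is `x - 5' and RANGE is 7; the single unsigned test INDEX > 7
     sends both x < 5 (where the subtraction wrapped around) and x > 12 to
     DEFAULT_LABEL.  */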
10372
10373 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10374 default_label);
10375
10376 /* If index is in range, it must fit in Pmode.
10377 Convert to Pmode so we can index with it. */
10378 if (mode != Pmode)
10379 index = convert_to_mode (Pmode, index, 1);
10380
10381   /* Don't let a MEM slip through, because then the INDEX that comes
10382 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10383 and break_out_memory_refs will go to work on it and mess it up. */
10384 #ifdef PIC_CASE_VECTOR_ADDRESS
10385 if (flag_pic && GET_CODE (index) != REG)
10386 index = copy_to_mode_reg (Pmode, index);
10387 #endif
10388
10389 /* If flag_force_addr were to affect this address
10390 it could interfere with the tricky assumptions made
10391 about addresses that contain label-refs,
10392 which may be valid only very near the tablejump itself. */
10393 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10394 GET_MODE_SIZE, because this indicates how large insns are. The other
10395 uses should all be Pmode, because they are addresses. This code
10396 could fail if addresses and insns are not the same size. */
10397 index = gen_rtx_PLUS (Pmode,
10398 gen_rtx_MULT (Pmode, index,
10399 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10400 gen_rtx_LABEL_REF (Pmode, table_label));
10401 #ifdef PIC_CASE_VECTOR_ADDRESS
10402 if (flag_pic)
10403 index = PIC_CASE_VECTOR_ADDRESS (index);
10404 else
10405 #endif
10406 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10407 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10408 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10409 RTX_UNCHANGING_P (vector) = 1;
10410 convert_move (temp, vector, 0);
10411
10412 emit_jump_insn (gen_tablejump (temp, table_label));
10413
10414 /* If we are generating PIC code or if the table is PC-relative, the
10415 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10416 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10417 emit_barrier ();
10418 }
10419
10420 int
10421 try_tablejump (index_type, index_expr, minval, range,
10422 table_label, default_label)
10423 tree index_type, index_expr, minval, range;
10424 rtx table_label, default_label;
10425 {
10426 rtx index;
10427
10428 if (! HAVE_tablejump)
10429 return 0;
10430
10431 index_expr = fold (build (MINUS_EXPR, index_type,
10432 convert (index_type, index_expr),
10433 convert (index_type, minval)));
10434 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10435 emit_queue ();
10436 index = protect_from_queue (index, 0);
10437 do_pending_stack_adjust ();
10438
10439 do_tablejump (index, TYPE_MODE (index_type),
10440 convert_modes (TYPE_MODE (index_type),
10441 TYPE_MODE (TREE_TYPE (range)),
10442 expand_expr (range, NULL_RTX,
10443 VOIDmode, 0),
10444 TREE_UNSIGNED (TREE_TYPE (range))),
10445 table_label, default_label);
10446 return 1;
10447 }